#include "stdafx.h"
#include "VKGSRender.h"
#include "vkutils/buffer_object.h"

#include "Emu/RSX/Overlays/overlay_manager.h"
#include "Emu/RSX/Overlays/overlays.h"
#include "Emu/Cell/Modules/cellVideoOut.h"

#include "upscalers/bilinear_pass.hpp"
#include "upscalers/fsr_pass.h"
#include "upscalers/nearest_pass.hpp"

#include "util/asm.hpp"
#include "util/video_provider.h"

extern atomic_t<bool> g_user_asked_for_screenshot;
extern atomic_t<recording_mode> g_recording_mode;
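
// Tears down and recreates the presentation swapchain to match the current client area.
// Frame contexts that still hold a present image are cleaned up first, and the new images are cleared before use.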
void VKGSRender::reinitialize_swapchain()
{
    m_swapchain_dims.width = m_frame->client_width();
    m_swapchain_dims.height = m_frame->client_height();

    // Reject requests to acquire a new swapchain if the window is minimized.
    // The NVIDIA driver will spam VK_ERROR_OUT_OF_DATE_KHR if you try to acquire an image from the swapchain while the window is minimized.
    // However, any attempt to actually renew the swapchain in this state will crash the driver with VK_ERROR_DEVICE_LOST.
    if (m_swapchain_dims.width == 0 || m_swapchain_dims.height == 0)
    {
        swapchain_unavailable = true;
        return;
    }

    // NOTE: This operation will create a hard sync point
    close_and_submit_command_buffer();
    m_current_command_buffer->reset();
    m_current_command_buffer->begin();

    for (auto& ctx : frame_context_storage)
    {
        if (ctx.present_image == umax)
            continue;

        // Release present image by presenting it
        frame_context_cleanup(&ctx);
    }

    // Discard the current upscaling pipeline if any
    m_upscaler.reset();

    // Drain all the queues
    vkDeviceWaitIdle(*m_device);

    // Rebuild swapchain. Old swapchain destruction is handled by the init_swapchain call
    if (!m_swapchain->init(m_swapchain_dims.width, m_swapchain_dims.height))
    {
        rsx_log.warning("Swapchain initialization failed. Request ignored [%dx%d]", m_swapchain_dims.width, m_swapchain_dims.height);
        swapchain_unavailable = true;
        return;
    }

    // Prepare new swapchain images for use
    for (u32 i = 0; i < m_swapchain->get_swap_image_count(); ++i)
    {
        const auto target_layout = m_swapchain->get_optimal_present_layout();
        const auto target_image = m_swapchain->get_image(i);
        VkClearColorValue clear_color{};
        VkImageSubresourceRange range = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1 };

        vk::change_image_layout(*m_current_command_buffer, target_image, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, range);
        vkCmdClearColorImage(*m_current_command_buffer, target_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &clear_color, 1, &range);
        vk::change_image_layout(*m_current_command_buffer, target_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, target_layout, range);
    }

    // Will have to block until rendering is completed
    vk::fence resize_fence(*m_device);

    // Flush the command buffer
    close_and_submit_command_buffer(&resize_fence);
    vk::wait_for_fence(&resize_fence);

    m_current_command_buffer->reset();
    m_current_command_buffer->begin();

    swapchain_unavailable = false;
    should_reinitialize_swapchain = false;
}
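
// Flushes the frame's swap command buffer and queues the acquired image for presentation.
// VK_SUBOPTIMAL_KHR and VK_ERROR_OUT_OF_DATE_KHR only flag the swapchain for recreation instead of failing the flip.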
void VKGSRender::present(vk::frame_context_t* ctx)
{
    ensure(ctx->present_image != umax);

    // Partial CS flush
    ctx->swap_command_buffer->flush();

    if (!swapchain_unavailable)
    {
        switch (VkResult error = m_swapchain->present(ctx->present_wait_semaphore, ctx->present_image))
        {
        case VK_SUCCESS:
            break;
        case VK_SUBOPTIMAL_KHR:
            should_reinitialize_swapchain = true;
            break;
        case VK_ERROR_OUT_OF_DATE_KHR:
            swapchain_unavailable = true;
            break;
        default:
            // Other errors are not raised by rpcs3 itself. They can be caused by 3rd party injectors with bad code, over which we have no control.
            // Let the application attempt to recover instead of crashing outright.
            rsx_log.error("VkPresent returned unexpected error code %lld. Will attempt to recreate the swapchain. Please disable 3rd party injector tools.", static_cast<s64>(error));
            swapchain_unavailable = true;
            break;
        }
    }

    // Presentation image released; reset value
    ctx->present_image = -1;
}
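
// End-of-frame housekeeping: balances video memory, trims the render target and texture caches,
// tags the ring heaps, then rotates to the next frame context in the ring.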
void VKGSRender::advance_queued_frames()
{
    // Check all other frames for completion and clear resources
    check_present_status();

    // Run video memory balancer
    m_device->rebalance_memory_type_usage();
    vk::vmm_check_memory_usage();

    // m_rtts storage is double buffered and should be safe to tag on frame boundary
    m_rtts.trim(*m_current_command_buffer, vk::vmm_determine_memory_load_severity());

    // Texture cache is also double buffered to prevent use-after-free
    m_texture_cache.on_frame_end();
    m_samplers_dirty.store(true);

    vk::remove_unused_framebuffers();

    m_vertex_cache->purge();
    m_current_frame->tag_frame_end(m_attrib_ring_info.get_current_put_pos_minus_one(),
        m_vertex_env_ring_info.get_current_put_pos_minus_one(),
        m_fragment_env_ring_info.get_current_put_pos_minus_one(),
        m_vertex_layout_ring_info.get_current_put_pos_minus_one(),
        m_fragment_texture_params_ring_info.get_current_put_pos_minus_one(),
        m_fragment_constants_ring_info.get_current_put_pos_minus_one(),
        m_transform_constants_ring_info.get_current_put_pos_minus_one(),
        m_index_buffer_ring_info.get_current_put_pos_minus_one(),
        m_texture_upload_buffer_ring_info.get_current_put_pos_minus_one(),
        m_raster_env_ring_info.get_current_put_pos_minus_one());

    m_queued_frames.push_back(m_current_frame);
    ensure(m_queued_frames.size() <= VK_MAX_ASYNC_FRAMES);

    m_current_queue_index = (m_current_queue_index + 1) % VK_MAX_ASYNC_FRAMES;
    m_current_frame = &frame_context_storage[m_current_queue_index];
    m_current_frame->flags |= frame_context_state::dirty;

    vk::advance_frame_counter();
}
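
// Closes the current command buffer with the present semaphores attached, issues the present request,
// then switches to the next primary command buffer and frame context.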
void VKGSRender::queue_swap_request()
{
    ensure(!m_current_frame->swap_command_buffer);
    m_current_frame->swap_command_buffer = m_current_command_buffer;

    if (m_swapchain->is_headless())
    {
        m_swapchain->end_frame(*m_current_command_buffer, m_current_frame->present_image);
        close_and_submit_command_buffer();
    }
    else
    {
        close_and_submit_command_buffer(nullptr,
            m_current_frame->acquire_signal_semaphore,
            m_current_frame->present_wait_semaphore,
            VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_TRANSFER_BIT);
    }

    // Set up a present request for this frame as well
    present(m_current_frame);

    // Grab next cb in line and make it usable
    m_current_command_buffer = m_primary_cb_list.next();
    m_current_command_buffer->reset();
    m_current_command_buffer->begin();

    // Set up new pointers for the next frame
    advance_queued_frames();
}
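
// Waits for the given frame's swap command buffer to complete, then releases its per-frame resources:
// dirty overlay resources, stale buffer views and the ring-heap space that the frame still held.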
void VKGSRender::frame_context_cleanup(vk::frame_context_t* ctx)
{
    ensure(ctx->swap_command_buffer);

    // Perform hard swap here
    if (ctx->swap_command_buffer->wait(FRAME_PRESENT_TIMEOUT) != VK_SUCCESS)
    {
        // Lost surface/device, release swapchain
        swapchain_unavailable = true;
    }

    // Resource cleanup.
    // TODO: This is some outdated crap.
    {
        if (m_overlay_manager && m_overlay_manager->has_dirty())
        {
            auto ui_renderer = vk::get_overlay_pass<vk::ui_overlay_renderer>();

            m_overlay_manager->lock();
            std::vector<u32> uids_to_dispose;
            uids_to_dispose.reserve(m_overlay_manager->get_dirty().size());

            for (const auto& view : m_overlay_manager->get_dirty())
            {
                ui_renderer->remove_temp_resources(view->uid);
                uids_to_dispose.push_back(view->uid);
            }

            m_overlay_manager->unlock();
            m_overlay_manager->dispose(uids_to_dispose);
        }

        vk::get_resource_manager()->trim();
        vk::reset_global_resources();

        ctx->buffer_views_to_clean.clear();

        const auto shadermode = g_cfg.video.shadermode.get();
        if (shadermode == shader_mode::async_with_interpreter || shadermode == shader_mode::interpreter_only)
        {
            // TODO: This is jank AF
            m_vertex_instructions_buffer.reset_allocation_stats();
            m_fragment_instructions_buffer.reset_allocation_stats();
        }

        if (ctx->last_frame_sync_time > m_last_heap_sync_time)
        {
            m_last_heap_sync_time = ctx->last_frame_sync_time;

            // Heap cleanup; deallocates memory consumed by the frame if it is still held
            m_attrib_ring_info.m_get_pos = ctx->attrib_heap_ptr;
            m_vertex_env_ring_info.m_get_pos = ctx->vtx_env_heap_ptr;
            m_fragment_env_ring_info.m_get_pos = ctx->frag_env_heap_ptr;
            m_fragment_constants_ring_info.m_get_pos = ctx->frag_const_heap_ptr;
            m_transform_constants_ring_info.m_get_pos = ctx->vtx_const_heap_ptr;
            m_vertex_layout_ring_info.m_get_pos = ctx->vtx_layout_heap_ptr;
            m_fragment_texture_params_ring_info.m_get_pos = ctx->frag_texparam_heap_ptr;
            m_index_buffer_ring_info.m_get_pos = ctx->index_heap_ptr;
            m_texture_upload_buffer_ring_info.m_get_pos = ctx->texture_upload_heap_ptr;

            m_attrib_ring_info.notify();
            m_vertex_env_ring_info.notify();
            m_fragment_env_ring_info.notify();
            m_fragment_constants_ring_info.notify();
            m_transform_constants_ring_info.notify();
            m_vertex_layout_ring_info.notify();
            m_fragment_texture_params_ring_info.notify();
            m_index_buffer_ring_info.notify();
            m_texture_upload_buffer_ring_info.notify();
        }
    }

    ctx->swap_command_buffer = nullptr;

    // Remove from queued list
    while (!m_queued_frames.empty())
    {
        auto frame = m_queued_frames.front();
        m_queued_frames.pop_front();

        if (frame == ctx)
        {
            break;
        }
    }

    vk::advance_completed_frame_counter();
}
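
// Resolves the image to present for the given display buffer. The surface store is checked first,
// then the texture cache; as a last resort the buffer is uploaded from guest (cell) memory.
// When a render target is used directly, info->width and info->height are updated to the scaled dimensions actually sourced.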
vk::viewable_image* VKGSRender::get_present_source(vk::present_surface_info* info, const rsx::avconf& avconfig)
{
    vk::viewable_image* image_to_flip = nullptr;

    // Check the surface store first
    const auto format_bpp = rsx::get_format_block_size_in_bytes(info->format);
    const auto overlap_info = m_rtts.get_merged_texture_memory_region(*m_current_command_buffer,
        info->address, info->width, info->height, info->pitch, format_bpp, rsx::surface_access::transfer_read);

    if (!overlap_info.empty())
    {
        const auto& section = overlap_info.back();
        auto surface = vk::as_rtt(section.surface);
        bool viable = false;

        if (section.base_address >= info->address)
        {
            const auto surface_width = surface->get_surface_width<rsx::surface_metrics::samples>();
            const auto surface_height = surface->get_surface_height<rsx::surface_metrics::samples>();

            if (section.base_address == info->address)
            {
                // Check for fit or crop
                viable = (surface_width >= info->width && surface_height >= info->height);
            }
            else
            {
                // Check for borders and letterboxing
                const u32 inset_offset = section.base_address - info->address;
                const u32 inset_y = inset_offset / info->pitch;
                const u32 inset_x = (inset_offset % info->pitch) / format_bpp;

                const u32 full_width = surface_width + inset_x + inset_x;
                const u32 full_height = surface_height + inset_y + inset_y;

                viable = (full_width == info->width && full_height == info->height);
            }

            if (viable)
            {
                image_to_flip = section.surface->get_surface(rsx::surface_access::transfer_read);

                std::tie(info->width, info->height) = rsx::apply_resolution_scale<true>(
                    std::min(surface_width, info->width),
                    std::min(surface_height, info->height));
            }
        }
    }
    else if (auto surface = m_texture_cache.find_texture_from_dimensions<true>(info->address, info->format);
        surface && surface->get_width() >= info->width && surface->get_height() >= info->height)
    {
        // Hack - this should be the first location to check for output
        // The render might have been done offscreen or in software and a blit used to display
        image_to_flip = dynamic_cast<vk::viewable_image*>(surface->get_raw_texture());
    }

    if (!image_to_flip)
    {
        // Read from cell
        const auto range = utils::address_range::start_length(info->address, info->pitch * info->height);
        const u32 lookup_mask = rsx::texture_upload_context::blit_engine_dst | rsx::texture_upload_context::framebuffer_storage;
        const auto overlap = m_texture_cache.find_texture_from_range<true>(range, 0, lookup_mask);

        for (const auto& section : overlap)
        {
            if (!section->is_synchronized())
            {
                section->copy_texture(*m_current_command_buffer, true);
            }
        }

        if (m_current_command_buffer->flags & vk::command_buffer::cb_has_dma_transfer)
        {
            // Submit for processing to lower hard fault penalty
            flush_command_queue();
        }
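
        // Map the active cellVideoOut color format to the matching Vulkan format for the upload below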
        VkFormat format;
        switch (avconfig.format)
        {
        default:
            rsx_log.error("Unhandled video output format 0x%x", avconfig.format);
            [[fallthrough]];
        case CELL_VIDEO_OUT_BUFFER_COLOR_FORMAT_X8R8G8B8:
            format = VK_FORMAT_B8G8R8A8_UNORM;
            break;
        case CELL_VIDEO_OUT_BUFFER_COLOR_FORMAT_X8B8G8R8:
            format = VK_FORMAT_R8G8B8A8_UNORM;
            break;
        }

        m_texture_cache.invalidate_range(*m_current_command_buffer, range, rsx::invalidation_cause::read);
        image_to_flip = m_texture_cache.upload_image_simple(*m_current_command_buffer, format, info->address, info->width, info->height, info->pitch);
    }

    return image_to_flip;
}
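
// Presents a completed frame: acquires a swapchain image, upscales and blits the source image
// (routing through the output calibration pass for gamma, limited-range or stereo output),
// draws any overlays and debug statistics, then queues the swap request.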
void VKGSRender::flip(const rsx::display_flip_info_t& info)
{
    // Check swapchain condition/status
    if (!m_swapchain->supports_automatic_wm_reports())
    {
        if (m_swapchain_dims.width != m_frame->client_width() + 0u ||
            m_swapchain_dims.height != m_frame->client_height() + 0u)
        {
            swapchain_unavailable = true;
        }
    }

    if (swapchain_unavailable || should_reinitialize_swapchain)
    {
        reinitialize_swapchain();
    }

    m_profiler.start();

    if (m_current_frame == &m_aux_frame_context)
    {
        m_current_frame = &frame_context_storage[m_current_queue_index];
        if (m_current_frame->swap_command_buffer)
        {
            // It's possible this flip request was triggered by overlays while the flip queue is in an undefined state
            frame_context_cleanup(m_current_frame);
        }

        // Swap aux storage and current frame; aux storage should always be ready for use
        m_current_frame->swap_storage(m_aux_frame_context);
        m_current_frame->grab_resources(m_aux_frame_context);
    }
    else if (m_current_frame->swap_command_buffer)
    {
        if (info.stats.draw_calls > 0)
        {
            // This can be 'legal' if the window was being resized and no polling happened because of the swapchain_unavailable flag
            rsx_log.error("Possible data corruption on frame context storage detected");
        }

        // There were no draws and back-to-back flips happened
        frame_context_cleanup(m_current_frame);
    }

    if (info.skip_frame || swapchain_unavailable)
    {
        if (!info.skip_frame)
        {
            ensure(swapchain_unavailable);

            // Perform a mini-flip here without invoking present code
            m_current_frame->swap_command_buffer = m_current_command_buffer;
            flush_command_queue(true);
            vk::advance_frame_counter();
            frame_context_cleanup(m_current_frame);
        }

        m_frame->flip(m_context);
        rsx::thread::flip(info);
        return;
    }

    u32 buffer_width = display_buffers[info.buffer].width;
    u32 buffer_height = display_buffers[info.buffer].height;
    u32 buffer_pitch = display_buffers[info.buffer].pitch;

    u32 av_format;
    const auto& avconfig = g_fxo->get<rsx::avconf>();

    if (!buffer_width)
    {
        buffer_width = avconfig.resolution_x;
        buffer_height = avconfig.resolution_y;
    }

    if (avconfig.state)
    {
        av_format = avconfig.get_compatible_gcm_format();
        if (!buffer_pitch)
            buffer_pitch = buffer_width * avconfig.get_bpp();

        const u32 video_frame_height = (avconfig.stereo_mode == stereo_render_mode_options::disabled ? avconfig.resolution_y : ((avconfig.resolution_y - 30) / 2));

        buffer_width = std::min(buffer_width, avconfig.resolution_x);
        buffer_height = std::min(buffer_height, video_frame_height);
    }
    else
    {
        av_format = CELL_GCM_TEXTURE_A8R8G8B8;
        if (!buffer_pitch)
            buffer_pitch = buffer_width * 4;
    }
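
    // For stereo output the display buffer holds both eye images stacked vertically with a 30-line gap between them,
    // hence the (resolution_y - 30) / 2 frame height above and the (buffer_height + 30) offset used to locate the second eye below.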

    // Scan memory for required data. This is done early to optimize waiting for the driver image acquire below.
    vk::viewable_image* image_to_flip = nullptr, *image_to_flip2 = nullptr;

    if (info.buffer < display_buffers_count && buffer_width && buffer_height)
    {
        vk::present_surface_info present_info;
        present_info.width = buffer_width;
        present_info.height = buffer_height;
        present_info.pitch = buffer_pitch;
        present_info.format = av_format;
        present_info.address = rsx::get_address(display_buffers[info.buffer].offset, CELL_GCM_LOCATION_LOCAL);

        image_to_flip = get_present_source(&present_info, avconfig);

        if (avconfig.stereo_mode != stereo_render_mode_options::disabled) [[unlikely]]
        {
            const auto [unused, min_expected_height] = rsx::apply_resolution_scale<true>(RSX_SURFACE_DIMENSION_IGNORED, buffer_height + 30);
            if (image_to_flip->height() < min_expected_height)
            {
                // Get image for second eye
                const u32 image_offset = (buffer_height + 30) * buffer_pitch + display_buffers[info.buffer].offset;
                present_info.width = buffer_width;
                present_info.height = buffer_height;
                present_info.address = rsx::get_address(image_offset, CELL_GCM_LOCATION_LOCAL);

                image_to_flip2 = get_present_source(&present_info, avconfig);
            }
            else
            {
                // Account for possible insets
                const auto [unused2, scaled_buffer_height] = rsx::apply_resolution_scale<true>(RSX_SURFACE_DIMENSION_IGNORED, buffer_height);
                buffer_height = std::min<u32>(image_to_flip->height() - min_expected_height, scaled_buffer_height);
            }
        }

        buffer_width = present_info.width;
        buffer_height = present_info.height;
    }

    if (info.emu_flip)
    {
        evaluate_cpu_usage_reduction_limits();
    }

    // Prepare surface for new frame. Set no timeout here so that we wait for the next image if need be
    ensure(m_current_frame->present_image == umax);
    ensure(m_current_frame->swap_command_buffer == nullptr);

    u64 timeout = m_swapchain->get_swap_image_count() <= VK_MAX_ASYNC_FRAMES ? 0ull : 100000000ull;
    while (VkResult status = m_swapchain->acquire_next_swapchain_image(m_current_frame->acquire_signal_semaphore, timeout, &m_current_frame->present_image))
    {
        switch (status)
        {
        case VK_TIMEOUT:
        case VK_NOT_READY:
        {
            // In some cases, after a fullscreen switch, the driver only allows N-1 images to be acquirable, where N = number of available swap images.
            // This means that any acquired images have to be released before acquireNextImage can return successfully,
            // even though the driver reports 2 swapchain images as available.
            // This makes fullscreen performance slower than windowed performance, as throughput is lowered by the loss of one presentable image.
            // Found on AMD Crimson 17.7.2

            // Whatever status was returned, this is now a spin
            timeout = 0ull;
            check_present_status();
            continue;
        }
        case VK_SUBOPTIMAL_KHR:
            should_reinitialize_swapchain = true;
            break;
        case VK_ERROR_OUT_OF_DATE_KHR:
            rsx_log.warning("vkAcquireNextImageKHR failed with VK_ERROR_OUT_OF_DATE_KHR. Flip request ignored until surface is recreated.");
            swapchain_unavailable = true;
            reinitialize_swapchain();
            continue;
        default:
            vk::die_with_error(status);
        }

        if (should_reinitialize_swapchain)
        {
            // Image is valid, new swapchain will be generated later
            break;
        }
    }

    // Confirm that the driver did not silently fail
    ensure(m_current_frame->present_image != umax);

    // Calculate output dimensions. Done after swapchain acquisition in case it was recreated.
    areai aspect_ratio;
    if (!g_cfg.video.stretch_to_display_area)
    {
        const auto converted = avconfig.aspect_convert_region({ buffer_width, buffer_height }, m_swapchain_dims);
        aspect_ratio = static_cast<areai>(converted);
    }
    else
    {
        aspect_ratio = { 0, 0, s32(m_swapchain_dims.width), s32(m_swapchain_dims.height) };
    }

    // Blit contents to screen..
    VkImage target_image = m_swapchain->get_image(m_current_frame->present_image);
    const auto present_layout = m_swapchain->get_optimal_present_layout();
    const VkImageSubresourceRange subresource_range = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1 };

    VkImageLayout target_layout = present_layout;
    VkRenderPass single_target_pass = VK_NULL_HANDLE;
    vk::framebuffer_holder* direct_fbo = nullptr;
    rsx::simple_array<vk::viewable_image*> calibration_src;

    if (!image_to_flip || aspect_ratio.x1 || aspect_ratio.y1)
    {
        // Clear the window background to black
        VkClearColorValue clear_black{};
        vk::change_image_layout(*m_current_command_buffer, target_image, present_layout, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, subresource_range);
        vkCmdClearColorImage(*m_current_command_buffer, target_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &clear_black, 1, &subresource_range);

        target_layout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
    }

    const output_scaling_mode output_scaling = g_cfg.video.output_scaling.get();

    if (!m_upscaler || m_output_scaling != output_scaling)
    {
        m_output_scaling = output_scaling;

        if (m_output_scaling == output_scaling_mode::nearest)
        {
            m_upscaler = std::make_unique<vk::nearest_upscale_pass>();
        }
        else if (m_output_scaling == output_scaling_mode::fsr)
        {
            m_upscaler = std::make_unique<vk::fsr_upscale_pass>();
        }
        else
        {
            m_upscaler = std::make_unique<vk::bilinear_upscale_pass>();
        }
    }
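
    // Two output paths follow: if gamma correction, limited RGB range or stereo output is required, the image(s)
    // are routed through the video_out_calibration_pass; otherwise the upscaler blits straight to the swapchain image.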
    if (image_to_flip)
    {
        const bool use_full_rgb_range_output = g_cfg.video.full_rgb_range_output.get();

        if (!use_full_rgb_range_output || !rsx::fcmp(avconfig.gamma, 1.f) || avconfig.stereo_mode != stereo_render_mode_options::disabled) [[unlikely]]
        {
            if (image_to_flip) calibration_src.push_back(image_to_flip);
            if (image_to_flip2) calibration_src.push_back(image_to_flip2);

            if (m_output_scaling == output_scaling_mode::fsr && avconfig.stereo_mode == stereo_render_mode_options::disabled) // 3D will be implemented later
            {
                // Run the upscaling pass before the rest of the output effects pipeline.
                // This can be done with all upscalers, but we already get bilinear upscaling for free if we just output directly through the filters.
                VkImageBlit request = {};
                request.srcSubresource = { image_to_flip->aspect(), 0, 0, 1 };
                request.dstSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1 };
                request.srcOffsets[0] = { 0, 0, 0 };
                request.srcOffsets[1] = { s32(buffer_width), s32(buffer_height), 1 };
                request.dstOffsets[0] = { 0, 0, 0 };
                request.dstOffsets[1] = { aspect_ratio.width(), aspect_ratio.height(), 1 };

                for (unsigned i = 0; i < calibration_src.size(); ++i)
                {
                    const rsx::flags32_t mode = (i == 0) ? UPSCALE_LEFT_VIEW : UPSCALE_RIGHT_VIEW;
                    calibration_src[i] = m_upscaler->scale_output(*m_current_command_buffer, image_to_flip, VK_NULL_HANDLE, VK_IMAGE_LAYOUT_UNDEFINED, request, mode);
                }
            }

            vk::change_image_layout(*m_current_command_buffer, target_image, target_layout, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, subresource_range);
            target_layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

            const auto key = vk::get_renderpass_key(m_swapchain->get_surface_format());
            single_target_pass = vk::get_renderpass(*m_device, key);
            ensure(single_target_pass != VK_NULL_HANDLE);

            direct_fbo = vk::get_framebuffer(*m_device, m_swapchain_dims.width, m_swapchain_dims.height, VK_FALSE, single_target_pass, m_swapchain->get_surface_format(), target_image);
            direct_fbo->add_ref();

            vk::get_overlay_pass<vk::video_out_calibration_pass>()->run(
                *m_current_command_buffer, areau(aspect_ratio), direct_fbo, calibration_src,
                avconfig.gamma, !use_full_rgb_range_output, avconfig.stereo_mode, single_target_pass);

            direct_fbo->release();
        }
        else
        {
            // Do raw transfer here as there is no image object associated with textures owned by the driver (TODO)
            VkImageBlit rgn = {};
            rgn.srcSubresource = { image_to_flip->aspect(), 0, 0, 1 };
            rgn.dstSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1 };
            rgn.srcOffsets[0] = { 0, 0, 0 };
            rgn.srcOffsets[1] = { s32(buffer_width), s32(buffer_height), 1 };
            rgn.dstOffsets[0] = { aspect_ratio.x1, aspect_ratio.y1, 0 };
            rgn.dstOffsets[1] = { aspect_ratio.x2, aspect_ratio.y2, 1 };

            if (target_layout != VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
            {
                vk::change_image_layout(*m_current_command_buffer, target_image, target_layout, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, subresource_range);
                target_layout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
            }

            m_upscaler->scale_output(*m_current_command_buffer, image_to_flip, target_image, target_layout, rgn, UPSCALE_AND_COMMIT | UPSCALE_DEFAULT_VIEW);
        }
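
        // Capture the presented frame for screenshots or the video recording pipeline:
        // copy the image into a host-visible staging buffer and read it back synchronously.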
        if (g_user_asked_for_screenshot || (g_recording_mode != recording_mode::stopped && m_frame->can_consume_frame()))
        {
            const usz sshot_size = buffer_height * buffer_width * 4;

            vk::buffer sshot_vkbuf(*m_device, utils::align(sshot_size, 0x100000), m_device->get_memory_mapping().host_visible_coherent,
                VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT, VK_BUFFER_USAGE_TRANSFER_DST_BIT, 0, VMM_ALLOCATION_POOL_UNDEFINED);

            VkBufferImageCopy copy_info;
            copy_info.bufferOffset = 0;
            copy_info.bufferRowLength = 0;
            copy_info.bufferImageHeight = 0;
            copy_info.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
            copy_info.imageSubresource.baseArrayLayer = 0;
            copy_info.imageSubresource.layerCount = 1;
            copy_info.imageSubresource.mipLevel = 0;
            copy_info.imageOffset.x = 0;
            copy_info.imageOffset.y = 0;
            copy_info.imageOffset.z = 0;
            copy_info.imageExtent.width = buffer_width;
            copy_info.imageExtent.height = buffer_height;
            copy_info.imageExtent.depth = 1;

            image_to_flip->push_layout(*m_current_command_buffer, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
            vk::copy_image_to_buffer(*m_current_command_buffer, image_to_flip, &sshot_vkbuf, copy_info);
            image_to_flip->pop_layout(*m_current_command_buffer);

            flush_command_queue(true);

            auto src = sshot_vkbuf.map(0, sshot_size);
            std::vector<u8> sshot_frame(sshot_size);
            memcpy(sshot_frame.data(), src, sshot_size);
            sshot_vkbuf.unmap();

            const bool is_bgra = image_to_flip->format() == VK_FORMAT_B8G8R8A8_UNORM;

            if (g_user_asked_for_screenshot.exchange(false))
            {
                m_frame->take_screenshot(std::move(sshot_frame), buffer_width, buffer_height, is_bgra);
            }
            else
            {
                m_frame->present_frame(sshot_frame, buffer_width, buffer_height, is_bgra);
            }
        }
    }

    const bool has_overlay = (m_overlay_manager && m_overlay_manager->has_visible());
    if (g_cfg.video.overlay || has_overlay)
    {
        if (target_layout != VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL)
        {
            // Change the image layout whilst setting up a dependency on waiting for the blit op to finish before we start writing
            VkImageMemoryBarrier barrier = {};
            barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
            barrier.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
            barrier.oldLayout = target_layout;
            barrier.image = target_image;
            barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
            barrier.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
            barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
            barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
            barrier.subresourceRange = subresource_range;

            vkCmdPipelineBarrier(*m_current_command_buffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1, &barrier);
            target_layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
        }

        if (!direct_fbo)
        {
            const auto key = vk::get_renderpass_key(m_swapchain->get_surface_format());
            single_target_pass = vk::get_renderpass(*m_device, key);
            ensure(single_target_pass != VK_NULL_HANDLE);

            direct_fbo = vk::get_framebuffer(*m_device, m_swapchain_dims.width, m_swapchain_dims.height, VK_FALSE, single_target_pass, m_swapchain->get_surface_format(), target_image);
        }

        direct_fbo->add_ref();

        if (has_overlay)
        {
            // Lock to avoid modification during run-update chain
            auto ui_renderer = vk::get_overlay_pass<vk::ui_overlay_renderer>();

            std::lock_guard lock(*m_overlay_manager);
            for (const auto& view : m_overlay_manager->get_views())
            {
                ui_renderer->run(*m_current_command_buffer, areau(aspect_ratio), direct_fbo, single_target_pass, m_texture_upload_buffer_ring_info, *view.get());
            }
        }
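
        // Debug performance overlay: per-frame timings plus texture and vertex cache statistics, printed line by line.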
        if (g_cfg.video.overlay)
        {
            // TODO: Move this to native overlay! It is both faster and easier to manage
            if (!m_text_writer)
            {
                auto key = vk::get_renderpass_key(m_swapchain->get_surface_format());
                m_text_writer = std::make_unique<vk::text_writer>();
                m_text_writer->init(*m_device, vk::get_renderpass(*m_device, key));
            }

            m_text_writer->set_scale(m_frame->client_device_pixel_ratio());

            int y_loc = 0;
            const auto println = [&](const std::string& text)
            {
                m_text_writer->print_text(*m_current_command_buffer, *direct_fbo, 4, y_loc, direct_fbo->width(), direct_fbo->height(), text);
                y_loc += 16;
            };

            println(fmt::format("RSX Load: %3d%%", get_load()));
            println(fmt::format("draw calls: %17d", info.stats.draw_calls));
            println(fmt::format("submits: %20d", info.stats.submit_count));
            println(fmt::format("draw call setup: %12dus", info.stats.setup_time));
            println(fmt::format("vertex upload time: %9dus", info.stats.vertex_upload_time));
            println(fmt::format("texture upload time: %8dus", info.stats.textures_upload_time));
            println(fmt::format("draw call execution: %8dus", info.stats.draw_exec_time));
            println(fmt::format("submit and flip: %12dus", info.stats.flip_time));

            const auto num_dirty_textures = m_texture_cache.get_unreleased_textures_count();
            const auto texture_memory_size = m_texture_cache.get_texture_memory_in_use() / (1024 * 1024);
            const auto tmp_texture_memory_size = m_texture_cache.get_temporary_memory_in_use() / (1024 * 1024);
            const auto num_flushes = m_texture_cache.get_num_flush_requests();
            const auto num_mispredict = m_texture_cache.get_num_cache_mispredictions();
            const auto num_speculate = m_texture_cache.get_num_cache_speculative_writes();
            const auto num_misses = m_texture_cache.get_num_cache_misses();
            const auto num_unavoidable = m_texture_cache.get_num_unavoidable_hard_faults();
            const auto cache_miss_ratio = static_cast<u32>(ceil(m_texture_cache.get_cache_miss_ratio() * 100));
            const auto num_texture_upload = m_texture_cache.get_texture_upload_calls_this_frame();
            const auto num_texture_upload_miss = m_texture_cache.get_texture_upload_misses_this_frame();
            const auto texture_upload_miss_ratio = m_texture_cache.get_texture_upload_miss_percentage();
            const auto texture_copies_ellided = m_texture_cache.get_texture_copies_ellided_this_frame();

            println(fmt::format("Unreleased textures: %8d", num_dirty_textures));
            println(fmt::format("Texture cache memory: %7dM", texture_memory_size));
            println(fmt::format("Temporary texture memory: %3dM", tmp_texture_memory_size));
            println(fmt::format("Flush requests: %13d = %2d (%3d%%) hard faults, %2d unavoidable, %2d misprediction(s), %2d speculation(s)", num_flushes, num_misses, cache_miss_ratio, num_unavoidable, num_mispredict, num_speculate));
            println(fmt::format("Texture uploads: %12u (%u from CPU - %02u%%, %u copies avoided)", num_texture_upload, num_texture_upload_miss, texture_upload_miss_ratio, texture_copies_ellided));

            const auto vertex_cache_hit_count = (info.stats.vertex_cache_request_count - info.stats.vertex_cache_miss_count);
            const auto vertex_cache_hit_ratio = info.stats.vertex_cache_request_count
                ? (vertex_cache_hit_count * 100) / info.stats.vertex_cache_request_count
                : 0;

            println(fmt::format("Vertex cache hits: %10u/%u (%u%%)", vertex_cache_hit_count, info.stats.vertex_cache_request_count, vertex_cache_hit_ratio));
        }

        direct_fbo->release();
    }

    if (target_layout != present_layout)
    {
        vk::change_image_layout(*m_current_command_buffer, target_image, target_layout, present_layout, subresource_range);
    }

    queue_swap_request();

    m_frame_stats.flip_time = m_profiler.duration();

    m_frame->flip(m_context);
    rsx::thread::flip(info);
}