#include "stdafx.h"
#include "../Overlays/overlay_shader_compile_notification.h"
#include "../Overlays/Shaders/shader_loading_dialog_native.h"
#include "VKGSRender.h"
#include "VKCommonDecompiler.h"
#include "VKRenderPass.h"
#include "VKResourceManager.h"
#include "VKCommandStream.h"
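
// Local helpers: per-format properties of RSX depth surfaces (maximum depth value and bytes per pixel)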
namespace
{
	u32 get_max_depth_value(rsx::surface_depth_format format)
	{
		switch (format)
		{
		case rsx::surface_depth_format::z16: return 0xFFFF;
		case rsx::surface_depth_format::z24s8: return 0xFFFFFF;
		default:
			ASSUME(0);
			break;
		}
		fmt::throw_exception("Unknown depth format" HERE);
	}

	u8 get_pixel_size(rsx::surface_depth_format format)
	{
		switch (format)
		{
		case rsx::surface_depth_format::z16: return 2;
		case rsx::surface_depth_format::z24s8: return 4;
		default:
			ASSUME(0);
			break;
		}
		fmt::throw_exception("Unknown depth format" HERE);
	}
}

namespace vk
{
	VkCompareOp get_compare_func(rsx::comparison_function op, bool reverse_direction = false);

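	// Maps an RSX color surface format to a Vulkan format plus a component swizzle.
	// Several RSX channel orderings have no direct Vulkan equivalent, so the swizzle
	// performs the remapping (and forces alpha to 0 or 1 for the z8*/o8* variants).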
	std::pair<VkFormat, VkComponentMapping> get_compatible_surface_format(rsx::surface_color_format color_format)
	{
		const VkComponentMapping abgr = { VK_COMPONENT_SWIZZLE_B, VK_COMPONENT_SWIZZLE_G, VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_A };
		const VkComponentMapping o_rgb = { VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G, VK_COMPONENT_SWIZZLE_B, VK_COMPONENT_SWIZZLE_ONE };
		const VkComponentMapping z_rgb = { VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G, VK_COMPONENT_SWIZZLE_B, VK_COMPONENT_SWIZZLE_ZERO };
		const VkComponentMapping o_bgr = { VK_COMPONENT_SWIZZLE_B, VK_COMPONENT_SWIZZLE_G, VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_ONE };
		const VkComponentMapping z_bgr = { VK_COMPONENT_SWIZZLE_B, VK_COMPONENT_SWIZZLE_G, VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_ZERO };

		switch (color_format)
		{
		case rsx::surface_color_format::r5g6b5:
			return std::make_pair(VK_FORMAT_R5G6B5_UNORM_PACK16, vk::default_component_map());

		case rsx::surface_color_format::a8r8g8b8:
			return std::make_pair(VK_FORMAT_B8G8R8A8_UNORM, vk::default_component_map());

		case rsx::surface_color_format::a8b8g8r8:
			return std::make_pair(VK_FORMAT_B8G8R8A8_UNORM, abgr);

		case rsx::surface_color_format::x8b8g8r8_o8b8g8r8:
			return std::make_pair(VK_FORMAT_B8G8R8A8_UNORM, o_bgr);

		case rsx::surface_color_format::x8b8g8r8_z8b8g8r8:
			return std::make_pair(VK_FORMAT_B8G8R8A8_UNORM, z_bgr);

		case rsx::surface_color_format::x8r8g8b8_z8r8g8b8:
			return std::make_pair(VK_FORMAT_B8G8R8A8_UNORM, z_rgb);

		case rsx::surface_color_format::x8r8g8b8_o8r8g8b8:
			return std::make_pair(VK_FORMAT_B8G8R8A8_UNORM, o_rgb);

		case rsx::surface_color_format::w16z16y16x16:
			return std::make_pair(VK_FORMAT_R16G16B16A16_SFLOAT, vk::default_component_map());

		case rsx::surface_color_format::w32z32y32x32:
			return std::make_pair(VK_FORMAT_R32G32B32A32_SFLOAT, vk::default_component_map());

		case rsx::surface_color_format::x1r5g5b5_o1r5g5b5:
			return std::make_pair(VK_FORMAT_A1R5G5B5_UNORM_PACK16, o_rgb);

		case rsx::surface_color_format::x1r5g5b5_z1r5g5b5:
			return std::make_pair(VK_FORMAT_A1R5G5B5_UNORM_PACK16, z_rgb);

		case rsx::surface_color_format::b8:
		{
			const VkComponentMapping no_alpha = { VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_ONE };
			return std::make_pair(VK_FORMAT_R8_UNORM, no_alpha);
		}

		case rsx::surface_color_format::g8b8:
		{
			const VkComponentMapping gb_rg = { VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G, VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G };
			return std::make_pair(VK_FORMAT_R8G8_UNORM, gb_rg);
		}

		case rsx::surface_color_format::x32:
		{
			const VkComponentMapping rrrr = { VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_R };
			return std::make_pair(VK_FORMAT_R32_SFLOAT, rrrr);
		}

		default:
			rsx_log.error("Surface color buffer: Unsupported surface color format (0x%x)", static_cast<u32>(color_format));
			return std::make_pair(VK_FORMAT_B8G8R8A8_UNORM, vk::default_component_map());
		}
	}
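
	// RSX logic ops map one-to-one onto Vulkan's VkLogicOp values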
	VkLogicOp get_logic_op(rsx::logic_op op)
	{
		switch (op)
		{
		case rsx::logic_op::logic_clear: return VK_LOGIC_OP_CLEAR;
		case rsx::logic_op::logic_and: return VK_LOGIC_OP_AND;
		case rsx::logic_op::logic_and_reverse: return VK_LOGIC_OP_AND_REVERSE;
		case rsx::logic_op::logic_copy: return VK_LOGIC_OP_COPY;
		case rsx::logic_op::logic_and_inverted: return VK_LOGIC_OP_AND_INVERTED;
		case rsx::logic_op::logic_noop: return VK_LOGIC_OP_NO_OP;
		case rsx::logic_op::logic_xor: return VK_LOGIC_OP_XOR;
		case rsx::logic_op::logic_or: return VK_LOGIC_OP_OR;
		case rsx::logic_op::logic_nor: return VK_LOGIC_OP_NOR;
		case rsx::logic_op::logic_equiv: return VK_LOGIC_OP_EQUIVALENT;
		case rsx::logic_op::logic_invert: return VK_LOGIC_OP_INVERT;
		case rsx::logic_op::logic_or_reverse: return VK_LOGIC_OP_OR_REVERSE;
		case rsx::logic_op::logic_copy_inverted: return VK_LOGIC_OP_COPY_INVERTED;
		case rsx::logic_op::logic_or_inverted: return VK_LOGIC_OP_OR_INVERTED;
		case rsx::logic_op::logic_nand: return VK_LOGIC_OP_NAND;
		case rsx::logic_op::logic_set: return VK_LOGIC_OP_SET;
		default:
			fmt::throw_exception("Unknown logic op 0x%x" HERE, static_cast<u32>(op));
		}
	}
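
	// Blend factors also translate directly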
	VkBlendFactor get_blend_factor(rsx::blend_factor factor)
	{
		switch (factor)
		{
		case rsx::blend_factor::one: return VK_BLEND_FACTOR_ONE;
		case rsx::blend_factor::zero: return VK_BLEND_FACTOR_ZERO;
		case rsx::blend_factor::src_alpha: return VK_BLEND_FACTOR_SRC_ALPHA;
		case rsx::blend_factor::dst_alpha: return VK_BLEND_FACTOR_DST_ALPHA;
		case rsx::blend_factor::src_color: return VK_BLEND_FACTOR_SRC_COLOR;
		case rsx::blend_factor::dst_color: return VK_BLEND_FACTOR_DST_COLOR;
		case rsx::blend_factor::constant_color: return VK_BLEND_FACTOR_CONSTANT_COLOR;
		case rsx::blend_factor::constant_alpha: return VK_BLEND_FACTOR_CONSTANT_ALPHA;
		case rsx::blend_factor::one_minus_src_color: return VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR;
		case rsx::blend_factor::one_minus_dst_color: return VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR;
		case rsx::blend_factor::one_minus_src_alpha: return VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA;
		case rsx::blend_factor::one_minus_dst_alpha: return VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA;
		case rsx::blend_factor::one_minus_constant_alpha: return VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA;
		case rsx::blend_factor::one_minus_constant_color: return VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR;
		case rsx::blend_factor::src_alpha_saturate: return VK_BLEND_FACTOR_SRC_ALPHA_SATURATE;
		default:
			fmt::throw_exception("Unknown blend factor 0x%x" HERE, static_cast<u32>(factor));
		}
	}
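
	// NOTE: add_signed and reverse_substract_signed have no Vulkan equivalent; they fall
	// through to the nearest unsigned op below, and the substitution is logged at trace level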
	VkBlendOp get_blend_op(rsx::blend_equation op)
	{
		switch (op)
		{
		case rsx::blend_equation::add_signed:
			rsx_log.trace("blend equation add_signed used. Emulating using FUNC_ADD");
			[[fallthrough]];
		case rsx::blend_equation::add:
			return VK_BLEND_OP_ADD;
		case rsx::blend_equation::substract: return VK_BLEND_OP_SUBTRACT;
		case rsx::blend_equation::reverse_substract_signed:
			rsx_log.trace("blend equation reverse_subtract_signed used. Emulating using FUNC_REVERSE_SUBTRACT");
			[[fallthrough]];
		case rsx::blend_equation::reverse_substract: return VK_BLEND_OP_REVERSE_SUBTRACT;
		case rsx::blend_equation::min: return VK_BLEND_OP_MIN;
		case rsx::blend_equation::max: return VK_BLEND_OP_MAX;
		default:
			fmt::throw_exception("Unknown blend op: 0x%x" HERE, static_cast<u32>(op));
		}
	}

	VkStencilOp get_stencil_op(rsx::stencil_op op)
	{
		switch (op)
		{
		case rsx::stencil_op::keep: return VK_STENCIL_OP_KEEP;
		case rsx::stencil_op::zero: return VK_STENCIL_OP_ZERO;
		case rsx::stencil_op::replace: return VK_STENCIL_OP_REPLACE;
		case rsx::stencil_op::incr: return VK_STENCIL_OP_INCREMENT_AND_CLAMP;
		case rsx::stencil_op::decr: return VK_STENCIL_OP_DECREMENT_AND_CLAMP;
		case rsx::stencil_op::invert: return VK_STENCIL_OP_INVERT;
		case rsx::stencil_op::incr_wrap: return VK_STENCIL_OP_INCREMENT_AND_WRAP;
		case rsx::stencil_op::decr_wrap: return VK_STENCIL_OP_DECREMENT_AND_WRAP;
		default:
			fmt::throw_exception("Unknown stencil op: 0x%x" HERE, static_cast<u32>(op));
		}
	}

	VkFrontFace get_front_face(rsx::front_face ffv)
	{
		switch (ffv)
		{
		case rsx::front_face::cw: return VK_FRONT_FACE_CLOCKWISE;
		case rsx::front_face::ccw: return VK_FRONT_FACE_COUNTER_CLOCKWISE;
		default:
			fmt::throw_exception("Unknown front face value: 0x%x" HERE, static_cast<u32>(ffv));
		}
	}

	VkCullModeFlags get_cull_face(rsx::cull_face cfv)
	{
		switch (cfv)
		{
		case rsx::cull_face::back: return VK_CULL_MODE_BACK_BIT;
		case rsx::cull_face::front: return VK_CULL_MODE_FRONT_BIT;
		case rsx::cull_face::front_and_back: return VK_CULL_MODE_FRONT_AND_BACK;
		default:
			fmt::throw_exception("Unknown cull face value: 0x%x" HERE, static_cast<u32>(cfv));
		}
	}
}

namespace
{
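	// Builds the descriptor set layout and pipeline layout shared by all graphics pipelines.
	// Binding slots come from the device's pipeline binding table, so the layout matches
	// what the shader decompilers emit.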
	std::tuple<VkPipelineLayout, VkDescriptorSetLayout> get_shared_pipeline_layout(VkDevice dev)
	{
		const auto& binding_table = vk::get_current_renderer()->get_pipeline_binding_table();
		std::vector<VkDescriptorSetLayoutBinding> bindings(binding_table.total_descriptor_bindings);

		size_t idx = 0;

		// Vertex stream buffers; one stream for cacheable data, one for transient data, plus the vertex layout stream
		for (int i = 0; i < 3; i++)
		{
			bindings[idx].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
			bindings[idx].descriptorCount = 1;
			bindings[idx].stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
			bindings[idx].binding = binding_table.vertex_buffers_first_bind_slot + i;
			idx++;
		}

		bindings[idx].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
		bindings[idx].descriptorCount = 1;
		bindings[idx].stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
		bindings[idx].binding = binding_table.fragment_constant_buffers_bind_slot;
		idx++;

		bindings[idx].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
		bindings[idx].descriptorCount = 1;
		bindings[idx].stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
		bindings[idx].binding = binding_table.fragment_state_bind_slot;
		idx++;

		bindings[idx].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
		bindings[idx].descriptorCount = 1;
		bindings[idx].stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
		bindings[idx].binding = binding_table.fragment_texture_params_bind_slot;
		idx++;

		bindings[idx].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
		bindings[idx].descriptorCount = 1;
		bindings[idx].stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
		bindings[idx].binding = binding_table.vertex_constant_buffers_bind_slot;
		idx++;

		bindings[idx].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
		bindings[idx].descriptorCount = 1;
		bindings[idx].stageFlags = VK_SHADER_STAGE_ALL_GRAPHICS;
		bindings[idx].binding = binding_table.vertex_params_bind_slot;
		idx++;

		bindings[idx].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
		bindings[idx].descriptorCount = 1;
		bindings[idx].stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
		bindings[idx].binding = binding_table.conditional_render_predicate_slot;
		idx++;

		for (auto binding = binding_table.textures_first_bind_slot;
		     binding < binding_table.vertex_textures_first_bind_slot;
		     binding++)
		{
			bindings[idx].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
			bindings[idx].descriptorCount = 1;
			bindings[idx].stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
			bindings[idx].binding = binding;
			idx++;
		}

		for (int i = 0; i < rsx::limits::vertex_textures_count; i++)
		{
			bindings[idx].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
			bindings[idx].descriptorCount = 1;
			bindings[idx].stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
			bindings[idx].binding = binding_table.vertex_textures_first_bind_slot + i;
			idx++;
		}

		verify(HERE), idx == binding_table.total_descriptor_bindings;

		std::array<VkPushConstantRange, 1> push_constants;
		push_constants[0].offset = 0;
		push_constants[0].size = 16;
		push_constants[0].stageFlags = VK_SHADER_STAGE_VERTEX_BIT;

		if (vk::emulate_conditional_rendering())
		{
			// Conditional render toggle
			push_constants[0].size = 20;
		}

		VkDescriptorSetLayoutCreateInfo infos = {};
		infos.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
		infos.pBindings = bindings.data();
		infos.bindingCount = static_cast<uint32_t>(bindings.size());

		VkDescriptorSetLayout set_layout;
		CHECK_RESULT(vkCreateDescriptorSetLayout(dev, &infos, nullptr, &set_layout));

		VkPipelineLayoutCreateInfo layout_info = {};
		layout_info.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
		layout_info.setLayoutCount = 1;
		layout_info.pSetLayouts = &set_layout;
		layout_info.pushConstantRangeCount = 1;
		layout_info.pPushConstantRanges = push_constants.data();

		VkPipelineLayout result;
		CHECK_RESULT(vkCreatePipelineLayout(dev, &layout_info, nullptr, &result));
		return std::make_tuple(result, set_layout);
	}
}

u64 VKGSRender::get_cycles()
{
	return thread_ctrl::get_cycles(static_cast<named_thread<VKGSRender>&>(*this));
}

VKGSRender::VKGSRender() : GSRender()
{
	if (m_thread_context.createInstance("RPCS3"))
	{
		m_thread_context.makeCurrentInstance();
	}
	else
	{
		rsx_log.fatal("Could not find a Vulkan compatible GPU driver. Your GPU(s) may not support Vulkan, or you need to install the Vulkan runtime and drivers");
		m_device = VK_NULL_HANDLE;
		return;
	}

	std::vector<vk::physical_device>& gpus = m_thread_context.enumerateDevices();

	// Actually confirm that the loader found at least one compatible device.
	// This should not happen unless something is wrong with the driver setup on the target system.
	if (gpus.empty())
	{
		// We can't throw in Emulator::Load, so we log the error and return
		rsx_log.fatal("No compatible GPU devices found");
		m_device = VK_NULL_HANDLE;
		return;
	}

	bool gpu_found = false;
	std::string adapter_name = g_cfg.video.vk.adapter;

	display_handle_t display = m_frame->handle();

#ifdef HAVE_X11
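	// The display handle is a variant across window systems; when it carries a
	// (Display*, Window) pair, keep the Display* for XFlush() and for cleanup on teardown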
	std::visit([this](auto&& p)
	{
		using T = std::decay_t<decltype(p)>;
		if constexpr (std::is_same_v<T, std::pair<Display*, Window>>)
		{
			m_display_handle = p.first;
			XFlush(m_display_handle);
		}
	}, display);
#endif

	for (auto& gpu : gpus)
	{
		if (gpu.get_name() == adapter_name)
		{
			m_swapchain.reset(m_thread_context.createSwapChain(display, gpu));
			gpu_found = true;
			break;
		}
	}
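
	// Fall back to the first enumerated device if the configured adapter was not found or is unset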
	if (!gpu_found || adapter_name.empty())
	{
		m_swapchain.reset(m_thread_context.createSwapChain(display, gpus[0]));
	}

	if (!m_swapchain)
	{
		m_device = VK_NULL_HANDLE;
		rsx_log.fatal("Could not successfully initialize a swapchain");
		return;
	}

	m_device = const_cast<vk::render_device*>(&m_swapchain->get_device());

	vk::set_current_thread_ctx(m_thread_context);
	vk::set_current_renderer(m_swapchain->get_device());

	m_swapchain_dims.width = m_frame->client_width();
	m_swapchain_dims.height = m_frame->client_height();

	if (!m_swapchain->init(m_swapchain_dims.width, m_swapchain_dims.height))
	{
		swapchain_unavailable = true;
	}

	// Create the primary command buffers
	m_command_buffer_pool.create(*m_device);

	for (auto& cb : m_primary_cb_list)
	{
		cb.create(m_command_buffer_pool);
		cb.init_fence(*m_device);
	}

	m_current_command_buffer = &m_primary_cb_list[0];

	// Create a secondary command buffer for parallel operations
	m_secondary_command_buffer_pool.create(*m_device);
	m_secondary_command_buffer.create(m_secondary_command_buffer_pool, true);
	m_secondary_command_buffer.access_hint = vk::command_buffer::access_type_hint::all;

	// Precalculated stuff
	std::tie(pipeline_layout, descriptor_layouts) = get_shared_pipeline_layout(*m_device);

	// Occlusion
	m_occlusion_query_pool.create(*m_device, OCCLUSION_MAX_POOL_SIZE);
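
	// Tag each query data entry with its own index so results can be matched back to their pool slots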
	m_occlusion_map.resize(occlusion_query_count);
	for (u32 n = 0; n < occlusion_query_count; ++n)
		m_occlusion_query_data[n].driver_handle = n;

	// Generate frame contexts
	const auto& binding_table = m_device->get_pipeline_binding_table();
	const u32 num_fs_samplers = binding_table.vertex_textures_first_bind_slot - binding_table.textures_first_bind_slot;

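	// Descriptor pool sizing: every draw call consumes one descriptor set, so each pool
	// size below is scaled by DESCRIPTOR_MAX_DRAW_CALLS to cover a full frame context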
	std::vector<VkDescriptorPoolSize> sizes;
	sizes.push_back({ VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 6 * DESCRIPTOR_MAX_DRAW_CALLS });
	sizes.push_back({ VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, 3 * DESCRIPTOR_MAX_DRAW_CALLS });
	sizes.push_back({ VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, (num_fs_samplers + 4) * DESCRIPTOR_MAX_DRAW_CALLS });

	// Conditional rendering predicate slot; refactor to allow skipping this when not needed
	sizes.push_back({ VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1 * DESCRIPTOR_MAX_DRAW_CALLS });

	VkSemaphoreCreateInfo semaphore_info = {};
	semaphore_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;

	// VRAM allocation
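	// Ring buffer sizes are specified in MiB (the *_SIZE_M constants are multiplied by 0x100000)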
	m_attrib_ring_info.create(VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT, VK_ATTRIB_RING_BUFFER_SIZE_M * 0x100000, "attrib buffer", 0x400000, VK_TRUE);
	m_fragment_env_ring_info.create(VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, VK_UBO_RING_BUFFER_SIZE_M * 0x100000, "fragment env buffer");
	m_vertex_env_ring_info.create(VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, VK_UBO_RING_BUFFER_SIZE_M * 0x100000, "vertex env buffer");
	m_fragment_texture_params_ring_info.create(VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, VK_UBO_RING_BUFFER_SIZE_M * 0x100000, "fragment texture params buffer");
	m_vertex_layout_ring_info.create(VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT, VK_UBO_RING_BUFFER_SIZE_M * 0x100000, "vertex layout buffer", 0x10000, VK_TRUE);
	m_fragment_constants_ring_info.create(VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, VK_UBO_RING_BUFFER_SIZE_M * 0x100000, "fragment constants buffer");
	m_transform_constants_ring_info.create(VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, VK_TRANSFORM_CONSTANTS_BUFFER_SIZE_M * 0x100000, "transform constants buffer");
	m_index_buffer_ring_info.create(VK_BUFFER_USAGE_INDEX_BUFFER_BIT, VK_INDEX_RING_BUFFER_SIZE_M * 0x100000, "index buffer");
	m_texture_upload_buffer_ring_info.create(VK_BUFFER_USAGE_TRANSFER_SRC_BIT, VK_TEXTURE_UPLOAD_RING_BUFFER_SIZE_M * 0x100000, "texture upload buffer", 32 * 0x100000);

	const auto limits = m_device->gpu().get_limits();
	m_texbuffer_view_size = std::min(limits.maxTexelBufferElements, VK_ATTRIB_RING_BUFFER_SIZE_M * 0x100000u);

	if (m_texbuffer_view_size < 0x800000)
	{
		// Warn; this is only really expected on macOS
		rsx_log.warning("Current driver may crash due to memory limitations (%uk)", m_texbuffer_view_size / 1024);
	}

	for (auto& ctx : frame_context_storage)
	{
		vkCreateSemaphore(*m_device, &semaphore_info, nullptr, &ctx.present_wait_semaphore);
		vkCreateSemaphore(*m_device, &semaphore_info, nullptr, &ctx.acquire_signal_semaphore);
		ctx.descriptor_pool.create(*m_device, sizes.data(), static_cast<uint32_t>(sizes.size()), DESCRIPTOR_MAX_DRAW_CALLS, 1);
	}

	const auto& memory_map = m_device->get_memory_mapping();
	null_buffer = std::make_unique<vk::buffer>(*m_device, 32, memory_map.device_local, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT, 0);
	null_buffer_view = std::make_unique<vk::buffer_view>(*m_device, null_buffer->value, VK_FORMAT_R8_UINT, 0, 32);

	vk::initialize_compiler_context();

	if (g_cfg.video.overlay)
	{
		auto key = vk::get_renderpass_key(m_swapchain->get_surface_format());
		m_text_writer = std::make_unique<vk::text_writer>();
		m_text_writer->init(*m_device, vk::get_renderpass(*m_device, key));
	}

	m_depth_converter = std::make_unique<vk::depth_convert_pass>();
	m_depth_converter->create(*m_device);

	m_attachment_clear_pass = std::make_unique<vk::attachment_clear_pass>();
	m_attachment_clear_pass->create(*m_device);

	m_video_output_pass = std::make_unique<vk::video_out_calibration_pass>();
	m_video_output_pass->create(*m_device);

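	// The program buffer invokes this callback whenever a pipeline is linked (or queued
	// for async compilation) so the result can be persisted to the on-disk shaders cache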
	m_prog_buffer = std::make_unique<VKProgramBuffer>
	(
		[this](const vk::pipeline_props& props, const RSXVertexProgram& vp, const RSXFragmentProgram& fp)
		{
			// Program was linked or queued for linking
			m_shaders_cache->store(props, vp, fp);
		}
	);

	if (g_cfg.video.disable_vertex_cache || g_cfg.video.multithreaded_rsx)
		m_vertex_cache = std::make_unique<vk::null_vertex_cache>();
	else
		m_vertex_cache = std::make_unique<vk::weak_vertex_cache>();

	m_shaders_cache = std::make_unique<vk::shader_cache>(*m_prog_buffer, "vulkan", "v1.91");

	open_command_buffer();
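
	// Swapchain images begin in UNDEFINED layout; clear each to black and transition it
	// to the optimal present layout before first use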
	for (u32 i = 0; i < m_swapchain->get_swap_image_count(); ++i)
	{
		const auto target_layout = m_swapchain->get_optimal_present_layout();
		const auto target_image = m_swapchain->get_image(i);
		VkClearColorValue clear_color{};
		VkImageSubresourceRange range = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1 };

		vk::change_image_layout(*m_current_command_buffer, target_image, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, range);
		vkCmdClearColorImage(*m_current_command_buffer, target_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &clear_color, 1, &range);
		vk::change_image_layout(*m_current_command_buffer, target_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, target_layout, range);
	}

	m_current_frame = &frame_context_storage[0];

	m_texture_cache.initialize(*m_device, m_swapchain->get_graphics_queue(), m_texture_upload_buffer_ring_info);

	m_ui_renderer = std::make_unique<vk::ui_overlay_renderer>();
	m_ui_renderer->create(*m_current_command_buffer, m_texture_upload_buffer_ring_info);

	m_occlusion_query_pool.initialize(*m_current_command_buffer);

	backend_config.supports_multidraw = true;

	// NOTE: We do not actually need multiple sample support for A2C to work.
	// This is here for visual consistency - will be removed when AA problems due to mipmaps are fixed.
	if (g_cfg.video.antialiasing_level != msaa_level::none)
	{
		backend_config.supports_hw_a2c = VK_TRUE;
		backend_config.supports_hw_a2one = m_device->get_alpha_to_one_support();
	}

	// NOTE: On NVIDIA cards going back decades (including the PS3) there is a slight normalization inaccuracy in compressed formats.
	// Confirmed in BLES01916 (The Evil Within) which uses RGB565 for some virtual texturing data.
	backend_config.supports_hw_renormalization = (vk::get_driver_vendor() == vk::driver_vendor::NVIDIA);

	// Relaxed query synchronization
	backend_config.supports_hw_conditional_render = !!g_cfg.video.relaxed_zcull_sync;
}

VKGSRender::~VKGSRender()
{
	if (m_device == VK_NULL_HANDLE)
	{
		// Initialization failed
		return;
	}

	// Wait for the device to finish up with resources
	vkDeviceWaitIdle(*m_device);

	// Clear flush requests
	m_flush_requests.clear_pending_flag();

	// Texture cache
	m_texture_cache.destroy();

	// Shaders
	vk::finalize_compiler_context();
	m_prog_buffer->clear();

	m_persistent_attribute_storage.reset();
	m_volatile_attribute_storage.reset();
	m_vertex_layout_storage.reset();

	// Global resources
	vk::destroy_global_resources();

	// Heaps
	m_attrib_ring_info.destroy();
	m_fragment_env_ring_info.destroy();
	m_vertex_env_ring_info.destroy();
	m_fragment_texture_params_ring_info.destroy();
	m_vertex_layout_ring_info.destroy();
	m_fragment_constants_ring_info.destroy();
	m_transform_constants_ring_info.destroy();
	m_index_buffer_ring_info.destroy();
	m_texture_upload_buffer_ring_info.destroy();

	// Fallback bindables
	null_buffer.reset();
	null_buffer_view.reset();

	if (m_current_frame == &m_aux_frame_context)
	{
		// Return resources back to the owner
		m_current_frame = &frame_context_storage[m_current_queue_index];
		m_current_frame->swap_storage(m_aux_frame_context);
		m_current_frame->grab_resources(m_aux_frame_context);
	}

	m_aux_frame_context.buffer_views_to_clean.clear();

	// NOTE: aux_context uses descriptor pools borrowed from the main queues; any allocations are automatically freed when the pool is destroyed
	for (auto& ctx : frame_context_storage)
	{
		vkDestroySemaphore(*m_device, ctx.present_wait_semaphore, nullptr);
		vkDestroySemaphore(*m_device, ctx.acquire_signal_semaphore, nullptr);
		ctx.descriptor_pool.destroy();
		ctx.buffer_views_to_clean.clear();
	}

	// Textures
	m_rtts.destroy();
	m_texture_cache.destroy();

	m_stencil_mirror_sampler.reset();

	// Overlay text handler
	m_text_writer.reset();

	// Overlay UI renderer
	m_ui_renderer->destroy();
	m_ui_renderer.reset();

	// RGBA->depth cast helper
	m_depth_converter->destroy();
	m_depth_converter.reset();

	// Attachment clear helper
	m_attachment_clear_pass->destroy();
	m_attachment_clear_pass.reset();

	// Video-out calibration (gamma, colorspace, etc.)
	m_video_output_pass->destroy();
	m_video_output_pass.reset();

	// Pipeline descriptors
	vkDestroyPipelineLayout(*m_device, pipeline_layout, nullptr);
	vkDestroyDescriptorSetLayout(*m_device, descriptor_layouts, nullptr);

	// Queries
	m_occlusion_query_pool.destroy();
	m_cond_render_buffer.reset();

	// Command buffers
	for (auto& cb : m_primary_cb_list)
		cb.destroy();

	m_command_buffer_pool.destroy();

	m_secondary_command_buffer.destroy();
	m_secondary_command_buffer_pool.destroy();

	// Device handles/contexts
	m_swapchain->destroy();
	m_thread_context.close();

#if defined(HAVE_X11) && defined(HAVE_VULKAN)
	if (m_display_handle)
		XCloseDisplay(m_display_handle);
#endif
}

bool VKGSRender::on_access_violation(u32 address, bool is_writing)
{
	vk::texture_cache::thrashed_set result;
	{
		std::lock_guard lock(m_secondary_cb_guard);

		const rsx::invalidation_cause cause = is_writing ? rsx::invalidation_cause::deferred_write : rsx::invalidation_cause::deferred_read;
		result = m_texture_cache.invalidate_address(m_secondary_command_buffer, address, cause);
	}

	if (!result.violation_handled)
		return false;

	{
		std::lock_guard lock(m_sampler_mutex);
		m_samplers_dirty.store(true);
	}

	if (result.num_flushable > 0)
	{
		if (g_fxo->get<rsx::dma_manager>()->is_current_thread())
		{
			// The offloader thread cannot handle flush requests
			verify(HERE), !(m_queue_status & flush_queue_state::deadlock);

			m_offloader_fault_range = g_fxo->get<rsx::dma_manager>()->get_fault_range(is_writing);
			m_offloader_fault_cause = (is_writing) ? rsx::invalidation_cause::write : rsx::invalidation_cause::read;
			g_fxo->get<rsx::dma_manager>()->set_mem_fault_flag();
			m_queue_status |= flush_queue_state::deadlock;

			// Wait for the deadlock to clear
			while (m_queue_status & flush_queue_state::deadlock)
			{
				_mm_pause();
			}

			g_fxo->get<rsx::dma_manager>()->clear_mem_fault_flag();
			return true;
		}

		bool has_queue_ref = false;
		if (!is_current_thread())
		{
			// Always submit the primary cb to ensure state consistency (flush pending changes such as image transitions)
			vm::temporary_unlock();

			std::lock_guard lock(m_flush_queue_mutex);

			m_flush_requests.post(false);
			has_queue_ref = true;
		}
		else
		{
			if (vk::is_uninterruptible())
			{
				rsx_log.error("Fault in uninterruptible code!");
			}

			// Flush the primary cb queue to sync pending changes (e.g. image transitions!)
			flush_command_queue();
		}

		if (has_queue_ref)
		{
			// Wait for the RSX thread to process the request if it hasn't already
			m_flush_requests.producer_wait();
		}

		m_texture_cache.flush_all(m_secondary_command_buffer, result);

		if (has_queue_ref)
		{
			// Release the RSX thread
			m_flush_requests.remove_one();
		}
	}

	return true;
}

void VKGSRender::on_invalidate_memory_range(const utils::address_range& range, rsx::invalidation_cause cause)
{
	std::lock_guard lock(m_secondary_cb_guard);

	auto data = m_texture_cache.invalidate_range(m_secondary_command_buffer, range, cause);
	AUDIT(data.empty());

	if (cause == rsx::invalidation_cause::unmap && data.violation_handled)
	{
		m_texture_cache.purge_unreleased_sections();
		{
			std::lock_guard lock(m_sampler_mutex);
			m_samplers_dirty.store(true);
		}
	}
}

void VKGSRender::on_semaphore_acquire_wait()
{
	if (m_flush_requests.pending() ||
		(async_flip_requested & flip_request::emu_requested) ||
		(m_queue_status & flush_queue_state::deadlock))
	{
		do_local_task(rsx::FIFO_state::lock_wait);
	}
}

void VKGSRender::notify_tile_unbound(u32 tile)
{
	// TODO: Handle texture writeback
	if (false)
	{
		u32 addr = rsx::get_address(tiles[tile].offset, tiles[tile].location, HERE);
		on_notify_memory_unmapped(addr, tiles[tile].size);
		m_rtts.invalidate_surface_address(addr, false);
	}

	{
		std::lock_guard lock(m_sampler_mutex);
		m_samplers_dirty.store(true);
	}
}

void VKGSRender::check_heap_status(u32 flags)
{
	verify(HERE), flags;

	bool heap_critical;
	if (flags == VK_HEAP_CHECK_ALL)
	{
		heap_critical = m_attrib_ring_info.is_critical() ||
			m_texture_upload_buffer_ring_info.is_critical() ||
			m_fragment_env_ring_info.is_critical() ||
			m_vertex_env_ring_info.is_critical() ||
			m_fragment_texture_params_ring_info.is_critical() ||
			m_vertex_layout_ring_info.is_critical() ||
			m_fragment_constants_ring_info.is_critical() ||
			m_transform_constants_ring_info.is_critical() ||
			m_index_buffer_ring_info.is_critical();
	}
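	// Otherwise walk the set bits in flags, testing only the heaps the caller asked about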
	else if (flags)
	{
		heap_critical = false;
		u32 test = 1 << utils::cnttz32(flags, true);
		do
		{
			switch (flags & test)
			{
			case 0:
				break;
			case VK_HEAP_CHECK_TEXTURE_UPLOAD_STORAGE:
				heap_critical = m_texture_upload_buffer_ring_info.is_critical();
				break;
			case VK_HEAP_CHECK_VERTEX_STORAGE:
				heap_critical = m_attrib_ring_info.is_critical() || m_index_buffer_ring_info.is_critical();
				break;
			case VK_HEAP_CHECK_VERTEX_ENV_STORAGE:
				heap_critical = m_vertex_env_ring_info.is_critical();
				break;
			case VK_HEAP_CHECK_FRAGMENT_ENV_STORAGE:
				heap_critical = m_fragment_env_ring_info.is_critical();
				break;
			case VK_HEAP_CHECK_TEXTURE_ENV_STORAGE:
				heap_critical = m_fragment_texture_params_ring_info.is_critical();
				break;
			case VK_HEAP_CHECK_VERTEX_LAYOUT_STORAGE:
				heap_critical = m_vertex_layout_ring_info.is_critical();
				break;
			case VK_HEAP_CHECK_TRANSFORM_CONSTANTS_STORAGE:
				heap_critical = m_transform_constants_ring_info.is_critical();
				break;
			case VK_HEAP_CHECK_FRAGMENT_CONSTANTS_STORAGE:
				heap_critical = m_fragment_constants_ring_info.is_critical();
				break;
			default:
				fmt::throw_exception("Unexpected heap flag set! (0x%X)", test);
			}

			flags &= ~test;
			test <<= 1;
		}
		while (flags && !heap_critical);
	}

	if (heap_critical)
	{
		m_profiler.start();

		vk::frame_context_t* target_frame = nullptr;
		if (!m_queued_frames.empty())
		{
			if (m_current_frame != &m_aux_frame_context)
			{
				target_frame = m_queued_frames.front();
			}
		}

		if (target_frame == nullptr)
		{
			flush_command_queue(true);
			m_vertex_cache->purge();

			m_index_buffer_ring_info.reset_allocation_stats();
			m_fragment_env_ring_info.reset_allocation_stats();
			m_vertex_env_ring_info.reset_allocation_stats();
			m_fragment_texture_params_ring_info.reset_allocation_stats();
			m_vertex_layout_ring_info.reset_allocation_stats();
			m_fragment_constants_ring_info.reset_allocation_stats();
			m_transform_constants_ring_info.reset_allocation_stats();
			m_attrib_ring_info.reset_allocation_stats();
			m_texture_upload_buffer_ring_info.reset_allocation_stats();
			m_current_frame->reset_heap_ptrs();
			m_last_heap_sync_time = get_system_time();
		}
		else
		{
			// Flush the frame context
			frame_context_cleanup(target_frame, true);
		}

		m_frame_stats.flip_time += m_profiler.duration();
	}
}

void VKGSRender::check_present_status()
{
	while (!m_queued_frames.empty())
	{
		auto ctx = m_queued_frames.front();
		if (ctx->swap_command_buffer->pending)
		{
			if (!ctx->swap_command_buffer->poke())
			{
				return;
			}
		}

		frame_context_cleanup(ctx, true);
	}
}

void VKGSRender::check_descriptors()
{
	// Ease resource pressure if the number of draw calls becomes too high or we are running low on memory resources
	const auto required_descriptors = rsx::method_registers.current_draw_clause.pass_count();
	verify(HERE), required_descriptors < DESCRIPTOR_MAX_DRAW_CALLS;

	if ((required_descriptors + m_current_frame->used_descriptors) > DESCRIPTOR_MAX_DRAW_CALLS)
	{
		// Should hard sync before resetting descriptors for spec compliance
		flush_command_queue(true);

		m_current_frame->descriptor_pool.reset(0);
		m_current_frame->used_descriptors = 0;
	}
}

VkDescriptorSet VKGSRender::allocate_descriptor_set()
{
	verify(HERE), m_current_frame->used_descriptors < DESCRIPTOR_MAX_DRAW_CALLS;

	VkDescriptorSetAllocateInfo alloc_info = {};
	alloc_info.descriptorPool = m_current_frame->descriptor_pool;
	alloc_info.descriptorSetCount = 1;
	alloc_info.pSetLayouts = &descriptor_layouts;
	alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;

	VkDescriptorSet new_descriptor_set;
	CHECK_RESULT(vkAllocateDescriptorSets(*m_device, &alloc_info, &new_descriptor_set));
	m_current_frame->used_descriptors++;

	return new_descriptor_set;
}

void VKGSRender::set_viewport()
{
	const auto clip_width = rsx::apply_resolution_scale(rsx::method_registers.surface_clip_width(), true);
	const auto clip_height = rsx::apply_resolution_scale(rsx::method_registers.surface_clip_height(), true);

	// NOTE: The scale_offset matrix already has the viewport matrix factored in
	m_viewport.x = 0;
	m_viewport.y = 0;
	m_viewport.width = clip_width;
	m_viewport.height = clip_height;
	m_viewport.minDepth = 0.f;
	m_viewport.maxDepth = 1.f;
}

void VKGSRender::set_scissor(bool clip_viewport)
{
	areau scissor;
	if (get_scissor(scissor, clip_viewport))
	{
		m_scissor.extent.height = scissor.height();
		m_scissor.extent.width = scissor.width();
		m_scissor.offset.x = scissor.x1;
		m_scissor.offset.y = scissor.y1;
	}
}

void VKGSRender::bind_viewport()
{
	vkCmdSetViewport(*m_current_command_buffer, 0, 1, &m_viewport);
	vkCmdSetScissor(*m_current_command_buffer, 0, 1, &m_scissor);
}

void VKGSRender::on_init_thread()
{
	if (m_device == VK_NULL_HANDLE)
	{
		fmt::throw_exception("No Vulkan device was created");
	}

	GSRender::on_init_thread();
	zcull_ctrl.reset(static_cast<::rsx::reports::ZCULL_control*>(this));

	if (!m_overlay_manager)
	{
		m_frame->hide();
		m_shaders_cache->load(nullptr, *m_device, pipeline_layout);
		m_frame->show();
	}
	else
	{
		rsx::shader_loading_dialog_native dlg(this);

		// TODO: Handle window resize messages during loading on GPUs without OUT_OF_DATE_KHR support
		m_shaders_cache->load(&dlg, *m_device, pipeline_layout);
	}
}

void VKGSRender::on_exit()
{
	zcull_ctrl.release();
	GSRender::on_exit();
}

void VKGSRender::clear_surface(u32 mask)
{
	if (skip_current_frame || swapchain_unavailable) return;

	// If the stencil write mask is disabled, remove the clear_stencil bit
	if (!rsx::method_registers.stencil_mask()) mask &= ~0x2u;

	// Ignore invalid clear flags
	if (!(mask & 0xF3)) return;
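
	// Clear mask layout: bit 0 = depth, bit 1 = stencil, bits 4-7 = color write enables (R/G/B/A)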
	u8 ctx = rsx::framebuffer_creation_context::context_draw;
	if (mask & 0xF0) ctx |= rsx::framebuffer_creation_context::context_clear_color;
	if (mask & 0x3) ctx |= rsx::framebuffer_creation_context::context_clear_depth;

	init_buffers(rsx::framebuffer_creation_context{ctx});

	if (!framebuffer_status_valid) return;

	float depth_clear = 1.f;
	u32 stencil_clear = 0;
	u32 depth_stencil_mask = 0;

	std::vector<VkClearAttachment> clear_descriptors;
	VkClearValue depth_stencil_clear_values = {}, color_clear_values = {};

	u16 scissor_x = static_cast<u16>(m_scissor.offset.x);
	u16 scissor_w = static_cast<u16>(m_scissor.extent.width);
	u16 scissor_y = static_cast<u16>(m_scissor.offset.y);
	u16 scissor_h = static_cast<u16>(m_scissor.extent.height);

	const u16 fb_width = m_draw_fbo->width();
	const u16 fb_height = m_draw_fbo->height();

	// Clip the clear region to the framebuffer
	std::tie(scissor_x, scissor_y, scissor_w, scissor_h) = rsx::clip_region<u16>(fb_width, fb_height, scissor_x, scissor_y, scissor_w, scissor_h, true);
	VkClearRect region = { { { scissor_x, scissor_y }, { scissor_w, scissor_h } }, 0, 1 };

	const bool require_mem_load = (scissor_w * scissor_h) < (fb_width * fb_height);
	bool update_color = false, update_z = false;
	auto surface_depth_format = rsx::method_registers.surface_depth_fmt();

	if (auto ds = std::get<1>(m_rtts.m_bound_depth_stencil); mask & 0x3)
	{
		if (mask & 0x1)
		{
			u32 max_depth_value = get_max_depth_value(surface_depth_format);
			u32 clear_depth = rsx::method_registers.z_clear_value(surface_depth_format == rsx::surface_depth_format::z24s8);

			// Assign (not redeclare) so the outer depth_clear is not shadowed
			depth_clear = static_cast<float>(clear_depth) / max_depth_value;

			depth_stencil_clear_values.depthStencil.depth = depth_clear;
			depth_stencil_clear_values.depthStencil.stencil = stencil_clear;

			depth_stencil_mask |= VK_IMAGE_ASPECT_DEPTH_BIT;
		}

		if (surface_depth_format == rsx::surface_depth_format::z24s8)
		{
			if (mask & 0x2)
			{
				u8 clear_stencil = rsx::method_registers.stencil_clear_value();
				depth_stencil_clear_values.depthStencil.stencil = clear_stencil;
				depth_stencil_mask |= VK_IMAGE_ASPECT_STENCIL_BIT;

				if (ds->samples() > 1)
				{
					if (!require_mem_load) ds->stencil_init_flags &= 0xFF;
					ds->stencil_init_flags |= clear_stencil;
				}
			}

			if ((mask & 0x3) != 0x3 && !require_mem_load && ds->state_flags & rsx::surface_state_flags::erase_bkgnd)
			{
				verify(HERE), depth_stencil_mask;

				if (!g_cfg.video.read_depth_buffer)
				{
					// Only one aspect was cleared. Make sure to memory initialize the other before removing the dirty flag
					if (mask == 1)
					{
						// Depth was cleared, initialize stencil
						depth_stencil_clear_values.depthStencil.stencil = 0xFF;
						depth_stencil_mask |= VK_IMAGE_ASPECT_STENCIL_BIT;
					}
					else
					{
						// Stencil was cleared, initialize depth
						depth_stencil_clear_values.depthStencil.depth = 1.f;
						depth_stencil_mask |= VK_IMAGE_ASPECT_DEPTH_BIT;
					}
				}
				else
				{
					ds->write_barrier(*m_current_command_buffer);
				}
			}
		}
	}

	if (auto colormask = (mask & 0xF0))
	{
		if (!m_draw_buffers.empty())
		{
			bool use_fast_clear = false;
			bool ignore_clear = false;
			switch (rsx::method_registers.surface_color())
			{
			case rsx::surface_color_format::x32:
			case rsx::surface_color_format::w16z16y16x16:
			case rsx::surface_color_format::w32z32y32x32:
				// NOP
				ignore_clear = true;
				break;
			case rsx::surface_color_format::g8b8:
				colormask = rsx::get_g8b8_r8g8_colormask(colormask);
				use_fast_clear = (colormask == (0x10 | 0x20));
				ignore_clear = (colormask == 0);
				colormask |= (0x40 | 0x80);
				break;
			default:
				use_fast_clear = (colormask == (0x10 | 0x20 | 0x40 | 0x80));
				break;
			}

			if (!ignore_clear)
			{
				u8 clear_a = rsx::method_registers.clear_color_a();
				u8 clear_r = rsx::method_registers.clear_color_r();
				u8 clear_g = rsx::method_registers.clear_color_g();
				u8 clear_b = rsx::method_registers.clear_color_b();

				color_clear_values.color.float32[0] = static_cast<float>(clear_r) / 255;
				color_clear_values.color.float32[1] = static_cast<float>(clear_g) / 255;
				color_clear_values.color.float32[2] = static_cast<float>(clear_b) / 255;
				color_clear_values.color.float32[3] = static_cast<float>(clear_a) / 255;

				if (use_fast_clear)
				{
					for (u32 index = 0; index < m_draw_buffers.size(); ++index)
					{
						clear_descriptors.push_back({ VK_IMAGE_ASPECT_COLOR_BIT, index, color_clear_values });
					}
				}
				else
				{
					color4f clear_color =
					{
						color_clear_values.color.float32[0],
						color_clear_values.color.float32[1],
						color_clear_values.color.float32[2],
						color_clear_values.color.float32[3]
					};

					VkRenderPass renderpass = VK_NULL_HANDLE;
					m_attachment_clear_pass->update_config(colormask, clear_color);

					for (const auto& index : m_draw_buffers)
					{
						if (auto rtt = m_rtts.m_bound_render_targets[index].second)
						{
							if (require_mem_load) rtt->write_barrier(*m_current_command_buffer);

							// Add a barrier to ensure previous writes are visible; also transitions into GENERAL layout
							const auto old_layout = rtt->current_layout;
							vk::insert_texture_barrier(*m_current_command_buffer, rtt, VK_IMAGE_LAYOUT_GENERAL);

							if (!renderpass)
							{
								std::vector<vk::image*> surfaces = { rtt };
								const auto key = vk::get_renderpass_key(surfaces);
								renderpass = vk::get_renderpass(*m_device, key);
							}

							m_attachment_clear_pass->run(*m_current_command_buffer, rtt, region.rect, renderpass);

							rtt->change_layout(*m_current_command_buffer, old_layout);
						}
						else
							fmt::throw_exception("Unreachable" HERE);
					}
				}

				for (u8 index = m_rtts.m_bound_render_targets_config.first, count = 0;
				     count < m_rtts.m_bound_render_targets_config.second;
				     ++count, ++index)
				{
					if (require_mem_load)
						m_rtts.m_bound_render_targets[index].second->write_barrier(*m_current_command_buffer);
				}

				update_color = true;
			}
		}
	}
2018-12-30 21:47:15 +01:00
if ( depth_stencil_mask )
2017-06-19 12:47:38 +02:00
{
2019-08-06 13:46:21 +02:00
if ( m_rtts . m_bound_depth_stencil . first )
2017-06-30 23:24:41 +02:00
{
2019-05-14 18:50:45 +02:00
if ( require_mem_load ) m_rtts . m_bound_depth_stencil . second - > write_barrier ( * m_current_command_buffer ) ;
2019-08-27 13:55:45 +02:00
2019-12-03 23:34:23 +01:00
clear_descriptors . push_back ( { static_cast < VkImageAspectFlags > ( depth_stencil_mask ) , 0 , depth_stencil_clear_values } ) ;
2019-08-27 13:55:45 +02:00
update_z = true ;
2017-06-30 23:24:41 +02:00
}
2017-06-19 12:47:38 +02:00
}
2017-11-06 00:35:30 +01:00
2019-08-27 13:55:45 +02:00
if ( update_color | | update_z )
{
const bool write_all_mask [ ] = { true , true , true , true } ;
m_rtts . on_write ( update_color ? write_all_mask : nullptr , update_z ) ;
}
2019-06-01 15:25:33 +02:00
if ( ! clear_descriptors . empty ( ) )
2017-11-29 19:17:23 +01:00
{
begin_render_pass ( ) ;
2019-12-03 23:34:23 +01:00
vkCmdClearAttachments ( * m_current_command_buffer , : : size32 ( clear_descriptors ) , clear_descriptors . data ( ) , 1 , & region ) ;
2017-11-29 19:17:23 +01:00
}
2016-03-23 23:22:33 +01:00
}
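
// Illustrative sketch (not part of the renderer): how a single full-screen color
// clear maps onto vkCmdClearAttachments, mirroring the normalization above where
// each u8 RSX clear channel becomes a float in [0, 1]. All names here are local
// to the example, and the call must be recorded inside an active render pass.
#if 0
VkClearAttachment clear_info = {};
clear_info.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
clear_info.colorAttachment = 0;                           // index into the bound color attachments
clear_info.clearValue.color.float32[0] = clear_r / 255.f;
clear_info.clearValue.color.float32[1] = clear_g / 255.f;
clear_info.clearValue.color.float32[2] = clear_b / 255.f;
clear_info.clearValue.color.float32[3] = clear_a / 255.f;

VkClearRect clear_rect = { { { 0, 0 }, { fb_width, fb_height } }, 0, 1 }; // rect, base layer, layer count
vkCmdClearAttachments(cmd, 1, &clear_info, 1, &clear_rect);
#endif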
void VKGSRender::flush_command_queue(bool hard_sync)
{
	close_and_submit_command_buffer(m_current_command_buffer->submit_fence);

	if (hard_sync)
	{
		// Wait for the latest instruction to execute
		m_current_command_buffer->pending = true;
		m_current_command_buffer->reset();

		// Clear all command buffer statuses
		for (auto &cb : m_primary_cb_list)
		{
			if (cb.pending)
				cb.poke();
		}

		// Drain present queue
		while (!m_queued_frames.empty())
		{
			check_present_status();
		}

		m_flush_requests.clear_pending_flag();
	}
	else
	{
		// Mark this queue as pending and proceed
		m_current_command_buffer->pending = true;
	}

	// Grab next cb in line and make it usable
	// NOTE: Even in the case of a hard sync, this is required to free any waiters on the CB (ZCULL)
	m_current_cb_index = (m_current_cb_index + 1) % VK_MAX_ASYNC_CB_COUNT;
	m_current_command_buffer = &m_primary_cb_list[m_current_cb_index];

	if (!m_current_command_buffer->poke())
	{
		rsx_log.error("CB chain has run out of free entries!");
	}

	m_current_command_buffer->reset();

	// Just in case a queued frame holds a ref to this cb, drain the present queue
	check_present_status();

	if (m_occlusion_query_active)
	{
		m_current_command_buffer->flags |= vk::command_buffer::cb_load_occluson_task;
	}

	open_command_buffer();
}
void VKGSRender::sync_hint(rsx::FIFO_hint hint, void* args)
{
	verify(HERE), args;
	rsx::thread::sync_hint(hint, args);

	// Occlusion queries not enabled, do nothing
	if (!(m_current_command_buffer->flags & vk::command_buffer::cb_has_occlusion_task))
		return;

	// Check if the required report is synced to this CB
	auto occlusion_info = static_cast<rsx::reports::occlusion_query_info*>(args);
	auto& data = m_occlusion_map[occlusion_info->driver_handle];

	// NOTE: Currently, a special condition exists where the indices can be empty even with an active draw count.
	// This is caused by the async compiler and should be removed once ubershaders are added in
	if (!data.is_current(m_current_command_buffer) || data.indices.empty())
		return;

	// Occlusion test result evaluation is coming up, avoid a hard sync
	switch (hint)
	{
	case rsx::FIFO_hint::hint_conditional_render_eval:
	{
		// If a flush request is already enqueued, do nothing
		if (m_flush_requests.pending())
			return;

		// Schedule a sync on the next loop iteration
		m_flush_requests.post(false);
		m_flush_requests.remove_one();
		break;
	}
	case rsx::FIFO_hint::hint_zcull_sync:
	{
		// Unavoidable hard sync coming up, flush immediately
		// This heavyweight hint should be used with caution
		std::lock_guard lock(m_flush_queue_mutex);
		flush_command_queue();

		if (m_flush_requests.pending())
		{
			// Clear without wait
			m_flush_requests.clear_pending_flag();
		}
		break;
	}
	}
}
void VKGSRender::do_local_task(rsx::FIFO_state state)
{
	if (m_queue_status & flush_queue_state::deadlock)
	{
		// Clear offloader deadlock
		// NOTE: It is not possible to handle regular flush requests before this is cleared
		// NOTE: This may cause graphics corruption due to unsynchronized modification
		on_invalidate_memory_range(m_offloader_fault_range, m_offloader_fault_cause);
		m_queue_status.clear(flush_queue_state::deadlock);
	}

	if (m_queue_status & flush_queue_state::flushing)
	{
		// Abort recursive CB submit requests.
		// When the flushing flag is already set, only deadlock events may be processed.
		return;
	}
	else if (m_flush_requests.pending())
	{
		if (m_flush_queue_mutex.try_lock())
		{
			// TODO: Determine if a hard sync is necessary
			// Pipeline barriers later may do a better job synchronizing than wholly stalling the pipeline
			flush_command_queue();
			m_flush_requests.clear_pending_flag();
			m_flush_requests.consumer_wait();
			m_flush_queue_mutex.unlock();
		}
	}
	else if (!in_begin_end && state != rsx::FIFO_state::lock_wait)
	{
		if (m_graphics_state & rsx::pipeline_state::framebuffer_reads_dirty)
		{
			// This will re-engage locks and break the texture cache if another thread is waiting in the access violation handler!
			// Only call when there are no waiters
			m_texture_cache.do_update();
			m_graphics_state &= ~rsx::pipeline_state::framebuffer_reads_dirty;
		}
	}

	rsx::thread::do_local_task(state);

	switch (state)
	{
	case rsx::FIFO_state::lock_wait:
		// Critical check finished
		return;
	//case rsx::FIFO_state::spinning:
	//case rsx::FIFO_state::empty:
		// We have some time, check the present queue
		//check_present_status();
		//break;
	default:
		break;
	}

	if (m_overlay_manager)
	{
		if (!in_begin_end && async_flip_requested & flip_request::native_ui)
		{
			flush_command_queue(true);
			rsx::display_flip_info_t info{};
			info.buffer = current_display_buffer;
			flip(info);
		}
	}
}
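
// Sketch of the non-blocking servicing pattern used above: the render thread
// only honors an off-thread flush request when it can take the queue mutex
// without stalling, otherwise the request is retried on a later iteration.
// Names are illustrative.
#if 0
if (flush_requested && queue_mutex.try_lock())
{
	flush_command_queue();
	clear_request_and_wake_consumer(); // corresponds to clear_pending_flag() + consumer_wait()
	queue_mutex.unlock();
}
#endif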
bool VKGSRender::load_program()
{
	if (m_graphics_state & rsx::pipeline_state::invalidate_pipeline_bits)
	{
		get_current_fragment_program(fs_sampler_state);
		verify(HERE), current_fragment_program.valid;

		get_current_vertex_program(vs_sampler_state);

		m_graphics_state &= ~rsx::pipeline_state::invalidate_pipeline_bits;
	}

	auto &vertex_program = current_vertex_program;
	auto &fragment_program = current_fragment_program;
	auto old_program = m_program;

	vk::pipeline_props properties{};

	// Input assembly
	bool emulated_primitive_type;
	properties.state.set_primitive_type(vk::get_appropriate_topology(rsx::method_registers.current_draw_clause.primitive, emulated_primitive_type));

	const bool restarts_valid = rsx::method_registers.current_draw_clause.command == rsx::draw_command::indexed && !emulated_primitive_type && !rsx::method_registers.current_draw_clause.is_disjoint_primitive;
	if (rsx::method_registers.restart_index_enabled() && !vk::emulate_primitive_restart(rsx::method_registers.current_draw_clause.primitive) && restarts_valid)
		properties.state.enable_primitive_restart();

	// Rasterizer state
	properties.state.set_attachment_count(::size32(m_draw_buffers));
	properties.state.set_front_face(vk::get_front_face(rsx::method_registers.front_face_mode()));
	properties.state.enable_depth_clamp(rsx::method_registers.depth_clamp_enabled() || !rsx::method_registers.depth_clip_enabled());
	properties.state.enable_depth_bias(true);
	properties.state.enable_depth_bounds_test(m_device->get_depth_bounds_support());

	if (rsx::method_registers.depth_test_enabled())
	{
		// NOTE: Like stencil, depth write is meaningless without depth test
		properties.state.set_depth_mask(rsx::method_registers.depth_write_enabled());
		properties.state.enable_depth_test(vk::get_compare_func(rsx::method_registers.depth_func()));
	}

	if (rsx::method_registers.logic_op_enabled())
		properties.state.enable_logic_op(vk::get_logic_op(rsx::method_registers.logic_operation()));

	if (rsx::method_registers.cull_face_enabled())
		properties.state.enable_cull_face(vk::get_cull_face(rsx::method_registers.cull_face_mode()));

	for (uint index = 0; index < m_draw_buffers.size(); ++index)
	{
		bool color_mask_b = rsx::method_registers.color_mask_b(index);
		bool color_mask_g = rsx::method_registers.color_mask_g(index);
		bool color_mask_r = rsx::method_registers.color_mask_r(index);
		bool color_mask_a = rsx::method_registers.color_mask_a(index);

		if (rsx::method_registers.surface_color() == rsx::surface_color_format::g8b8)
			rsx::get_g8b8_r8g8_colormask(color_mask_r, color_mask_g, color_mask_b, color_mask_a);

		properties.state.set_color_mask(index, color_mask_r, color_mask_g, color_mask_b, color_mask_a);
	}

	bool mrt_blend_enabled[] =
	{
		rsx::method_registers.blend_enabled(),
		rsx::method_registers.blend_enabled_surface_1(),
		rsx::method_registers.blend_enabled_surface_2(),
		rsx::method_registers.blend_enabled_surface_3()
	};

	VkBlendFactor sfactor_rgb, sfactor_a, dfactor_rgb, dfactor_a;
	VkBlendOp equation_rgb, equation_a;

	if (mrt_blend_enabled[0] || mrt_blend_enabled[1] || mrt_blend_enabled[2] || mrt_blend_enabled[3])
	{
		sfactor_rgb = vk::get_blend_factor(rsx::method_registers.blend_func_sfactor_rgb());
		sfactor_a = vk::get_blend_factor(rsx::method_registers.blend_func_sfactor_a());
		dfactor_rgb = vk::get_blend_factor(rsx::method_registers.blend_func_dfactor_rgb());
		dfactor_a = vk::get_blend_factor(rsx::method_registers.blend_func_dfactor_a());
		equation_rgb = vk::get_blend_op(rsx::method_registers.blend_equation_rgb());
		equation_a = vk::get_blend_op(rsx::method_registers.blend_equation_a());

		for (u8 idx = 0; idx < m_draw_buffers.size(); ++idx)
		{
			if (mrt_blend_enabled[idx])
			{
				properties.state.enable_blend(idx, sfactor_rgb, sfactor_a, dfactor_rgb, dfactor_a, equation_rgb, equation_a);
			}
		}
	}

	if (rsx::method_registers.stencil_test_enabled())
	{
		if (!rsx::method_registers.two_sided_stencil_test_enabled())
		{
			properties.state.enable_stencil_test(
				vk::get_stencil_op(rsx::method_registers.stencil_op_fail()),
				vk::get_stencil_op(rsx::method_registers.stencil_op_zfail()),
				vk::get_stencil_op(rsx::method_registers.stencil_op_zpass()),
				vk::get_compare_func(rsx::method_registers.stencil_func()),
				0xFF, 0xFF); // write mask, func_mask, ref are dynamic
		}
		else
		{
			properties.state.enable_stencil_test_separate(0,
				vk::get_stencil_op(rsx::method_registers.stencil_op_fail()),
				vk::get_stencil_op(rsx::method_registers.stencil_op_zfail()),
				vk::get_stencil_op(rsx::method_registers.stencil_op_zpass()),
				vk::get_compare_func(rsx::method_registers.stencil_func()),
				0xFF, 0xFF); // write mask, func_mask, ref are dynamic

			properties.state.enable_stencil_test_separate(1,
				vk::get_stencil_op(rsx::method_registers.back_stencil_op_fail()),
				vk::get_stencil_op(rsx::method_registers.back_stencil_op_zfail()),
				vk::get_stencil_op(rsx::method_registers.back_stencil_op_zpass()),
				vk::get_compare_func(rsx::method_registers.back_stencil_func()),
				0xFF, 0xFF); // write mask, func_mask, ref are dynamic
		}

		if (auto ds = m_rtts.m_bound_depth_stencil.second;
			ds && ds->samples() > 1 && !(ds->stencil_init_flags & 0xFF00))
		{
			if (properties.state.ds.front.failOp != VK_STENCIL_OP_KEEP ||
				properties.state.ds.front.depthFailOp != VK_STENCIL_OP_KEEP ||
				properties.state.ds.front.passOp != VK_STENCIL_OP_KEEP ||
				properties.state.ds.back.failOp != VK_STENCIL_OP_KEEP ||
				properties.state.ds.back.depthFailOp != VK_STENCIL_OP_KEEP ||
				properties.state.ds.back.passOp != VK_STENCIL_OP_KEEP)
			{
				// Toggle bit 9 to signal that a full bit-wise transfer is required
				ds->stencil_init_flags |= (1 << 8);
			}
		}
	}

	const auto rasterization_samples = u8((m_current_renderpass_key >> 16) & 0xF);
	if (backend_config.supports_hw_a2c || rasterization_samples > 1)
	{
		const bool alpha_to_one_enable = rsx::method_registers.msaa_alpha_to_one_enabled() && backend_config.supports_hw_a2one;

		properties.state.set_multisample_state(
			rasterization_samples,
			rsx::method_registers.msaa_sample_mask(),
			rsx::method_registers.msaa_enabled(),
			rsx::method_registers.msaa_alpha_to_coverage_enabled(),
			alpha_to_one_enable);
	}

	properties.renderpass_key = m_current_renderpass_key;

	vk::enter_uninterruptible();

	// Load current program from buffer
	vertex_program.skip_vertex_input_check = true;
	fragment_program.unnormalized_coords = 0;
	m_program = m_prog_buffer->get_graphics_pipeline(vertex_program, fragment_program, properties,
		!g_cfg.video.disable_asynchronous_shader_compiler, true, *m_device, pipeline_layout).get();

	vk::leave_uninterruptible();

	if (m_prog_buffer->check_cache_missed())
	{
		// Notify the user with a HUD notification
		if (g_cfg.misc.show_shader_compilation_hint)
		{
			if (m_overlay_manager)
			{
				if (auto dlg = m_overlay_manager->get<rsx::overlays::shader_compile_notification>())
				{
					// Extend duration
					dlg->touch();
				}
				else
				{
					// Create dialog but do not show immediately
					m_overlay_manager->create<rsx::overlays::shader_compile_notification>();
				}
			}
		}
	}

	return m_program != nullptr;
}
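
// Hypothetical sketch of the lookup behind get_graphics_pipeline(): the
// fixed-function state plus the renderpass key act as the cache key, and a
// miss triggers (possibly asynchronous) compilation. The map type and helper
// names are assumptions, not the actual program buffer implementation.
#if 0
std::unordered_map<vk::pipeline_props, std::unique_ptr<vk::glsl::program>, props_hash> pipeline_cache;

vk::glsl::program* lookup_pipeline(const vk::pipeline_props& props)
{
	if (auto found = pipeline_cache.find(props); found != pipeline_cache.end())
		return found->second.get();

	auto compiled = compile_graphics_pipeline(props); // expensive; may be deferred to a worker
	return pipeline_cache.emplace(props, std::move(compiled)).first->second.get();
}
#endif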
void VKGSRender::load_program_env()
{
	if (!m_program)
	{
		fmt::throw_exception("Unreachable right now" HERE);
	}

	const u32 fragment_constants_size = current_fp_metadata.program_constants_buffer_length;

	const bool update_transform_constants = !!(m_graphics_state & rsx::pipeline_state::transform_constants_dirty);
	const bool update_fragment_constants = !!(m_graphics_state & rsx::pipeline_state::fragment_constants_dirty);
	const bool update_vertex_env = !!(m_graphics_state & rsx::pipeline_state::vertex_state_dirty);
	const bool update_fragment_env = !!(m_graphics_state & rsx::pipeline_state::fragment_state_dirty);
	const bool update_fragment_texture_env = !!(m_graphics_state & rsx::pipeline_state::fragment_texture_state_dirty);

	if (update_vertex_env)
	{
		check_heap_status(VK_HEAP_CHECK_VERTEX_ENV_STORAGE);

		// Vertex state
		const auto mem = m_vertex_env_ring_info.alloc<256>(256);
		auto buf = static_cast<u8*>(m_vertex_env_ring_info.map(mem, 148));

		fill_scale_offset_data(buf, false);
		fill_user_clip_data(buf + 64);
		*(reinterpret_cast<u32*>(buf + 128)) = rsx::method_registers.transform_branch_bits();
		*(reinterpret_cast<f32*>(buf + 132)) = rsx::method_registers.point_size();
		*(reinterpret_cast<f32*>(buf + 136)) = rsx::method_registers.clip_min();
		*(reinterpret_cast<f32*>(buf + 140)) = rsx::method_registers.clip_max();

		m_vertex_env_ring_info.unmap();
		m_vertex_env_buffer_info = { m_vertex_env_ring_info.heap->value, mem, 144 };
	}

	if (update_transform_constants)
	{
		check_heap_status(VK_HEAP_CHECK_TRANSFORM_CONSTANTS_STORAGE);

		// Transform constants
		auto mem = m_transform_constants_ring_info.alloc<256>(8192);
		auto buf = m_transform_constants_ring_info.map(mem, 8192);

		fill_vertex_program_constants_data(buf);
		m_transform_constants_ring_info.unmap();
		m_vertex_constants_buffer_info = { m_transform_constants_ring_info.heap->value, mem, 8192 };
	}

	if (update_fragment_constants)
	{
		check_heap_status(VK_HEAP_CHECK_FRAGMENT_CONSTANTS_STORAGE);

		// Fragment constants
		if (fragment_constants_size)
		{
			auto mem = m_fragment_constants_ring_info.alloc<256>(fragment_constants_size);
			auto buf = m_fragment_constants_ring_info.map(mem, fragment_constants_size);

			m_prog_buffer->fill_fragment_constants_buffer({ reinterpret_cast<float*>(buf), fragment_constants_size },
				current_fragment_program, vk::sanitize_fp_values());

			m_fragment_constants_ring_info.unmap();
			m_fragment_constants_buffer_info = { m_fragment_constants_ring_info.heap->value, mem, fragment_constants_size };
		}
		else
		{
			m_fragment_constants_buffer_info = { m_fragment_constants_ring_info.heap->value, 0, 32 };
		}
	}

	if (update_fragment_env)
	{
		check_heap_status(VK_HEAP_CHECK_FRAGMENT_ENV_STORAGE);

		auto mem = m_fragment_env_ring_info.alloc<256>(256);
		auto buf = m_fragment_env_ring_info.map(mem, 32);

		fill_fragment_state_buffer(buf, current_fragment_program);
		m_fragment_env_ring_info.unmap();
		m_fragment_env_buffer_info = { m_fragment_env_ring_info.heap->value, mem, 32 };
	}

	if (update_fragment_texture_env)
	{
		check_heap_status(VK_HEAP_CHECK_TEXTURE_ENV_STORAGE);

		auto mem = m_fragment_texture_params_ring_info.alloc<256>(256);
		auto buf = m_fragment_texture_params_ring_info.map(mem, 256);

		fill_fragment_texture_parameters(buf, current_fragment_program);
		m_fragment_texture_params_ring_info.unmap();
		m_fragment_texture_params_buffer_info = { m_fragment_texture_params_ring_info.heap->value, mem, 256 };
	}

	const auto &binding_table = m_device->get_pipeline_binding_table();
	m_program->bind_uniform(m_vertex_env_buffer_info, binding_table.vertex_params_bind_slot, m_current_frame->descriptor_set);
	m_program->bind_uniform(m_vertex_constants_buffer_info, binding_table.vertex_constant_buffers_bind_slot, m_current_frame->descriptor_set);
	m_program->bind_uniform(m_fragment_constants_buffer_info, binding_table.fragment_constant_buffers_bind_slot, m_current_frame->descriptor_set);
	m_program->bind_uniform(m_fragment_env_buffer_info, binding_table.fragment_state_bind_slot, m_current_frame->descriptor_set);
	m_program->bind_uniform(m_fragment_texture_params_buffer_info, binding_table.fragment_texture_params_bind_slot, m_current_frame->descriptor_set);

	if (vk::emulate_conditional_rendering())
	{
		auto predicate = m_cond_render_buffer ? m_cond_render_buffer->value : vk::get_scratch_buffer()->value;
		m_program->bind_buffer({ predicate, 0, 4 }, binding_table.conditional_render_predicate_slot, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, m_current_frame->descriptor_set);
	}

	// Clear flags
	const u32 handled_flags = (rsx::pipeline_state::fragment_state_dirty | rsx::pipeline_state::vertex_state_dirty | rsx::pipeline_state::transform_constants_dirty | rsx::pipeline_state::fragment_constants_dirty | rsx::pipeline_state::fragment_texture_state_dirty);
	m_graphics_state &= ~handled_flags;
}
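
// Every upload above follows the same suballocation pattern on a host-visible
// ring heap; a condensed sketch with an illustrative size:
#if 0
const auto mem = ring_info.alloc<256>(size);        // 256-byte aligned suballocation
auto ptr = ring_info.map(mem, size);                // host-visible pointer into the heap
write_uniform_data(ptr);                            // CPU-side fill
ring_info.unmap();
buffer_info = { ring_info.heap->value, mem, size }; // buffer/offset/range triple for the descriptor
#endif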
void VKGSRender::update_vertex_env(u32 id, const vk::vertex_upload_info& vertex_info)
{
	// Actual allocation must have been done previously
	u32 base_offset;
	const u32 offset32 = static_cast<u32>(m_vertex_layout_stream_info.offset);
	const u32 range32 = static_cast<u32>(m_vertex_layout_stream_info.range);

	if (!m_vertex_layout_storage || !m_vertex_layout_storage->in_range(offset32, range32, base_offset))
	{
		verify("Incompatible driver (MacOS?)" HERE), m_texbuffer_view_size >= m_vertex_layout_stream_info.range;

		if (m_vertex_layout_storage)
			m_current_frame->buffer_views_to_clean.push_back(std::move(m_vertex_layout_storage));

		const size_t alloc_addr = m_vertex_layout_stream_info.offset;
		const size_t view_size = (alloc_addr + m_texbuffer_view_size) > m_vertex_layout_ring_info.size() ? m_vertex_layout_ring_info.size() - alloc_addr : m_texbuffer_view_size;
		m_vertex_layout_storage = std::make_unique<vk::buffer_view>(*m_device, m_vertex_layout_ring_info.heap->value, VK_FORMAT_R32G32_UINT, alloc_addr, view_size);
		base_offset = 0;
	}

	u8 data_size = 16;
	u32 draw_info[5];

	draw_info[0] = vertex_info.vertex_index_base;
	draw_info[1] = vertex_info.vertex_index_offset;
	draw_info[2] = id;
	draw_info[3] = (id * 16) + (base_offset / 8);

	if (vk::emulate_conditional_rendering())
	{
		draw_info[4] = cond_render_ctrl.hw_cond_active ? 1 : 0;
		data_size = 20;
	}

	vkCmdPushConstants(*m_current_command_buffer, pipeline_layout, VK_SHADER_STAGE_VERTEX_BIT, 0, data_size, draw_info);

	const size_t data_offset = (id * 128) + m_vertex_layout_stream_info.offset;
	auto dst = m_vertex_layout_ring_info.map(data_offset, 128);

	fill_vertex_layout_state(m_vertex_layout, vertex_info.first_vertex, vertex_info.allocated_vertex_count, static_cast<s32*>(dst),
		vertex_info.persistent_window_offset, vertex_info.volatile_window_offset);

	m_vertex_layout_ring_info.unmap();
}
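
// Layout of the push-constant block written above (vertex stage, offset 0).
// The fifth word is only pushed when conditional rendering is emulated, which
// is why data_size grows from 16 to 20 bytes.
#if 0
struct draw_parameters
{
	u32 vertex_index_base;   // draw_info[0]
	u32 vertex_index_offset; // draw_info[1]
	u32 draw_id;             // draw_info[2]
	u32 layout_offset;       // draw_info[3] = (id * 16) + (base_offset / 8)
	u32 cond_render_active;  // draw_info[4], optional
};
#endif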
void VKGSRender::init_buffers(rsx::framebuffer_creation_context context, bool)
{
	prepare_rtts(context);
}
void VKGSRender::close_and_submit_command_buffer(vk::fence* pFence, VkSemaphore wait_semaphore, VkSemaphore signal_semaphore, VkPipelineStageFlags pipeline_stage_flags)
{
	verify("Recursive calls to submit the current commandbuffer will cause a deadlock" HERE), !m_queue_status.test_and_set(flush_queue_state::flushing);

	// Workaround for deadlock occurring during RSX offloader fault
	// TODO: Restructure command submission infrastructure to avoid this condition
	const bool sync_success = g_fxo->get<rsx::dma_manager>()->sync();
	const VkBool32 force_flush = !sync_success;

	if (vk::test_status_interrupt(vk::heap_dirty))
	{
		if (m_attrib_ring_info.dirty() ||
			m_fragment_env_ring_info.dirty() ||
			m_vertex_env_ring_info.dirty() ||
			m_fragment_texture_params_ring_info.dirty() ||
			m_vertex_layout_ring_info.dirty() ||
			m_fragment_constants_ring_info.dirty() ||
			m_index_buffer_ring_info.dirty() ||
			m_transform_constants_ring_info.dirty() ||
			m_texture_upload_buffer_ring_info.dirty())
		{
			std::lock_guard lock(m_secondary_cb_guard);
			m_secondary_command_buffer.begin();

			m_attrib_ring_info.sync(m_secondary_command_buffer);
			m_fragment_env_ring_info.sync(m_secondary_command_buffer);
			m_vertex_env_ring_info.sync(m_secondary_command_buffer);
			m_fragment_texture_params_ring_info.sync(m_secondary_command_buffer);
			m_vertex_layout_ring_info.sync(m_secondary_command_buffer);
			m_fragment_constants_ring_info.sync(m_secondary_command_buffer);
			m_index_buffer_ring_info.sync(m_secondary_command_buffer);
			m_transform_constants_ring_info.sync(m_secondary_command_buffer);
			m_texture_upload_buffer_ring_info.sync(m_secondary_command_buffer);

			m_secondary_command_buffer.end();

			m_secondary_command_buffer.submit(m_swapchain->get_graphics_queue(),
				VK_NULL_HANDLE, VK_NULL_HANDLE, VK_NULL_HANDLE, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, force_flush);
		}

		vk::clear_status_interrupt(vk::heap_dirty);
	}

#if 0 // Currently unreachable
	if (m_current_command_buffer->flags & vk::command_buffer::cb_has_conditional_render)
	{
		verify(HERE), m_render_pass_open;
		m_device->cmdEndConditionalRenderingEXT(*m_current_command_buffer);
	}
#endif

	// End any active renderpasses; the caller should handle reopening
	if (vk::is_renderpass_open(*m_current_command_buffer))
	{
		close_render_pass();
	}

	// End open queries. Flags will be automatically reset by the submit routine
	if (m_current_command_buffer->flags & vk::command_buffer::cb_has_open_query)
	{
		auto open_query = m_occlusion_map[m_active_query_info->driver_handle].indices.back();
		m_occlusion_query_pool.end_query(*m_current_command_buffer, open_query);
		m_current_command_buffer->flags &= ~vk::command_buffer::cb_has_open_query;
	}

	m_current_command_buffer->end();
	m_current_command_buffer->tag();

	m_current_command_buffer->submit(m_swapchain->get_graphics_queue(),
		wait_semaphore, signal_semaphore, pFence, pipeline_stage_flags, force_flush);

	if (force_flush)
	{
		verify(HERE), m_current_command_buffer->submit_fence->flushed;
	}

	m_queue_status.clear(flush_queue_state::flushing);
}

void VKGSRender::open_command_buffer()
{
	m_current_command_buffer->begin();
}
void VKGSRender::prepare_rtts(rsx::framebuffer_creation_context context)
{
	const bool clipped_scissor = (context == rsx::framebuffer_creation_context::context_draw);
	if (m_current_framebuffer_context == context && !m_rtts_dirty && m_draw_fbo)
	{
		// Fast path
		// Framebuffer usage has not changed, framebuffer exists and config regs have not changed
		set_scissor(clipped_scissor);
		return;
	}

	m_rtts_dirty = false;
	framebuffer_status_valid = false;
	m_framebuffer_state_contested = false;

	get_framebuffer_layout(context, m_framebuffer_layout);
	if (!framebuffer_status_valid)
	{
		return;
	}

	if (m_draw_fbo && m_framebuffer_layout.ignore_change)
	{
		// Nothing has changed, we're still using the same framebuffer
		// Update flags to match current
		set_scissor(clipped_scissor);
		return;
	}

	m_rtts.prepare_render_target(*m_current_command_buffer,
		m_framebuffer_layout.color_format, m_framebuffer_layout.depth_format,
		m_framebuffer_layout.width, m_framebuffer_layout.height,
		m_framebuffer_layout.target, m_framebuffer_layout.aa_mode,
		m_framebuffer_layout.color_addresses, m_framebuffer_layout.zeta_address,
		m_framebuffer_layout.actual_color_pitch, m_framebuffer_layout.actual_zeta_pitch,
		(*m_device), *m_current_command_buffer);

	// Reset framebuffer information
	const auto color_bpp = get_format_block_size_in_bytes(m_framebuffer_layout.color_format);
	const auto samples = get_format_sample_count(m_framebuffer_layout.aa_mode);

	for (u8 i = 0; i < rsx::limits::color_buffers_count; ++i)
	{
		// Flush old address if we keep missing it
		if (m_surface_info[i].pitch && g_cfg.video.write_color_buffers)
		{
			const utils::address_range rsx_range = m_surface_info[i].get_memory_range();
			m_texture_cache.set_memory_read_flags(rsx_range, rsx::memory_read_flags::flush_once);
			m_texture_cache.flush_if_cache_miss_likely(*m_current_command_buffer, rsx_range);
		}

		m_surface_info[i].address = m_surface_info[i].pitch = 0;
		m_surface_info[i].width = m_framebuffer_layout.width;
		m_surface_info[i].height = m_framebuffer_layout.height;
		m_surface_info[i].color_format = m_framebuffer_layout.color_format;
		m_surface_info[i].bpp = color_bpp;
		m_surface_info[i].samples = samples;
	}

	// Process depth surface as well
	{
		if (m_depth_surface_info.pitch && g_cfg.video.write_depth_buffer)
		{
			const utils::address_range surface_range = m_depth_surface_info.get_memory_range();
			m_texture_cache.set_memory_read_flags(surface_range, rsx::memory_read_flags::flush_once);
			m_texture_cache.flush_if_cache_miss_likely(*m_current_command_buffer, surface_range);
		}

		m_depth_surface_info.address = m_depth_surface_info.pitch = 0;
		m_depth_surface_info.width = m_framebuffer_layout.width;
		m_depth_surface_info.height = m_framebuffer_layout.height;
		m_depth_surface_info.depth_format = m_framebuffer_layout.depth_format;
		m_depth_surface_info.depth_buffer_float = m_framebuffer_layout.depth_float;
		m_depth_surface_info.bpp = (m_framebuffer_layout.depth_format == rsx::surface_depth_format::z16 ? 2 : 4);
		m_depth_surface_info.samples = samples;
	}

	// Bind created rtts as current fbo...
	const auto draw_buffers = rsx::utility::get_rtt_indexes(m_framebuffer_layout.target);
	m_draw_buffers.clear();
	m_fbo_images.clear();

	for (u8 index : draw_buffers)
	{
		if (auto surface = std::get<1>(m_rtts.m_bound_render_targets[index]))
		{
			m_fbo_images.push_back(surface);

			m_surface_info[index].address = m_framebuffer_layout.color_addresses[index];
			m_surface_info[index].pitch = m_framebuffer_layout.actual_color_pitch[index];
			verify("Pitch mismatch!" HERE), surface->rsx_pitch == m_framebuffer_layout.actual_color_pitch[index];

			m_texture_cache.notify_surface_changed(m_surface_info[index].get_memory_range(m_framebuffer_layout.aa_factors));
			m_draw_buffers.push_back(index);
		}
	}

	if (std::get<0>(m_rtts.m_bound_depth_stencil) != 0)
	{
		auto ds = std::get<1>(m_rtts.m_bound_depth_stencil);
		ds->set_depth_render_mode(!m_framebuffer_layout.depth_float);
		m_fbo_images.push_back(ds);

		m_depth_surface_info.address = m_framebuffer_layout.zeta_address;
		m_depth_surface_info.pitch = m_framebuffer_layout.actual_zeta_pitch;
		verify("Pitch mismatch!" HERE), ds->rsx_pitch == m_framebuffer_layout.actual_zeta_pitch;

		m_texture_cache.notify_surface_changed(m_depth_surface_info.get_memory_range(m_framebuffer_layout.aa_factors));
	}

	// Before messing with memory properties, flush command queue if there are dma transfers queued up
	if (m_current_command_buffer->flags & vk::command_buffer::cb_has_dma_transfer)
	{
		flush_command_queue();
	}

	const auto color_fmt_info = get_compatible_gcm_format(m_framebuffer_layout.color_format);
	for (u8 index : m_draw_buffers)
	{
		if (!m_surface_info[index].address || !m_surface_info[index].pitch) continue;

		const utils::address_range surface_range = m_surface_info[index].get_memory_range();
		if (g_cfg.video.write_color_buffers)
		{
			m_texture_cache.lock_memory_region(
				*m_current_command_buffer, m_rtts.m_bound_render_targets[index].second, surface_range, true,
				m_surface_info[index].width, m_surface_info[index].height, m_framebuffer_layout.actual_color_pitch[index],
				color_fmt_info.first, color_fmt_info.second);
		}
		else
		{
			m_texture_cache.commit_framebuffer_memory_region(*m_current_command_buffer, surface_range);
		}
	}

	if (m_depth_surface_info.address && m_depth_surface_info.pitch)
	{
		const utils::address_range surface_range = m_depth_surface_info.get_memory_range();
		if (g_cfg.video.write_depth_buffer)
		{
			const u32 gcm_format = (m_depth_surface_info.depth_format == rsx::surface_depth_format::z16) ? CELL_GCM_TEXTURE_DEPTH16 : CELL_GCM_TEXTURE_DEPTH24_D8;
			m_texture_cache.lock_memory_region(
				*m_current_command_buffer, m_rtts.m_bound_depth_stencil.second, surface_range, true,
				m_depth_surface_info.width, m_depth_surface_info.height, m_framebuffer_layout.actual_zeta_pitch, gcm_format, true);
		}
		else
		{
			m_texture_cache.commit_framebuffer_memory_region(*m_current_command_buffer, surface_range);
		}
	}

	if (!m_rtts.orphaned_surfaces.empty())
	{
		u32 gcm_format;
		bool swap_bytes;

		for (auto& surface : m_rtts.orphaned_surfaces)
		{
			const bool lock = surface->is_depth_surface() ? !!g_cfg.video.write_depth_buffer :
				!!g_cfg.video.write_color_buffers;

			if (!lock) [[likely]]
			{
				m_texture_cache.commit_framebuffer_memory_region(*m_current_command_buffer, surface->get_memory_range());
				continue;
			}

			if (surface->is_depth_surface())
			{
				gcm_format = (surface->get_surface_depth_format() == rsx::surface_depth_format::z16) ? CELL_GCM_TEXTURE_DEPTH16 : CELL_GCM_TEXTURE_DEPTH24_D8;
				swap_bytes = true;
			}
			else
			{
				auto info = get_compatible_gcm_format(surface->get_surface_color_format());
				gcm_format = info.first;
				swap_bytes = info.second;
			}

			m_texture_cache.lock_memory_region(
				*m_current_command_buffer, surface, surface->get_memory_range(), false,
				surface->get_surface_width(rsx::surface_metrics::pixels), surface->get_surface_height(rsx::surface_metrics::pixels), surface->get_rsx_pitch(),
				gcm_format, swap_bytes);
		}

		m_rtts.orphaned_surfaces.clear();
	}

	m_current_renderpass_key = vk::get_renderpass_key(m_fbo_images);
	m_cached_renderpass = vk::get_renderpass(*m_device, m_current_renderpass_key);

	// Search old framebuffers for this same configuration
	const auto fbo_width = rsx::apply_resolution_scale(m_framebuffer_layout.width, true);
	const auto fbo_height = rsx::apply_resolution_scale(m_framebuffer_layout.height, true);

	if (m_draw_fbo)
	{
		// Release old ref
		m_draw_fbo->release();
	}

	m_draw_fbo = vk::get_framebuffer(*m_device, fbo_width, fbo_height, m_cached_renderpass, m_fbo_images);
	m_draw_fbo->add_ref();

	set_viewport();
	set_scissor(clipped_scissor);

	check_zcull_status(true);
}
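
// Hedged sketch of what the framebuffer lookup above is assumed to match on:
// dimensions, the renderpass, and the exact attachment set. The structure is
// illustrative only, not the actual cache entry used by vk::get_framebuffer.
#if 0
struct framebuffer_key
{
	u16 width, height;
	VkRenderPass pass;
	std::vector<vk::image*> attachments;

	bool matches(u16 w, u16 h, VkRenderPass p, const std::vector<vk::image*>& images) const
	{
		return width == w && height == h && pass == p && attachments == images;
	}
};
#endif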
void VKGSRender::renderctl(u32 request_code, void* args)
{
	switch (request_code)
	{
	case vk::rctrl_queue_submit:
	{
		auto packet = reinterpret_cast<vk::submit_packet*>(args);
		vk::queue_submit(packet->queue, &packet->submit_info, packet->pfence, VK_TRUE);
		free(packet);
		break;
	}
	default:
		fmt::throw_exception("Unhandled request code 0x%x" HERE, request_code);
	}
}
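
// Assumed producer side of the rctrl_queue_submit path: an offloader thread
// heap-allocates a packet and hands the actual vkQueueSubmit to the render
// thread, which frees it after submission (see the handler above). Field names
// follow that usage; the construction details are an assumption.
#if 0
auto packet = static_cast<vk::submit_packet*>(std::malloc(sizeof(vk::submit_packet)));
packet->queue = queue;
packet->pfence = fence;
packet->submit_info = submit_info;
renderer->renderctl(vk::rctrl_queue_submit, packet); // handler calls free(packet)
#endif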
bool VKGSRender::scaled_image_from_memory(rsx::blit_src_info& src, rsx::blit_dst_info& dst, bool interpolate)
{
	if (swapchain_unavailable)
		return false;

	// Verify enough memory exists before attempting to handle data transfer
	check_heap_status(VK_HEAP_CHECK_TEXTURE_UPLOAD_STORAGE);

	if (m_texture_cache.blit(src, dst, interpolate, m_rtts, *m_current_command_buffer))
	{
		m_samplers_dirty.store(true);
		m_current_command_buffer->set_flag(vk::command_buffer::cb_has_blit_transfer);

		if (m_current_command_buffer->flags & vk::command_buffer::cb_has_dma_transfer)
		{
			// A dma transfer has been queued onto this cb
			// This likely means that we're done with the transfers to the target (writes_likely_completed=1)
			flush_command_queue();
		}
		return true;
	}

	return false;
}
void VKGSRender::begin_occlusion_query(rsx::reports::occlusion_query_info* query)
{
	verify(HERE), !m_occlusion_query_active;

	query->result = 0;
	//query->sync_timestamp = get_system_time();
	m_active_query_info = query;
	m_occlusion_query_active = true;
	m_current_command_buffer->flags |= vk::command_buffer::cb_load_occluson_task;
}

void VKGSRender::end_occlusion_query(rsx::reports::occlusion_query_info* query)
{
	verify(HERE), query == m_active_query_info;

	// NOTE: flushing the queue is very expensive, do not flush just because a query stopped
	if (m_current_command_buffer->flags & vk::command_buffer::cb_has_open_query)
	{
		// End query
		auto open_query = m_occlusion_map[m_active_query_info->driver_handle].indices.back();
		m_occlusion_query_pool.end_query(*m_current_command_buffer, open_query);
		m_current_command_buffer->flags &= ~vk::command_buffer::cb_has_open_query;
	}

	// Clear occlusion load flag
	m_current_command_buffer->flags &= ~vk::command_buffer::cb_load_occluson_task;

	m_occlusion_query_active = false;
	m_active_query_info = nullptr;
}
bool VKGSRender::check_occlusion_query_status(rsx::reports::occlusion_query_info* query)
{
	if (!query->num_draws)
		return true;

	auto &data = m_occlusion_map[query->driver_handle];
	if (data.indices.empty())
		return true;

	if (data.is_current(m_current_command_buffer))
		return false;

	u32 oldest = data.indices.front();
	return m_occlusion_query_pool.check_query_status(oldest);
}
void VKGSRender::get_occlusion_query_result(rsx::reports::occlusion_query_info* query)
{
	auto &data = m_occlusion_map[query->driver_handle];
	if (data.indices.empty())
		return;

	if (query->num_draws)
	{
		if (data.is_current(m_current_command_buffer))
		{
			std::lock_guard lock(m_flush_queue_mutex);
			flush_command_queue();

			if (m_flush_requests.pending())
			{
				m_flush_requests.clear_pending_flag();
			}

			rsx_log.error("[Performance warning] Unexpected ZCULL read caused a hard sync");
			busy_wait();
		}

		data.sync();

		// Gather data
		for (const auto occlusion_id : data.indices)
		{
			// We only need one hit
			if (auto value = m_occlusion_query_pool.get_query_result(occlusion_id))
			{
				query->result = 1;
				break;
			}
		}
	}

	m_occlusion_query_pool.reset_queries(*m_current_command_buffer, data.indices);
	data.indices.clear();
}
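
// Assumed shape of the per-query bookkeeping used by the occlusion paths:
// each RSX-side query owns one or more pool indices plus a reference to the
// command buffer they were recorded on. Illustrative only; the real
// occlusion_data type may differ.
#if 0
struct occlusion_data
{
	std::vector<u32> indices;                            // pool slots owned by this query
	command_buffer_chunk* command_buffer_to_wait = nullptr;

	bool is_current(command_buffer_chunk* cmd) const { return command_buffer_to_wait == cmd; }
	void sync() { /* block until command_buffer_to_wait has executed */ }
};
#endif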
void VKGSRender::discard_occlusion_query(rsx::reports::occlusion_query_info* query)
{
	if (m_active_query_info == query)
	{
		end_occlusion_query(query);
	}

	auto &data = m_occlusion_map[query->driver_handle];
	if (data.indices.empty())
		return;

	m_occlusion_query_pool.reset_queries(*m_current_command_buffer, data.indices);
	data.indices.clear();
}
void VKGSRender::emergency_query_cleanup(vk::command_buffer* commands)
{
	verify("Command list mismatch" HERE), commands == static_cast<vk::command_buffer*>(m_current_command_buffer);

	if (m_current_command_buffer->flags & vk::command_buffer::cb_has_open_query)
	{
		auto open_query = m_occlusion_map[m_active_query_info->driver_handle].indices.back();
		m_occlusion_query_pool.end_query(*m_current_command_buffer, open_query);
		m_current_command_buffer->flags &= ~vk::command_buffer::cb_has_open_query;
	}
}
void VKGSRender::begin_conditional_rendering(const std::vector<rsx::reports::occlusion_query_info*>& sources)
{
	verify(HERE), !sources.empty();

	// Flags whether to calculate all entries or only one
	bool partial_eval;

	// Try to avoid regenerating the data if it's a repeat/spam
	// NOTE: The incoming list is reversed with the first entry being the newest
	if (m_cond_render_sync_tag == sources.front()->sync_tag)
	{
		// Already synced; a subdraw is possible if the last sync happened while the query was active
		if (!m_active_query_info || m_active_query_info != sources.front())
		{
			rsx::thread::begin_conditional_rendering(sources);
			return;
		}

		// Partial evaluation only
		partial_eval = true;
	}
	else
	{
		m_cond_render_sync_tag = sources.front()->sync_tag;
		partial_eval = false;
	}

	// Time to aggregate
	if (!m_cond_render_buffer)
	{
		auto& memory_props = m_device->get_memory_mapping();
		auto usage_flags = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

		if (m_device->get_conditional_render_support())
		{
			usage_flags |= VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT;
		}

		m_cond_render_buffer = std::make_unique<vk::buffer>(
			*m_device, 4,
			memory_props.device_local, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
			usage_flags, 0);
	}

	VkPipelineStageFlags dst_stage;
	VkAccessFlags dst_access;

	if (m_device->get_conditional_render_support())
	{
		dst_stage = VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT;
		dst_access = VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT;
	}
	else
	{
		dst_stage = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT;
		dst_access = VK_ACCESS_SHADER_READ_BIT;
	}

	if (sources.size() == 1)
	{
		const auto query = sources.front();
		const auto& query_info = m_occlusion_map[query->driver_handle];

		if (query_info.indices.size() == 1)
		{
			const auto& index = query_info.indices.front();
			m_occlusion_query_pool.get_query_result_indirect(*m_current_command_buffer, index, m_cond_render_buffer->value, 0);

			vk::insert_buffer_memory_barrier(*m_current_command_buffer, m_cond_render_buffer->value, 0, 4,
				VK_PIPELINE_STAGE_TRANSFER_BIT, dst_stage,
				VK_ACCESS_TRANSFER_WRITE_BIT, dst_access);

			rsx::thread::begin_conditional_rendering(sources);
			return;
		}
	}

	auto scratch = vk::get_scratch_buffer();
	u32 dst_offset = 0;
	size_t first = 0;
	size_t last;

	if (!partial_eval) [[likely]]
	{
		last = sources.size();
	}
	else
	{
		last = 1;
	}

	for (size_t i = first; i < last; ++i)
	{
		auto& query_info = m_occlusion_map[sources[i]->driver_handle];
		for (const auto& index : query_info.indices)
		{
			m_occlusion_query_pool.get_query_result_indirect(*m_current_command_buffer, index, scratch->value, dst_offset);
			dst_offset += 4;
		}
	}

	if (dst_offset)
	{
		// Fast path should have been caught above
		verify(HERE), dst_offset > 4;

		if (!partial_eval)
		{
			// Clear result to zero
			vkCmdFillBuffer(*m_current_command_buffer, m_cond_render_buffer->value, 0, 4, 0);
			vk::insert_buffer_memory_barrier(*m_current_command_buffer, m_cond_render_buffer->value, 0, 4,
				VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
				VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_WRITE_BIT);
		}

		vk::insert_buffer_memory_barrier(*m_current_command_buffer, scratch->value, 0, dst_offset,
			VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
			VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT);

		vk::get_compute_task<vk::cs_aggregator>()->run(*m_current_command_buffer, m_cond_render_buffer.get(), scratch, dst_offset / 4);

		vk::insert_buffer_memory_barrier(*m_current_command_buffer, m_cond_render_buffer->value, 0, 4,
			VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, dst_stage,
			VK_ACCESS_SHADER_WRITE_BIT, dst_access);
	}
	else if (m_program)
	{
		// This can sometimes happen while shaders are compiling; only log if there is a program hit
		rsx_log.warning("Dubious query data pushed to cond render! Please report to developers (q.pending=%d)", sources.front()->pending);
	}

	rsx::thread::begin_conditional_rendering(sources);
}

void VKGSRender::end_conditional_rendering()
{
	thread::end_conditional_rendering();
}
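
// Sketch of how the aggregated predicate word would drive the hardware path on
// devices exposing VK_EXT_conditional_rendering; the emulated path instead
// reads the same word from the vertex shader via the bound storage buffer.
// The Begin entry point on the device wrapper is an assumption here.
#if 0
VkConditionalRenderingBeginInfoEXT info = {};
info.sType = VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT;
info.buffer = m_cond_render_buffer->value; // 4-byte result written by the aggregation above
info.offset = 0;
info.flags = 0;                            // draws execute if the u32 at 'offset' is non-zero

m_device->cmdBeginConditionalRenderingEXT(*m_current_command_buffer, &info);
// ... conditionally rendered draws ...
m_device->cmdEndConditionalRenderingEXT(*m_current_command_buffer);
#endif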
bool VKGSRender::on_decompiler_task()
{
	return m_prog_buffer->async_update(8, *m_device, pipeline_layout).first;
}