2020-12-05 13:08:24 +01:00
# include "stdafx.h"
2020-03-22 11:20:31 +01:00
# include "../Overlays/overlay_shader_compile_notification.h"
# include "../Overlays/Shaders/shader_loading_dialog_native.h"
2015-10-26 22:09:31 +01:00
# include "GLGSRender.h"
2019-10-02 02:47:19 +02:00
# include "GLCompute.h"
2020-11-03 05:18:42 +01:00
# include "Emu/Memory/vm_locking.h"
2020-12-13 12:54:43 +01:00
# include "Emu/RSX/rsx_methods.h"
2012-11-15 00:39:56 +01:00
2021-05-12 23:56:01 +02:00
# include "../Program/program_state_cache2.hpp"
2020-12-21 15:12:05 +01:00
2018-10-11 00:17:19 +02:00
// Returns the accumulated CPU cycle count consumed by the RSX thread.
u64 GLGSRender::get_cycles()
{
	// The renderer object is always hosted inside a named_thread wrapper
	auto& rsx_thread = static_cast<named_thread<GLGSRender>&>(*this);
	return thread_ctrl::get_cycles(rsx_thread);
}
2017-05-20 13:45:02 +02:00
// Constructor: sets up the on-disk shader cache, selects a vertex cache
// implementation from user settings, and declares which hardware features
// this backend supports to the common RSX pipeline.
GLGSRender::GLGSRender() : GSRender()
{
	m_shaders_cache = std::make_unique<gl::shader_cache>(m_prog_buffer, "opengl", "v1.93");

	// A pass-through cache is required when caching is explicitly disabled
	// or when multiple threads may feed vertex data concurrently.
	const bool want_null_cache = g_cfg.video.disable_vertex_cache || g_cfg.video.multithreaded_rsx;
	if (want_null_cache)
	{
		m_vertex_cache = std::make_unique<gl::null_vertex_cache>();
	}
	else
	{
		m_vertex_cache = std::make_unique<gl::weak_vertex_cache>();
	}

	// Advertised backend capabilities
	backend_config.supports_hw_a2c = false;
	backend_config.supports_hw_a2one = false;
	backend_config.supports_multidraw = true;
}
2014-08-23 02:16:54 +02:00
2015-10-11 22:00:51 +02:00
extern CellGcmContextData current_context ;
2016-01-06 00:15:35 +01:00
void GLGSRender : : set_viewport ( )
{
2018-10-28 13:20:53 +01:00
// NOTE: scale offset matrix already contains the viewport transformation
2020-11-17 21:56:33 +01:00
const auto [ clip_width , clip_height ] = rsx : : apply_resolution_scale < true > (
rsx : : method_registers . surface_clip_width ( ) , rsx : : method_registers . surface_clip_height ( ) ) ;
2017-07-05 00:16:59 +02:00
glViewport ( 0 , 0 , clip_width , clip_height ) ;
2018-10-28 13:20:53 +01:00
}
2019-07-20 13:58:05 +02:00
void GLGSRender : : set_scissor ( bool clip_viewport )
2018-10-28 13:20:53 +01:00
{
2019-07-18 15:50:21 +02:00
areau scissor ;
2019-07-20 13:58:05 +02:00
if ( get_scissor ( scissor , clip_viewport ) )
2018-10-28 13:20:53 +01:00
{
2019-07-18 15:50:21 +02:00
// NOTE: window origin does not affect scissor region (probably only affects viewport matrix; already applied)
// See LIMBO [NPUB-30373] which uses shader window origin = top
glScissor ( scissor . x1 , scissor . y1 , scissor . width ( ) , scissor . height ( ) ) ;
gl_state . enable ( GL_TRUE , GL_SCISSOR_TEST ) ;
2018-10-28 13:20:53 +01:00
}
2016-01-06 00:15:35 +01:00
}
2015-11-26 09:06:29 +01:00
// One-time initialization executed on the RSX thread itself.
// Order matters throughout: GL contexts must all be created before any is
// bound, driver capabilities are queried before resources are sized, and
// the shader disk cache is loaded last (it may compile on worker contexts).
void GLGSRender::on_init_thread()
{
	ensure(m_frame);

	// NOTES: All contexts have to be created before any is bound to a thread
	// This allows context sharing to work (both GLRCs passed to wglShareLists have to be idle or you get ERROR_BUSY)
	m_context = m_frame->make_context();

	const auto shadermode = g_cfg.video.shadermode.get();

	if (shadermode != shader_mode::recompiler)
	{
		// Async compilation needs worker GL contexts; hand the compiler
		// callbacks that create/bind/destroy them through the frame window.
		auto context_create_func = [m_frame = m_frame]()
		{
			return m_frame->make_context();
		};
		auto context_bind_func = [m_frame = m_frame](draw_context_t ctx)
		{
			m_frame->set_current(ctx);
		};
		auto context_destroy_func = [m_frame = m_frame](draw_context_t ctx)
		{
			m_frame->delete_context(ctx);
		};

		gl::initialize_pipe_compiler(context_create_func, context_bind_func, context_destroy_func, g_cfg.video.shader_compiler_threads_count);
	}
	else
	{
		// Synchronous recompiler mode: compilation happens on this thread,
		// so the compiler gets a null context factory and a single worker.
		auto null_context_create_func = []() -> draw_context_t
		{
			return nullptr;
		};

		gl::initialize_pipe_compiler(null_context_create_func, {}, {}, 1);
	}

	// Bind primary context to main RSX thread
	m_frame->set_current(m_context);
	gl::set_primary_context_thread();

	// This renderer also implements the ZCULL report interface
	zcull_ctrl.reset(static_cast<::rsx::reports::ZCULL_control*>(this));
	m_occlusion_type = g_cfg.video.precise_zpass_count ? GL_SAMPLES_PASSED : GL_ANY_SAMPLES_PASSED;

	gl::init();

	//Enable adaptive vsync if vsync is requested
	gl::set_swapinterval(g_cfg.video.vsync ? -1 : 0);

	if (g_cfg.video.debug_output)
		gl::enable_debugging();

	rsx_log.notice("GL RENDERER: %s (%s)", reinterpret_cast<const char*>(glGetString(GL_RENDERER)), reinterpret_cast<const char*>(glGetString(GL_VENDOR)));
	rsx_log.notice("GL VERSION: %s", reinterpret_cast<const char*>(glGetString(GL_VERSION)));
	rsx_log.notice("GLSL VERSION: %s", reinterpret_cast<const char*>(glGetString(GL_SHADING_LANGUAGE_VERSION)));

	auto& gl_caps = gl::get_driver_caps();

	// Hard requirements: without these extensions the renderer cannot run
	if (!gl_caps.ARB_texture_buffer_supported)
	{
		fmt::throw_exception("Failed to initialize OpenGL renderer. ARB_texture_buffer_object is required but not supported by your GPU");
	}

	if (!gl_caps.ARB_dsa_supported && !gl_caps.EXT_dsa_supported)
	{
		fmt::throw_exception("Failed to initialize OpenGL renderer. ARB_direct_state_access or EXT_direct_state_access is required but not supported by your GPU");
	}

	// Soft requirements: warn and continue with degraded behavior
	if (!gl_caps.ARB_depth_buffer_float_supported && g_cfg.video.force_high_precision_z_buffer)
	{
		rsx_log.warning("High precision Z buffer requested but your GPU does not support GL_ARB_depth_buffer_float. Option ignored.");
	}

	if (!gl_caps.ARB_texture_barrier_supported && !gl_caps.NV_texture_barrier_supported && !g_cfg.video.strict_rendering_mode)
	{
		rsx_log.warning("Texture barriers are not supported by your GPU. Feedback loops will have undefined results.");
	}

	if (!gl_caps.ARB_bindless_texture_supported)
	{
		// The shader interpreter requires bindless textures; downgrade the
		// user's shader mode setting if the extension is missing.
		switch (shadermode)
		{
		case shader_mode::async_with_interpreter:
		case shader_mode::interpreter_only:
			rsx_log.error("Bindless texture extension required for shader interpreter is not supported on your GPU. Will use async recompiler as a fallback.");
			g_cfg.video.shadermode.set(shader_mode::async_recompiler);
			break;
		default:
			break;
		}
	}

	// Use industry standard resource alignment values as defaults
	m_uniform_buffer_offset_align = 256;
	m_min_texbuffer_alignment = 256;
	m_max_texbuffer_size = 0;

	glEnable(GL_VERTEX_PROGRAM_POINT_SIZE);

	// Query the driver's real alignment/size limits (overwrites the defaults)
	glGetIntegerv(GL_UNIFORM_BUFFER_OFFSET_ALIGNMENT, &m_uniform_buffer_offset_align);
	glGetIntegerv(GL_TEXTURE_BUFFER_OFFSET_ALIGNMENT, &m_min_texbuffer_alignment);
	glGetIntegerv(GL_MAX_TEXTURE_BUFFER_SIZE, &m_max_texbuffer_size);
	m_vao.create();

	// Set min alignment to 16-bytes for SSE optimizations with aligned addresses to work
	m_min_texbuffer_alignment = std::max(m_min_texbuffer_alignment, 16);
	m_uniform_buffer_offset_align = std::max(m_uniform_buffer_offset_align, 16);

	rsx_log.notice("Supported texel buffer size reported: %d bytes", m_max_texbuffer_size);
	if (m_max_texbuffer_size < (16 * 0x100000))
	{
		rsx_log.error("Max texture buffer size supported is less than 16M which is useless. Expect undefined behaviour.");
		m_max_texbuffer_size = (16 * 0x100000);
	}

	// Array stream buffer
	{
		m_gl_persistent_stream_buffer = std::make_unique<gl::texture>(GL_TEXTURE_BUFFER, 0, 0, 0, 0, GL_R8UI);
		_SelectTexture(GL_STREAM_BUFFER_START + 0);
		glBindTexture(GL_TEXTURE_BUFFER, m_gl_persistent_stream_buffer->id());
	}

	// Register stream buffer
	{
		m_gl_volatile_stream_buffer = std::make_unique<gl::texture>(GL_TEXTURE_BUFFER, 0, 0, 0, 0, GL_R8UI);
		_SelectTexture(GL_STREAM_BUFFER_START + 1);
		glBindTexture(GL_TEXTURE_BUFFER, m_gl_volatile_stream_buffer->id());
	}

	// Fallback null texture instead of relying on texture0
	{
		// Single transparent-black texel shared by all target dimensionalities
		std::vector<u32> pixeldata = { 0, 0, 0, 0 };

		// 1D
		auto tex1D = std::make_unique<gl::texture>(GL_TEXTURE_1D, 1, 1, 1, 1, GL_RGBA8);
		tex1D->copy_from(pixeldata.data(), gl::texture::format::rgba, gl::texture::type::uint_8_8_8_8, {});

		// 2D
		auto tex2D = std::make_unique<gl::texture>(GL_TEXTURE_2D, 1, 1, 1, 1, GL_RGBA8);
		tex2D->copy_from(pixeldata.data(), gl::texture::format::rgba, gl::texture::type::uint_8_8_8_8, {});

		// 3D
		auto tex3D = std::make_unique<gl::texture>(GL_TEXTURE_3D, 1, 1, 1, 1, GL_RGBA8);
		tex3D->copy_from(pixeldata.data(), gl::texture::format::rgba, gl::texture::type::uint_8_8_8_8, {});

		// CUBE
		auto texCUBE = std::make_unique<gl::texture>(GL_TEXTURE_CUBE_MAP, 1, 1, 1, 1, GL_RGBA8);
		texCUBE->copy_from(pixeldata.data(), gl::texture::format::rgba, gl::texture::type::uint_8_8_8_8, {});

		m_null_textures[GL_TEXTURE_1D] = std::move(tex1D);
		m_null_textures[GL_TEXTURE_2D] = std::move(tex2D);
		m_null_textures[GL_TEXTURE_3D] = std::move(tex3D);
		m_null_textures[GL_TEXTURE_CUBE_MAP] = std::move(texCUBE);
	}

	if (!gl_caps.ARB_buffer_storage_supported)
	{
		rsx_log.warning("Forcing use of legacy OpenGL buffers because ARB_buffer_storage is not supported");
		// TODO: do not modify config options
		g_cfg.video.renderdoc_compatiblity.from_string("true");
	}

	if (g_cfg.video.renderdoc_compatiblity)
	{
		// Legacy path: buffers are not persistently mapped and must be
		// flushed manually before draw calls.
		rsx_log.warning("Using legacy openGL buffers.");
		manually_flush_ring_buffers = true;

		m_attrib_ring_buffer = std::make_unique<gl::legacy_ring_buffer>();
		m_transform_constants_buffer = std::make_unique<gl::legacy_ring_buffer>();
		m_fragment_constants_buffer = std::make_unique<gl::legacy_ring_buffer>();
		m_fragment_env_buffer = std::make_unique<gl::legacy_ring_buffer>();
		m_vertex_env_buffer = std::make_unique<gl::legacy_ring_buffer>();
		m_texture_parameters_buffer = std::make_unique<gl::legacy_ring_buffer>();
		m_vertex_layout_buffer = std::make_unique<gl::legacy_ring_buffer>();
		m_index_ring_buffer = std::make_unique<gl::legacy_ring_buffer>();
		m_vertex_instructions_buffer = std::make_unique<gl::legacy_ring_buffer>();
		m_fragment_instructions_buffer = std::make_unique<gl::legacy_ring_buffer>();
		m_raster_env_ring_buffer = std::make_unique<gl::legacy_ring_buffer>();
	}
	else
	{
		m_attrib_ring_buffer = std::make_unique<gl::ring_buffer>();
		m_transform_constants_buffer = std::make_unique<gl::ring_buffer>();
		m_fragment_constants_buffer = std::make_unique<gl::ring_buffer>();
		m_fragment_env_buffer = std::make_unique<gl::ring_buffer>();
		m_vertex_env_buffer = std::make_unique<gl::ring_buffer>();
		m_texture_parameters_buffer = std::make_unique<gl::ring_buffer>();
		m_vertex_layout_buffer = std::make_unique<gl::ring_buffer>();
		m_index_ring_buffer = std::make_unique<gl::ring_buffer>();
		m_vertex_instructions_buffer = std::make_unique<gl::ring_buffer>();
		m_fragment_instructions_buffer = std::make_unique<gl::ring_buffer>();
		m_raster_env_ring_buffer = std::make_unique<gl::ring_buffer>();
	}

	// Allocate backing storage (sizes in MiB-multiples of 0x100000)
	m_attrib_ring_buffer->create(gl::buffer::target::texture, 256 * 0x100000);
	m_index_ring_buffer->create(gl::buffer::target::element_array, 64 * 0x100000);
	m_transform_constants_buffer->create(gl::buffer::target::uniform, 64 * 0x100000);
	m_fragment_constants_buffer->create(gl::buffer::target::uniform, 16 * 0x100000);
	m_fragment_env_buffer->create(gl::buffer::target::uniform, 16 * 0x100000);
	m_vertex_env_buffer->create(gl::buffer::target::uniform, 16 * 0x100000);
	m_texture_parameters_buffer->create(gl::buffer::target::uniform, 16 * 0x100000);
	m_vertex_layout_buffer->create(gl::buffer::target::uniform, 16 * 0x100000);
	m_raster_env_ring_buffer->create(gl::buffer::target::uniform, 16 * 0x100000);

	if (shadermode == shader_mode::async_with_interpreter || shadermode == shader_mode::interpreter_only)
	{
		// Interpreter consumes raw RSX shader instructions from SSBOs
		m_vertex_instructions_buffer->create(gl::buffer::target::ssbo, 16 * 0x100000);
		m_fragment_instructions_buffer->create(gl::buffer::target::ssbo, 16 * 0x100000);
		m_shader_interpreter.create();
	}

	if (gl_caps.vendor_AMD)
	{
		m_identity_index_buffer = std::make_unique<gl::buffer>();
		m_identity_index_buffer->create(gl::buffer::target::element_array, 1 * 0x100000, nullptr, gl::buffer::memory_type::host_visible);

		// Initialize with 256k identity entries
		auto* dst = reinterpret_cast<u32*>(m_identity_index_buffer->map(gl::buffer::access::write));
		for (u32 n = 0; n < (0x100000 >> 2); ++n)
		{
			dst[n] = n;
		}

		m_identity_index_buffer->unmap();
	}
	else if (gl_caps.vendor_NVIDIA)
	{
		// NOTE: On NVIDIA cards going back decades (including the PS3) there is a slight normalization inaccuracy in compressed formats.
		// Confirmed in BLES01916 (The Evil Within) which uses RGB565 for some virtual texturing data.
		backend_config.supports_hw_renormalization = true;
	}

	// Stream views are windows into the attrib ring buffer, clamped to the
	// driver's maximum texel buffer size queried above
	m_persistent_stream_view.update(m_attrib_ring_buffer.get(), 0, std::min<u32>(static_cast<u32>(m_attrib_ring_buffer->size()), m_max_texbuffer_size));
	m_volatile_stream_view.update(m_attrib_ring_buffer.get(), 0, std::min<u32>(static_cast<u32>(m_attrib_ring_buffer->size()), m_max_texbuffer_size));
	m_gl_persistent_stream_buffer->copy_from(m_persistent_stream_view);
	m_gl_volatile_stream_buffer->copy_from(m_volatile_stream_view);

	m_vao.element_array_buffer = *m_index_ring_buffer;

	// Bind one sampler object per texture image unit, in FS/mirror/VS order
	int image_unit = 0;
	for (auto& sampler : m_fs_sampler_states)
	{
		sampler.create();
		sampler.bind(image_unit++);
	}

	for (auto& sampler : m_fs_sampler_mirror_states)
	{
		sampler.create();
		sampler.apply_defaults();
		sampler.bind(image_unit++);
	}

	for (auto& sampler : m_vs_sampler_states)
	{
		sampler.create();
		sampler.bind(image_unit++);
	}

	//Occlusion query
	for (u32 i = 0; i < occlusion_query_count; ++i)
	{
		GLuint handle = 0;
		auto& query = m_occlusion_query_data[i];
		glGenQueries(1, &handle);

		query.driver_handle = handle;
		query.pending = false;
		query.active = false;
		query.result = 0;
	}

	//Clip planes are shader controlled; enable all planes driver-side
	glEnable(GL_CLIP_DISTANCE0 + 0);
	glEnable(GL_CLIP_DISTANCE0 + 1);
	glEnable(GL_CLIP_DISTANCE0 + 2);
	glEnable(GL_CLIP_DISTANCE0 + 3);
	glEnable(GL_CLIP_DISTANCE0 + 4);
	glEnable(GL_CLIP_DISTANCE0 + 5);

	m_ui_renderer.create();
	m_video_output_pass.create();

	m_gl_texture_cache.initialize();

	m_prog_buffer.initialize
	(
		[this](void* const& props, const RSXVertexProgram& vp, const RSXFragmentProgram& fp)
		{
			// Program was linked or queued for linking
			m_shaders_cache->store(props, vp, fp);
		}
	);

	// Load the shader disk cache, with a progress dialog when an overlay
	// manager is available (otherwise the frame window is hidden meanwhile)
	if (!m_overlay_manager)
	{
		m_frame->hide();
		m_shaders_cache->load(nullptr);
		m_frame->show();
	}
	else
	{
		rsx::shader_loading_dialog_native dlg(this);

		m_shaders_cache->load(&dlg);
	}
}
2018-01-17 17:14:00 +01:00
2015-11-26 09:06:29 +01:00
// Teardown executed on the RSX thread before it exits.
// Releases every GL resource created in on_init_thread(), in roughly the
// reverse order of creation; the base-class exit runs first because it may
// still dispatch work to this renderer.
void GLGSRender::on_exit()
{
	// Destroy internal RSX state, may call upon this->do_local_task
	GSRender::on_exit();

	// Globals
	// TODO: Move these
	gl::destroy_compute_tasks();

	if (gl::g_typeless_transfer_buffer)
	{
		gl::g_typeless_transfer_buffer.remove();
	}

	gl::debug::g_vis_texture.reset(); // TODO

	gl::destroy_pipe_compiler();

	m_prog_buffer.clear();
	m_rtts.destroy();

	for (auto& fbo : m_framebuffer_cache)
	{
		fbo.remove();
	}

	m_framebuffer_cache.clear();

	if (m_flip_fbo)
	{
		m_flip_fbo.remove();
	}

	if (m_flip_tex_color)
	{
		m_flip_tex_color.reset();
	}

	if (m_vao)
	{
		m_vao.remove();
	}

	m_gl_persistent_stream_buffer.reset();
	m_gl_volatile_stream_buffer.reset();

	// Sampler objects (FS, FS-mirror, VS banks)
	for (auto& sampler : m_fs_sampler_states)
	{
		sampler.remove();
	}

	for (auto& sampler : m_fs_sampler_mirror_states)
	{
		sampler.remove();
	}

	for (auto& sampler : m_vs_sampler_states)
	{
		sampler.remove();
	}

	// Ring buffers; each is null-checked since legacy/interpreter paths may
	// never have allocated some of them
	if (m_attrib_ring_buffer)
	{
		m_attrib_ring_buffer->remove();
	}

	if (m_transform_constants_buffer)
	{
		m_transform_constants_buffer->remove();
	}

	if (m_fragment_constants_buffer)
	{
		m_fragment_constants_buffer->remove();
	}

	if (m_fragment_env_buffer)
	{
		m_fragment_env_buffer->remove();
	}

	if (m_vertex_env_buffer)
	{
		m_vertex_env_buffer->remove();
	}

	if (m_texture_parameters_buffer)
	{
		m_texture_parameters_buffer->remove();
	}

	if (m_vertex_layout_buffer)
	{
		m_vertex_layout_buffer->remove();
	}

	if (m_index_ring_buffer)
	{
		m_index_ring_buffer->remove();
	}

	if (m_identity_index_buffer)
	{
		m_identity_index_buffer->remove();
	}

	if (m_vertex_instructions_buffer)
	{
		m_vertex_instructions_buffer->remove();
	}

	if (m_fragment_instructions_buffer)
	{
		m_fragment_instructions_buffer->remove();
	}

	if (m_raster_env_ring_buffer)
	{
		m_raster_env_ring_buffer->remove();
	}

	m_null_textures.clear();
	m_text_printer.close();
	m_gl_texture_cache.destroy();
	m_ui_renderer.destroy();
	m_video_output_pass.destroy();

	m_shader_interpreter.destroy();

	// Release all occlusion query objects and clear their bookkeeping
	for (u32 i = 0; i < occlusion_query_count; ++i)
	{
		auto& query = m_occlusion_query_data[i];
		query.active = false;
		query.pending = false;

		GLuint handle = query.driver_handle;
		glDeleteQueries(1, &handle);
		query.driver_handle = 0;
	}

	zcull_ctrl.release();
	gl::set_primary_context_thread(false);
}
2017-02-16 19:29:56 +01:00
// Executes an RSX surface clear (NV4097_CLEAR_SURFACE).
// 'arg' is the raw clear bitfield: depth/stencil bits in the low nibble
// region, color component mask in bits 4-7. Builds a GL clear mask while
// programming the matching write masks and clear values, then issues a
// single glClear.
void GLGSRender::clear_surface(u32 arg)
{
	if (skip_current_frame) return;

	// If stencil write mask is disabled, remove clear_stencil bit
	if (!rsx::method_registers.stencil_mask()) arg &= ~RSX_GCM_CLEAR_STENCIL_BIT;

	// Ignore invalid clear flags
	if ((arg & RSX_GCM_CLEAR_ANY_MASK) == 0) return;

	// Build the framebuffer creation context from the requested aspects
	u8 ctx = rsx::framebuffer_creation_context::context_draw;
	if (arg & RSX_GCM_CLEAR_COLOR_MASK) ctx |= rsx::framebuffer_creation_context::context_clear_color;
	if (arg & RSX_GCM_CLEAR_DEPTH_STENCIL_MASK) ctx |= rsx::framebuffer_creation_context::context_clear_depth;

	init_buffers(static_cast<rsx::framebuffer_creation_context>(ctx), true);

	if (!framebuffer_status_valid) return;

	GLbitfield mask = 0;

	gl::command_context cmd{ gl_state };

	// A clear counts as full-frame only when the scissor covers the whole
	// clip region; partial clears must preserve existing surface contents
	const bool full_frame =
		rsx::method_registers.scissor_origin_x() == 0 &&
		rsx::method_registers.scissor_origin_y() == 0 &&
		rsx::method_registers.scissor_width() >= rsx::method_registers.surface_clip_width() &&
		rsx::method_registers.scissor_height() >= rsx::method_registers.surface_clip_height();

	bool update_color = false, update_z = false;
	rsx::surface_depth_format2 surface_depth_format = rsx::method_registers.surface_depth_fmt();

	if (auto ds = std::get<1>(m_rtts.m_bound_depth_stencil); arg & RSX_GCM_CLEAR_DEPTH_STENCIL_MASK)
	{
		if (arg & RSX_GCM_CLEAR_DEPTH_BIT)
		{
			// Normalize the raw register value against the format's max depth
			u32 max_depth_value = get_max_depth_value(surface_depth_format);
			u32 clear_depth = rsx::method_registers.z_clear_value(is_depth_stencil_format(surface_depth_format));

			gl_state.depth_mask(GL_TRUE);
			gl_state.clear_depth(f32(clear_depth) / max_depth_value);
			mask |= GLenum(gl::buffers::depth);
		}

		if (is_depth_stencil_format(surface_depth_format))
		{
			if (arg & RSX_GCM_CLEAR_STENCIL_BIT)
			{
				u8 clear_stencil = rsx::method_registers.stencil_clear_value();

				gl_state.stencil_mask(rsx::method_registers.stencil_mask());
				gl_state.clear_stencil(clear_stencil);
				mask |= GLenum(gl::buffers::stencil);
			}

			// Partial clear (one aspect only, or scissored): the untouched
			// portion of the surface must hold valid data first
			if (const auto ds_mask = (arg & RSX_GCM_CLEAR_DEPTH_STENCIL_MASK);
				ds_mask != RSX_GCM_CLEAR_DEPTH_STENCIL_MASK || !full_frame)
			{
				ensure(mask);

				if (ds->state_flags & rsx::surface_state_flags::erase_bkgnd && // Needs initialization
					ds->old_contents.empty() && !g_cfg.video.read_depth_buffer) // No way to load data from memory, so no initialization given
				{
					// Only one aspect was cleared. Make sure to memory initialize the other before removing dirty flag
					if (ds_mask == RSX_GCM_CLEAR_DEPTH_BIT)
					{
						// Depth was cleared, initialize stencil
						gl_state.stencil_mask(0xFF);
						gl_state.clear_stencil(0xFF);
						mask |= GLenum(gl::buffers::stencil);
					}
					else if (ds_mask == RSX_GCM_CLEAR_STENCIL_BIT)
					{
						// Stencil was cleared, initialize depth
						gl_state.depth_mask(GL_TRUE);
						gl_state.clear_depth(1.f);
						mask |= GLenum(gl::buffers::depth);
					}
				}
				else
				{
					// Surface already has (or can load) data; resolve any
					// pending transfers before partially overwriting it
					ds->write_barrier(cmd);
				}
			}
		}

		if (mask)
		{
			// Memory has been initialized
			update_z = true;
		}
	}

	if (auto colormask = (arg & 0xf0))
	{
		u8 clear_a = rsx::method_registers.clear_color_a();
		u8 clear_r = rsx::method_registers.clear_color_r();
		u8 clear_g = rsx::method_registers.clear_color_g();
		u8 clear_b = rsx::method_registers.clear_color_b();

		// Remap the ARGB clear color/mask to the bound surface format's channel layout
		switch (rsx::method_registers.surface_color())
		{
		case rsx::surface_color_format::x32:
		case rsx::surface_color_format::w16z16y16x16:
		case rsx::surface_color_format::w32z32y32x32:
		{
			// Nop
			colormask = 0;
			break;
		}
		case rsx::surface_color_format::b8:
		{
			rsx::get_b8_clear_color(clear_r, clear_g, clear_b, clear_a);
			colormask = rsx::get_b8_clearmask(colormask);
			break;
		}
		case rsx::surface_color_format::g8b8:
		{
			rsx::get_g8b8_clear_color(clear_r, clear_g, clear_b, clear_a);
			colormask = rsx::get_g8b8_r8g8_clearmask(colormask);
			break;
		}
		case rsx::surface_color_format::a8b8g8r8:
		case rsx::surface_color_format::x8b8g8r8_o8b8g8r8:
		case rsx::surface_color_format::x8b8g8r8_z8b8g8r8:
		{
			rsx::get_abgr8_clear_color(clear_r, clear_g, clear_b, clear_a);
			colormask = rsx::get_abgr8_clearmask(colormask);
			break;
		}
		default:
		{
			break;
		}
		}

		if (colormask)
		{
			gl_state.clear_color(clear_r, clear_g, clear_b, clear_a);
			mask |= GLenum(gl::buffers::color);

			// Apply per-attachment color write masks for every bound MRT
			int hw_index = 0;
			for (const auto& index : m_rtts.m_bound_render_target_ids)
			{
				if (!full_frame)
				{
					// Partial clear: ensure target contents are valid first
					m_rtts.m_bound_render_targets[index].second->write_barrier(cmd);
				}

				gl_state.color_maski(hw_index++, colormask);
			}

			update_color = true;
		}
	}

	if (update_color || update_z)
	{
		m_rtts.on_write({ update_color, update_color, update_color, update_color }, update_z);
	}

	glClear(mask);
}
2013-08-26 16:18:59 +02:00
2018-07-11 22:51:29 +02:00
// Selects (and if necessary builds) the GL program to use for the next draw.
// Returns true when a usable program ends up bound in m_program; false only
// when async compilation is pending and no interpreter fallback is allowed.
bool GLGSRender::load_program()
{
	const auto shadermode = g_cfg.video.shadermode.get();

	if (m_graphics_state & rsx::pipeline_state::invalidate_pipeline_bits)
	{
		// Pipeline state changed - re-derive both shader stages from RSX registers
		get_current_fragment_program(fs_sampler_state);
		ensure(current_fragment_program.valid);

		get_current_vertex_program(vs_sampler_state);
	}
	else if (m_program)
	{
		if (!m_shader_interpreter.is_interpreter(m_program)) [[likely]]
		{
			// Nothing changed and a native (non-interpreter) program is already bound
			return true;
		}

		if (shadermode == shader_mode::interpreter_only)
		{
			// Interpreter-only mode: refresh the interpreter program and we are done
			m_program = m_shader_interpreter.get(current_fp_metadata);
			return true;
		}
	}

	// Remember whether the interpreter was active so a switch can be detected below
	const bool was_interpreter = m_shader_interpreter.is_interpreter(m_program);

	m_vertex_prog = nullptr;
	m_fragment_prog = nullptr;

	if (shadermode != shader_mode::interpreter_only) [[likely]]
	{
		void* pipeline_properties = nullptr;

		// May yield a null program while async compilation is still in flight
		std::tie(m_program, m_vertex_prog, m_fragment_prog) = m_prog_buffer.get_graphics_pipeline(current_vertex_program, current_fragment_program, pipeline_properties,
			shadermode != shader_mode::recompiler, true);

		if (m_prog_buffer.check_cache_missed())
		{
			// Notify the user with HUD notification
			if (g_cfg.misc.show_shader_compilation_hint)
			{
				if (m_overlay_manager)
				{
					if (auto dlg = m_overlay_manager->get<rsx::overlays::shader_compile_notification>())
					{
						// Extend duration
						dlg->touch();
					}
					else
					{
						// Create dialog but do not show immediately
						m_overlay_manager->create<rsx::overlays::shader_compile_notification>();
					}
				}
			}
		}
		else
		{
			ensure(m_program);
			m_program->sync();
		}
	}
	else
	{
		m_program = nullptr;
	}

	if (!m_program && (shadermode == shader_mode::async_with_interpreter || shadermode == shader_mode::interpreter_only))
	{
		// Fall back to interpreter
		m_program = m_shader_interpreter.get(current_fp_metadata);
		if (was_interpreter != m_shader_interpreter.is_interpreter(m_program))
		{
			// Program has changed, reupload
			m_interpreter_state = rsx::invalidate_pipeline_bits;
		}
	}

	return m_program != nullptr;
}
2018-10-28 16:58:42 +01:00
void GLGSRender : : load_program_env ( )
2018-07-11 22:51:29 +02:00
{
if ( ! m_program )
{
2020-12-09 16:04:52 +01:00
fmt : : throw_exception ( " Unreachable right now " ) ;
2018-07-11 22:51:29 +02:00
}
2018-10-20 16:43:00 +02:00
const u32 fragment_constants_size = current_fp_metadata . program_constants_buffer_length ;
const bool update_transform_constants = ! ! ( m_graphics_state & rsx : : pipeline_state : : transform_constants_dirty ) ;
const bool update_fragment_constants = ! ! ( m_graphics_state & rsx : : pipeline_state : : fragment_constants_dirty ) & & fragment_constants_size ;
const bool update_vertex_env = ! ! ( m_graphics_state & rsx : : pipeline_state : : vertex_state_dirty ) ;
const bool update_fragment_env = ! ! ( m_graphics_state & rsx : : pipeline_state : : fragment_state_dirty ) ;
const bool update_fragment_texture_env = ! ! ( m_graphics_state & rsx : : pipeline_state : : fragment_texture_state_dirty ) ;
2020-04-19 19:23:12 +02:00
const bool update_instruction_buffers = ( ! ! m_interpreter_state & & m_shader_interpreter . is_interpreter ( m_program ) ) ;
2020-05-28 23:51:36 +02:00
const bool update_raster_env = ( rsx : : method_registers . polygon_stipple_enabled ( ) & & ! ! ( m_graphics_state & rsx : : pipeline_state : : polygon_stipple_pattern_dirty ) ) ;
2018-10-20 16:43:00 +02:00
2018-07-11 22:51:29 +02:00
m_program - > use ( ) ;
2017-03-11 10:07:26 +01:00
if ( manually_flush_ring_buffers )
{
2018-10-20 16:43:00 +02:00
if ( update_fragment_env ) m_fragment_env_buffer - > reserve_storage_on_heap ( 128 ) ;
if ( update_vertex_env ) m_vertex_env_buffer - > reserve_storage_on_heap ( 256 ) ;
if ( update_fragment_texture_env ) m_texture_parameters_buffer - > reserve_storage_on_heap ( 256 ) ;
2020-12-18 15:43:34 +01:00
if ( update_fragment_constants ) m_fragment_constants_buffer - > reserve_storage_on_heap ( utils : : align ( fragment_constants_size , 256 ) ) ;
2018-04-20 22:44:34 +02:00
if ( update_transform_constants ) m_transform_constants_buffer - > reserve_storage_on_heap ( 8192 ) ;
2020-05-28 23:51:36 +02:00
if ( update_raster_env ) m_raster_env_ring_buffer - > reserve_storage_on_heap ( 128 ) ;
2020-03-24 22:22:21 +01:00
if ( update_instruction_buffers )
{
m_vertex_instructions_buffer - > reserve_storage_on_heap ( 513 * 16 ) ;
m_fragment_instructions_buffer - > reserve_storage_on_heap ( current_fp_metadata . program_ucode_length ) ;
}
2017-03-11 10:07:26 +01:00
}
2018-10-20 16:43:00 +02:00
if ( update_vertex_env )
{
// Vertex state
2018-10-31 22:25:59 +01:00
auto mapping = m_vertex_env_buffer - > alloc_from_heap ( 144 , m_uniform_buffer_offset_align ) ;
2018-10-20 16:43:00 +02:00
auto buf = static_cast < u8 * > ( mapping . first ) ;
fill_scale_offset_data ( buf , false ) ;
fill_user_clip_data ( buf + 64 ) ;
* ( reinterpret_cast < u32 * > ( buf + 128 ) ) = rsx : : method_registers . transform_branch_bits ( ) ;
2021-08-14 18:46:05 +02:00
* ( reinterpret_cast < f32 * > ( buf + 132 ) ) = rsx : : method_registers . point_size ( ) * rsx : : get_resolution_scale ( ) ;
2018-10-31 22:25:59 +01:00
* ( reinterpret_cast < f32 * > ( buf + 136 ) ) = rsx : : method_registers . clip_min ( ) ;
* ( reinterpret_cast < f32 * > ( buf + 140 ) ) = rsx : : method_registers . clip_max ( ) ;
2018-10-20 16:43:00 +02:00
2019-10-02 01:26:29 +02:00
m_vertex_env_buffer - > bind_range ( GL_VERTEX_PARAMS_BIND_SLOT , mapping . second , 144 ) ;
2018-10-20 16:43:00 +02:00
}
2018-04-20 22:44:34 +02:00
if ( update_transform_constants )
2017-03-11 10:07:26 +01:00
{
// Vertex constants
2022-03-23 23:23:20 +01:00
const usz transform_constants_size = ( ! m_vertex_prog | | m_vertex_prog - > has_indexed_constants ) ? 8192 : m_vertex_prog - > constant_ids . size ( ) * 16 ;
if ( transform_constants_size )
{
auto mapping = m_transform_constants_buffer - > alloc_from_heap ( transform_constants_size , m_uniform_buffer_offset_align ) ;
auto buf = static_cast < u8 * > ( mapping . first ) ;
2022-03-23 21:59:42 +01:00
2022-03-23 23:23:20 +01:00
const std : : vector < u16 > & constant_ids = ( transform_constants_size = = 8192 ) ? std : : vector < u16 > { } : m_vertex_prog - > constant_ids ;
2022-03-23 23:51:59 +01:00
fill_vertex_program_constants_data ( buf , constant_ids ) ;
2018-10-20 16:43:00 +02:00
2022-03-23 23:23:20 +01:00
m_transform_constants_buffer - > bind_range ( GL_VERTEX_CONSTANT_BUFFERS_BIND_SLOT , mapping . second , transform_constants_size ) ;
}
2017-03-11 10:07:26 +01:00
}
2016-08-26 16:23:23 +02:00
2020-03-24 22:22:21 +01:00
if ( update_fragment_constants & & ! update_instruction_buffers )
2018-02-23 09:30:13 +01:00
{
2018-10-20 16:43:00 +02:00
// Fragment constants
auto mapping = m_fragment_constants_buffer - > alloc_from_heap ( fragment_constants_size , m_uniform_buffer_offset_align ) ;
auto buf = static_cast < u8 * > ( mapping . first ) ;
2019-11-09 16:51:53 +01:00
m_prog_buffer . fill_fragment_constants_buffer ( { reinterpret_cast < float * > ( buf ) , fragment_constants_size } ,
2022-03-23 20:53:18 +01:00
* ensure ( m_fragment_prog ) , current_fragment_program , true ) ;
2018-10-20 16:43:00 +02:00
2019-10-02 01:26:29 +02:00
m_fragment_constants_buffer - > bind_range ( GL_FRAGMENT_CONSTANT_BUFFERS_BIND_SLOT , mapping . second , fragment_constants_size ) ;
2018-02-23 09:30:13 +01:00
}
2017-04-04 18:14:36 +02:00
2018-10-20 16:43:00 +02:00
if ( update_fragment_env )
{
// Fragment state
auto mapping = m_fragment_env_buffer - > alloc_from_heap ( 32 , m_uniform_buffer_offset_align ) ;
auto buf = static_cast < u8 * > ( mapping . first ) ;
fill_fragment_state_buffer ( buf , current_fragment_program ) ;
2019-10-02 01:26:29 +02:00
m_fragment_env_buffer - > bind_range ( GL_FRAGMENT_STATE_BIND_SLOT , mapping . second , 32 ) ;
2018-10-20 16:43:00 +02:00
}
2016-06-12 11:05:22 +02:00
2018-10-20 16:43:00 +02:00
if ( update_fragment_texture_env )
{
// Fragment texture parameters
2021-07-31 16:27:16 +02:00
auto mapping = m_texture_parameters_buffer - > alloc_from_heap ( 512 , m_uniform_buffer_offset_align ) ;
2021-05-12 23:56:01 +02:00
current_fragment_program . texture_params . write_to ( mapping . first , current_fp_metadata . referenced_textures_mask ) ;
2017-03-11 10:07:26 +01:00
2021-07-31 16:27:16 +02:00
m_texture_parameters_buffer - > bind_range ( GL_FRAGMENT_TEXTURE_PARAMS_BIND_SLOT , mapping . second , 512 ) ;
2018-10-20 16:43:00 +02:00
}
2016-06-27 00:52:08 +02:00
2020-05-28 23:51:36 +02:00
if ( update_raster_env )
{
auto mapping = m_raster_env_ring_buffer - > alloc_from_heap ( 128 , m_uniform_buffer_offset_align ) ;
std : : memcpy ( mapping . first , rsx : : method_registers . polygon_stipple_pattern ( ) , 128 ) ;
m_raster_env_ring_buffer - > bind_range ( GL_RASTERIZER_STATE_BIND_SLOT , mapping . second , 128 ) ;
m_graphics_state & = ~ ( rsx : : pipeline_state : : polygon_stipple_pattern_dirty ) ;
}
2020-03-24 22:22:21 +01:00
if ( update_instruction_buffers )
{
if ( m_interpreter_state & rsx : : vertex_program_dirty )
{
// Attach vertex buffer data
const auto vp_block_length = current_vp_metadata . ucode_length + 16 ;
auto vp_mapping = m_vertex_instructions_buffer - > alloc_from_heap ( vp_block_length , 16 ) ;
auto vp_buf = static_cast < u8 * > ( vp_mapping . first ) ;
auto vp_config = reinterpret_cast < u32 * > ( vp_buf ) ;
vp_config [ 0 ] = current_vertex_program . base_address ;
vp_config [ 1 ] = current_vertex_program . entry ;
vp_config [ 2 ] = current_vertex_program . output_mask ;
2020-04-18 19:38:56 +02:00
vp_config [ 3 ] = rsx : : method_registers . two_side_light_en ( ) ? 1u : 0u ;
2020-03-24 22:22:21 +01:00
std : : memcpy ( vp_buf + 16 , current_vertex_program . data . data ( ) , current_vp_metadata . ucode_length ) ;
m_vertex_instructions_buffer - > bind_range ( GL_INTERPRETER_VERTEX_BLOCK , vp_mapping . second , vp_block_length ) ;
m_vertex_instructions_buffer - > notify ( ) ;
}
if ( m_interpreter_state & rsx : : fragment_program_dirty )
{
// Attach fragment buffer data
const auto fp_block_length = current_fp_metadata . program_ucode_length + 80 ;
auto fp_mapping = m_fragment_instructions_buffer - > alloc_from_heap ( fp_block_length , 16 ) ;
auto fp_buf = static_cast < u8 * > ( fp_mapping . first ) ;
// Control mask
const auto control_masks = reinterpret_cast < u32 * > ( fp_buf ) ;
control_masks [ 0 ] = rsx : : method_registers . shader_control ( ) ;
2021-05-12 23:56:01 +02:00
control_masks [ 1 ] = current_fragment_program . texture_state . texture_dimensions ;
2020-03-24 22:22:21 +01:00
// Bind textures
m_shader_interpreter . update_fragment_textures ( fs_sampler_state , current_fp_metadata . referenced_textures_mask , reinterpret_cast < u32 * > ( fp_buf + 16 ) ) ;
2020-10-27 21:41:20 +01:00
std : : memcpy ( fp_buf + 80 , current_fragment_program . get_data ( ) , current_fragment_program . ucode_length ) ;
2020-03-24 22:22:21 +01:00
m_fragment_instructions_buffer - > bind_range ( GL_INTERPRETER_FRAGMENT_BLOCK , fp_mapping . second , fp_block_length ) ;
m_fragment_instructions_buffer - > notify ( ) ;
}
}
2016-10-18 09:57:28 +02:00
if ( manually_flush_ring_buffers )
2017-03-11 10:07:26 +01:00
{
2018-10-20 16:43:00 +02:00
if ( update_fragment_env ) m_fragment_env_buffer - > unmap ( ) ;
if ( update_vertex_env ) m_vertex_env_buffer - > unmap ( ) ;
if ( update_fragment_texture_env ) m_texture_parameters_buffer - > unmap ( ) ;
if ( update_fragment_constants ) m_fragment_constants_buffer - > unmap ( ) ;
2018-04-20 22:44:34 +02:00
if ( update_transform_constants ) m_transform_constants_buffer - > unmap ( ) ;
2020-05-28 23:51:36 +02:00
if ( update_raster_env ) m_raster_env_ring_buffer - > unmap ( ) ;
2020-03-24 22:22:21 +01:00
if ( update_instruction_buffers )
{
m_vertex_instructions_buffer - > unmap ( ) ;
m_fragment_instructions_buffer - > unmap ( ) ;
}
2017-03-11 10:07:26 +01:00
}
2016-10-18 09:57:28 +02:00
2018-10-20 16:43:00 +02:00
const u32 handled_flags = ( rsx : : pipeline_state : : fragment_state_dirty | rsx : : pipeline_state : : vertex_state_dirty | rsx : : pipeline_state : : transform_constants_dirty | rsx : : pipeline_state : : fragment_constants_dirty | rsx : : pipeline_state : : fragment_texture_state_dirty ) ;
2018-07-11 22:51:29 +02:00
m_graphics_state & = ~ handled_flags ;
2017-11-01 14:38:37 +01:00
}
2018-10-28 16:58:42 +01:00
void GLGSRender : : update_vertex_env ( const gl : : vertex_upload_info & upload_info )
{
if ( manually_flush_ring_buffers )
{
m_vertex_layout_buffer - > reserve_storage_on_heap ( 128 + 16 ) ;
}
// Vertex layout state
auto mapping = m_vertex_layout_buffer - > alloc_from_heap ( 128 + 16 , m_uniform_buffer_offset_align ) ;
2019-01-14 13:33:05 +01:00
auto buf = static_cast < u32 * > ( mapping . first ) ;
buf [ 0 ] = upload_info . vertex_index_base ;
buf [ 1 ] = upload_info . vertex_index_offset ;
2018-10-28 16:58:42 +01:00
buf + = 4 ;
2019-01-14 13:33:05 +01:00
2019-12-03 23:34:23 +01:00
fill_vertex_layout_state ( m_vertex_layout , upload_info . first_vertex , upload_info . allocated_vertex_count , reinterpret_cast < s32 * > ( buf ) , upload_info . persistent_mapping_offset , upload_info . volatile_mapping_offset ) ;
2018-10-28 16:58:42 +01:00
2019-10-02 01:26:29 +02:00
m_vertex_layout_buffer - > bind_range ( GL_VERTEX_LAYOUT_BIND_SLOT , mapping . second , 128 + 16 ) ;
2018-10-28 16:58:42 +01:00
if ( manually_flush_ring_buffers )
{
m_vertex_layout_buffer - > unmap ( ) ;
}
}
2016-02-15 10:50:14 +01:00
// Handler for CPU access faults on RSX-managed memory. Invalidates (and, when
// possible, flushes) cached texture data covering `address`.
// Returns true when the violation was handled by the texture cache; otherwise
// defers to the ZCULL unit's handler.
bool GLGSRender::on_access_violation(u32 address, bool is_writing)
{
	// Only the GL thread itself may flush immediately; any other thread must
	// request a deferred invalidation instead
	const bool can_flush = is_current_thread();
	const rsx::invalidation_cause cause =
		is_writing ? (can_flush ? rsx::invalidation_cause::write : rsx::invalidation_cause::deferred_write)
		           : (can_flush ? rsx::invalidation_cause::read : rsx::invalidation_cause::deferred_read);

	// A usable GL state is only available on the owning thread
	auto cmd = can_flush ? gl::command_context{ gl_state } : gl::command_context{};
	auto result = m_gl_texture_cache.invalidate_address(cmd, address, cause);

	if (result.invalidate_samplers)
	{
		std::lock_guard lock(m_sampler_mutex);
		m_samplers_dirty.store(true);
	}

	if (!result.violation_handled)
	{
		// Not a texture-cache address; give the ZCULL report unit a chance to claim it
		return zcull_ctrl->on_access_violation(address);
	}

	if (result.num_flushable > 0)
	{
		// Deferred path: post the flush work to the GL thread and wait for completion
		auto& task = post_flush_request(address, result);

		m_eng_interrupt_mask |= rsx::backend_interrupt;

		// Release the vm lock while waiting so the GL thread can make progress
		vm::temporary_unlock();

		task.producer_wait();
	}

	return true;
}
2017-02-16 19:29:56 +01:00
2019-08-25 17:47:49 +02:00
void GLGSRender : : on_invalidate_memory_range ( const utils : : address_range & range , rsx : : invalidation_cause cause )
2017-08-07 23:54:40 +02:00
{
2018-12-29 14:28:12 +01:00
gl : : command_context cmd { gl_state } ;
2021-02-18 12:38:56 +01:00
auto data = m_gl_texture_cache . invalidate_range ( cmd , range , cause ) ;
2018-09-22 02:14:26 +02:00
AUDIT ( data . empty ( ) ) ;
2019-08-25 17:47:49 +02:00
if ( cause = = rsx : : invalidation_cause : : unmap & & data . violation_handled )
2017-10-30 13:27:22 +01:00
{
2018-09-22 02:14:26 +02:00
m_gl_texture_cache . purge_unreleased_sections ( ) ;
2017-10-30 13:27:22 +01:00
{
2018-09-03 21:28:33 +02:00
std : : lock_guard lock ( m_sampler_mutex ) ;
2017-10-30 13:27:22 +01:00
m_samplers_dirty . store ( true ) ;
}
}
2017-08-07 23:54:40 +02:00
}
2019-08-25 17:47:49 +02:00
void GLGSRender : : on_semaphore_acquire_wait ( )
{
2019-08-26 20:41:37 +02:00
if ( ! work_queue . empty ( ) | |
( async_flip_requested & flip_request : : emu_requested ) )
2019-08-25 17:47:49 +02:00
{
do_local_task ( rsx : : FIFO_state : : lock_wait ) ;
}
}
2018-05-29 13:53:16 +02:00
// Service routine run on the GL thread between FIFO commands. Processes
// texture-cache flush requests posted by other threads; when idle, performs
// deferred cache maintenance and services native UI flips.
void GLGSRender::do_local_task(rsx::FIFO_state state)
{
	if (!work_queue.empty())
	{
		std::lock_guard lock(queue_guard);

		// Drop entries whose results the producer has already consumed
		work_queue.remove_if([](auto& q) { return q.received; });

		for (auto& q : work_queue)
		{
			if (q.processed) continue;

			gl::command_context cmd{ gl_state };
			q.result = m_gl_texture_cache.flush_all(cmd, q.section_data);
			q.processed = true;
		}
	}
	else if (!in_begin_end && state != rsx::FIFO_state::lock_wait)
	{
		if (m_graphics_state & rsx::pipeline_state::framebuffer_reads_dirty)
		{
			//This will re-engage locks and break the texture cache if another thread is waiting in access violation handler!
			//Only call when there are no waiters
			m_gl_texture_cache.do_update();
			m_graphics_state &= ~rsx::pipeline_state::framebuffer_reads_dirty;
		}
	}

	rsx::thread::do_local_task(state);

	if (state == rsx::FIFO_state::lock_wait)
	{
		// Critical check finished
		return;
	}

	if (m_overlay_manager)
	{
		// Service a native-UI flip request while no draw is in progress
		if (!in_begin_end && async_flip_requested & flip_request::native_ui)
		{
			rsx::display_flip_info_t info{};
			info.buffer = current_display_buffer;
			flip(info);
		}
	}
}
2020-01-17 17:24:33 +01:00
// Queues a flush request for the GL thread (see do_local_task) and returns a
// reference the caller can wait on. `flush_data` is consumed (moved from).
gl::work_item& GLGSRender::post_flush_request(u32 address, gl::texture_cache::thrashed_set& flush_data)
{
	// The queue is shared with the GL thread; guard all mutation
	std::lock_guard lock(queue_guard);

	work_queue.emplace_back();

	auto& item = work_queue.back();
	item.address_to_flush = address;
	item.section_data = std::move(flush_data);
	return item;
}
2017-03-29 21:27:29 +02:00
bool GLGSRender : : scaled_image_from_memory ( rsx : : blit_src_info & src , rsx : : blit_dst_info & dst , bool interpolate )
{
2018-12-29 14:28:12 +01:00
gl : : command_context cmd { gl_state } ;
if ( m_gl_texture_cache . blit ( cmd , src , dst , interpolate , m_rtts ) )
2018-06-04 18:57:16 +02:00
{
m_samplers_dirty . store ( true ) ;
return true ;
}
return false ;
2017-03-29 21:27:29 +02:00
}
2017-07-27 18:04:55 +02:00
2017-10-26 05:01:10 +02:00
// Called when a tiled memory region is unbound. Currently only flags sampler
// state dirty; surface writeback for the tile is intentionally disabled.
void GLGSRender::notify_tile_unbound(u32 tile)
{
	// TODO: Handle texture writeback
	if (false)
	{
		// Disabled writeback path, kept for reference until implemented
		u32 addr = rsx::get_address(tiles[tile].offset, tiles[tile].location);
		on_notify_memory_unmapped(addr, tiles[tile].size);
		m_rtts.invalidate_surface_address(addr, false);
	}

	{
		std::lock_guard lock(m_sampler_mutex);
		m_samplers_dirty.store(true);
	}
}
2018-03-05 12:09:43 +01:00
void GLGSRender : : begin_occlusion_query ( rsx : : reports : : occlusion_query_info * query )
2017-07-27 18:04:55 +02:00
{
2017-11-16 22:52:21 +01:00
query - > result = 0 ;
2021-09-04 20:50:53 +02:00
glBeginQuery ( m_occlusion_type , query - > driver_handle ) ;
2017-07-27 18:04:55 +02:00
}
2018-03-05 12:09:43 +01:00
void GLGSRender : : end_occlusion_query ( rsx : : reports : : occlusion_query_info * query )
2017-07-27 18:04:55 +02:00
{
2020-12-09 08:47:45 +01:00
ensure ( query - > active ) ;
2021-09-04 20:50:53 +02:00
glEndQuery ( m_occlusion_type ) ;
2017-07-27 18:04:55 +02:00
}
2018-03-05 12:09:43 +01:00
bool GLGSRender : : check_occlusion_query_status ( rsx : : reports : : occlusion_query_info * query )
2017-07-27 18:04:55 +02:00
{
2018-03-05 12:09:43 +01:00
if ( ! query - > num_draws )
return true ;
2017-11-16 22:52:21 +01:00
GLint status = GL_TRUE ;
2019-12-03 23:34:23 +01:00
glGetQueryObjectiv ( query - > driver_handle , GL_QUERY_RESULT_AVAILABLE , & status ) ;
2017-08-11 22:32:44 +02:00
2017-11-16 22:52:21 +01:00
return status ! = GL_FALSE ;
2017-07-27 18:04:55 +02:00
}
2018-03-05 12:09:43 +01:00
void GLGSRender : : get_occlusion_query_result ( rsx : : reports : : occlusion_query_info * query )
2017-07-27 18:04:55 +02:00
{
2018-03-05 12:09:43 +01:00
if ( query - > num_draws )
{
2018-03-24 10:53:34 +01:00
GLint result = 0 ;
2019-12-03 23:34:23 +01:00
glGetQueryObjectiv ( query - > driver_handle , GL_QUERY_RESULT , & result ) ;
2017-07-27 18:04:55 +02:00
2018-03-05 12:09:43 +01:00
query - > result + = result ;
}
2018-01-17 17:14:00 +01:00
}
2018-03-13 14:34:31 +01:00
void GLGSRender : : discard_occlusion_query ( rsx : : reports : : occlusion_query_info * query )
{
2018-03-24 10:53:34 +01:00
if ( query - > active )
{
//Discard is being called on an active query, close it
2021-09-04 20:50:53 +02:00
glEndQuery ( m_occlusion_type ) ;
2018-03-24 10:53:34 +01:00
}
2018-03-13 14:34:31 +01:00
}