2020-12-05 13:08:24 +01:00
# pragma once
2016-02-15 10:50:14 +01:00
2020-12-22 09:42:57 +01:00
# include "util/types.hpp"
2016-02-15 10:50:14 +01:00
2016-09-18 07:19:26 +02:00
# include "GLRenderTargets.h"
2017-09-08 16:52:13 +02:00
# include "../Common/texture_cache.h"
2017-04-23 11:32:37 +02:00
2020-12-22 09:42:57 +01:00
# include <memory>
# include <vector>
2017-02-16 19:29:56 +01:00
class GLGSRender ;
2016-02-15 10:50:14 +01:00
2017-02-13 15:22:25 +01:00
namespace gl
{
2017-09-28 20:32:00 +02:00
class blitter ;
2021-03-23 20:32:50 +01:00
extern blitter * g_hw_blitter ;
2017-09-28 20:32:00 +02:00
2018-10-28 14:59:39 +01:00
class cached_texture_section ;
class texture_cache ;
// Type glue consumed by the shared rsx::texture_cache template.
// Maps the backend-agnostic concepts (command buffer, section storage,
// image/view handles) onto their OpenGL implementations.
struct texture_cache_traits
{
	using commandbuffer_type      = gl::command_context;
	using section_storage_type    = gl::cached_texture_section;
	using texture_cache_type      = gl::texture_cache;
	using texture_cache_base_type = rsx::texture_cache<texture_cache_type, texture_cache_traits>;
	using image_resource_type     = gl::texture*;
	using image_view_type         = gl::texture_view*;
	using image_storage_type      = gl::texture;
	using texture_format          = gl::texture::format;
};
// One cached texture region in the GL backend. Tracks the GPU image backing a
// guest memory range, plus the PBO/fence machinery used to read its contents
// back to CPU memory when the guest accesses that range.
class cached_texture_section : public rsx::cached_texture_section<gl::cached_texture_section, gl::texture_cache_traits>
{
	using baseclass = rsx::cached_texture_section<gl::cached_texture_section, gl::texture_cache_traits>;
	friend baseclass;

	// Fence signalled after the readback commands are submitted; waited on in map_synchronized()
	fence m_fence;

	// Pixel-pack buffer receiving the texture readback
	buffer pbo;

	// Image backing this section. May be externally owned (e.g. a render target)
	gl::viewable_image* vram_texture = nullptr;

	// Owning handle when this section created the image itself (see create(..., managed))
	std::unique_ptr<gl::viewable_image> managed_texture;

	// Intermediate image used when the source must be rescaled before readback
	std::unique_ptr<gl::texture> scaled_texture;

	// Pixel-transfer format/type used for readback
	texture::format format = texture::format::rgba;
	texture::type type = texture::type::ubyte;

	// Ensures 'pbo' exists and is large enough to hold the contents of 'src'.
	// Keeps the existing buffer if it already has sufficient capacity.
	void init_buffer(const gl::texture* src)
	{
		const u32 vram_size = src->pitch() * src->height();
		const u32 buffer_size = utils::align(vram_size, 4096);

		if (pbo)
		{
			if (pbo.size() >= buffer_size)
				return;

			pbo.remove();
		}

		pbo.create(buffer::target::pixel_pack, buffer_size, nullptr, buffer::memory_type::host_visible, GL_STREAM_READ);

		glBindBuffer(GL_PIXEL_PACK_BUFFER, GL_NONE);
	}

public:
	using baseclass::cached_texture_section;

	// Attaches 'image' as this section's backing storage and resets the flush state.
	// @param managed  true if this section takes ownership of 'image'
	// @param rsx_pitch  guest-side row pitch; must be non-zero
	void create(u16 w, u16 h, u16 depth, u16 mipmaps, gl::texture* image, u32 rsx_pitch, bool managed,
			gl::texture::format gl_format = gl::texture::format::rgba, gl::texture::type gl_type = gl::texture::type::ubyte, bool swap_bytes = false)
	{
		if (vram_texture && !managed_texture && get_protection() == utils::protection::no)
		{
			// In-place image swap, still locked. Likely a color buffer that got rebound as depth buffer or vice-versa.
			gl::as_rtt(vram_texture)->release();

			if (!managed)
			{
				// Incoming is also an external resource, reference it immediately
				gl::as_rtt(image)->add_ref();
			}
		}

		auto new_texture = static_cast<gl::viewable_image*>(image);
		ensure(!exists() || !is_managed() || vram_texture == new_texture);
		vram_texture = new_texture;

		if (managed)
		{
			managed_texture.reset(vram_texture);
		}
		else
		{
			ensure(!managed_texture);
		}

		if (auto rtt = dynamic_cast<gl::render_target*>(image))
		{
			swizzled = (rtt->raster_type != rsx::surface_raster_type::linear);
		}

		flushed = false;
		synchronized = false;
		sync_timestamp = 0ull;

		ensure(rsx_pitch);

		this->rsx_pitch = rsx_pitch;
		this->width = w;
		this->height = h;
		this->real_pitch = 0;
		this->depth = depth;
		this->mipmaps = mipmaps;

		set_format(gl_format, gl_type, swap_bytes);

		// Notify baseclass
		baseclass::on_section_resources_created();
	}

	// Updates the logical dimensions without touching the backing image.
	void set_dimensions(u32 width, u32 height, u32 /*depth*/, u32 pitch)
	{
		this->width = width;
		this->height = height;
		rsx_pitch = pitch;
	}

	// Sets the pixel-transfer configuration and derives gcm_format for
	// the RGBA float cases where it can be inferred from the GL type.
	void set_format(texture::format gl_format, texture::type gl_type, bool swap_bytes)
	{
		format = gl_format;
		type = gl_type;
		pack_unpack_swap_bytes = swap_bytes;

		if (format == gl::texture::format::rgba)
		{
			switch (type)
			{
			case gl::texture::type::f16:
				gcm_format = CELL_GCM_TEXTURE_W16_Z16_Y16_X16_FLOAT;
				break;
			case gl::texture::type::f32:
				gcm_format = CELL_GCM_TEXTURE_W32_Z32_Y32_X32_FLOAT;
				break;
			default:
				break;
			}
		}
	}

	// Reads 'src' back into this section's PBO. Depth images go through a
	// compute-shader path when available; everything else uses the driver's
	// pixel-pack transfer. Marks the section synchronized on completion.
	void dma_transfer(gl::command_context& /*cmd*/, gl::texture* src, const areai& /*src_area*/, const utils::address_range& /*valid_range*/, u32 pitch)
	{
		init_buffer(src);

		// Drain any pre-existing GL error so later checks only see our own
		glGetError();

		if (context == rsx::texture_upload_context::dma)
		{
			// Determine unpack config dynamically
			const auto format_info = gl::get_format_type(src->get_internal_format());
			format = static_cast<gl::texture::format>(format_info.format);
			type = static_cast<gl::texture::type>(format_info.type);
			pack_unpack_swap_bytes = format_info.swap_bytes;
		}

		real_pitch = src->pitch();
		rsx_pitch = pitch;

		bool use_driver_pixel_transform = true;
		if (get_driver_caps().ARB_compute_shader_supported) [[likely]]
		{
			if (src->aspect() & image_aspect::depth)
			{
				buffer scratch_mem;

				// Invoke compute
				if (auto error = glGetError(); !error) [[likely]]
				{
					pixel_buffer_layout pack_info{};
					image_memory_requirements mem_info{};

					pack_info.format = static_cast<GLenum>(format);
					pack_info.type = static_cast<GLenum>(type);
					pack_info.size = (src->aspect() & image_aspect::stencil) ? 4 : 2;
					pack_info.swap_bytes = true;

					mem_info.image_size_in_texels = src->width() * src->height();
					mem_info.image_size_in_bytes = src->pitch() * src->height();
					mem_info.memory_required = 0;

					if (pack_info.type == GL_FLOAT_32_UNSIGNED_INT_24_8_REV)
					{
						// D32FS8 can be read back as D24S8 or D32S8X24. In case of the latter, double memory requirements
						mem_info.image_size_in_bytes *= 2;
					}

					void* out_offset = copy_image_to_buffer(pack_info, src, &scratch_mem, 0, { {}, src->size3D() }, &mem_info);

					glBindBuffer(GL_SHADER_STORAGE_BUFFER, GL_NONE);
					glMemoryBarrier(GL_BUFFER_UPDATE_BARRIER_BIT);

					// Compute output is tightly packed, so pitch is recomputed
					real_pitch = pack_info.size * src->width();
					const u64 data_length = pack_info.size * mem_info.image_size_in_texels;
					scratch_mem.copy_to(&pbo, reinterpret_cast<u64>(out_offset), 0, data_length);
				}
				else
				{
					rsx_log.error("Memory transfer failed with error 0x%x. Format=0x%x, Type=0x%x", error, static_cast<u32>(format), static_cast<u32>(type));
				}

				scratch_mem.remove();
				use_driver_pixel_transform = false;
			}
		}

		if (use_driver_pixel_transform)
		{
			if (src->aspect() & image_aspect::stencil)
			{
				pack_unpack_swap_bytes = false;
			}

			pbo.bind(buffer::target::pixel_pack);

			pixel_pack_settings pack_settings;
			pack_settings.alignment(1);
			pack_settings.swap_bytes(pack_unpack_swap_bytes);

			src->copy_to(nullptr, format, type, pack_settings);
		}

		if (auto error = glGetError())
		{
			if (error == GL_OUT_OF_MEMORY && ::gl::get_driver_caps().vendor_AMD)
			{
				// AMD driver bug
				// Pixel transfer fails with GL_OUT_OF_MEMORY. Usually happens with float textures or operations attempting to swap endianness.
				// Failed operations also leak a large amount of memory
				rsx_log.error("Memory transfer failure (AMD bug). Please update your driver to Adrenalin 19.4.3 or newer. Format=0x%x, Type=0x%x, Swap=%d", static_cast<u32>(format), static_cast<u32>(type), pack_unpack_swap_bytes);
			}
			else
			{
				rsx_log.error("Memory transfer failed with error 0x%x. Format=0x%x, Type=0x%x", error, static_cast<u32>(format), static_cast<u32>(type));
			}
		}

		glBindBuffer(GL_PIXEL_PACK_BUFFER, GL_NONE);

		m_fence.reset();
		synchronized = true;
		sync_timestamp = rsx::get_shared_tag();
	}

	// Copies this section's image into the PBO, rescaling first if the image
	// is resolution-scaled or its pitch differs from the guest pitch.
	// @param miss  true when this flush was NOT speculative (cache miss path)
	void copy_texture(gl::command_context& cmd, bool miss)
	{
		ensure(exists());

		if (!miss) [[likely]]
		{
			baseclass::on_speculative_flush();
		}
		else
		{
			baseclass::on_miss();
		}

		if (context == rsx::texture_upload_context::framebuffer_storage)
		{
			auto as_rtt = static_cast<gl::render_target*>(vram_texture);
			if (as_rtt->dirty()) as_rtt->read_barrier(cmd);
		}

		gl::texture* target_texture = vram_texture;
		if ((rsx::get_resolution_scale_percent() != 100 && context == rsx::texture_upload_context::framebuffer_storage) ||
			(vram_texture->pitch() != rsx_pitch))
		{
			u32 real_width = width;
			u32 real_height = height;

			if (context == rsx::texture_upload_context::framebuffer_storage)
			{
				// Account for MSAA sample spread of render targets
				auto surface = gl::as_rtt(vram_texture);
				real_width *= surface->samples_x;
				real_height *= surface->samples_y;
			}

			areai src_area = { 0, 0, 0, 0 };
			const areai dst_area = { 0, 0, static_cast<s32>(real_width), static_cast<s32>(real_height) };

			auto ifmt = vram_texture->get_internal_format();
			src_area.x2 = vram_texture->width();
			src_area.y2 = vram_texture->height();

			if (src_area.x2 != dst_area.x2 || src_area.y2 != dst_area.y2)
			{
				if (scaled_texture)
				{
					auto sfmt = scaled_texture->get_internal_format();
					if (scaled_texture->width() != real_width ||
						scaled_texture->height() != real_height ||
						sfmt != ifmt)
					{
						// Discard current scaled texture
						scaled_texture.reset();
					}
				}

				if (!scaled_texture)
				{
					scaled_texture = std::make_unique<gl::texture>(GL_TEXTURE_2D, real_width, real_height, 1, 1, static_cast<GLenum>(ifmt));
				}

				// Depth data must not be filtered during the blit
				const bool linear_interp = is_depth_texture() ? false : true;
				g_hw_blitter->scale_image(cmd, vram_texture, scaled_texture.get(), src_area, dst_area, linear_interp, {});
				target_texture = scaled_texture.get();
			}
		}

		dma_transfer(cmd, target_texture, {}, {}, rsx_pitch);
	}

	/**
	 * Flush
	 */
	// Waits for the readback fence, then maps the PBO range for CPU reads.
	// Requires a prior successful dma_transfer (synchronized == true).
	void* map_synchronized(u32 offset, u32 size)
	{
		AUDIT(synchronized && !m_fence.is_empty());

		m_fence.wait_for_signal();

		ensure(offset + GLsizeiptr{size} <= pbo.size());
		return pbo.map(offset, size, gl::buffer::access::read);
	}

	void finish_flush();

	/**
	 * Misc
	 */
	// Releases all GL resources owned by this section. Safe to call twice.
	void destroy()
	{
		if (!is_locked() && !pbo && vram_texture == nullptr && m_fence.is_empty() && !managed_texture)
			//Already destroyed
			return;

		if (pbo)
		{
			// Destroy pbo cache since vram texture is managed elsewhere
			pbo.remove();
			scaled_texture.reset();
		}

		managed_texture.reset();
		vram_texture = nullptr;

		if (!m_fence.is_empty())
		{
			m_fence.destroy();
		}

		baseclass::on_section_resources_destroyed();
	}

	// Propagates contents from overlapping surface sections into this
	// section's render target (framebuffer-storage sections only).
	void sync_surface_memory(const std::vector<cached_texture_section*>& surfaces)
	{
		auto rtt = gl::as_rtt(vram_texture);
		rtt->sync_tag();

		for (auto& surface : surfaces)
		{
			rtt->inherit_surface_contents(gl::as_rtt(surface->vram_texture));
		}
	}

	// True once a backing image has been attached via create()
	bool exists() const
	{
		return (vram_texture != nullptr);
	}

	// True if this section owns its image (or has none at all)
	bool is_managed() const
	{
		return !exists() || managed_texture;
	}

	texture::format get_format() const
	{
		return format;
	}

	bool is_flushed() const
	{
		return flushed;
	}

	bool is_synchronized() const
	{
		return synchronized;
	}

	void set_flushed(bool state)
	{
		flushed = state;
	}

	bool is_empty() const
	{
		return vram_texture == nullptr;
	}

	gl::texture_view* get_view(u32 remap_encoding, const std::pair<std::array<u8, 4>, std::array<u8, 4>>& remap)
	{
		return vram_texture->get_view(remap_encoding, remap);
	}

	// Returns the owned image, or nullptr for externally-owned sections
	gl::texture* get_raw_texture() const
	{
		return managed_texture.get();
	}

	gl::render_target* get_render_target()
	{
		return gl::as_rtt(vram_texture);
	}

	// View with the default channel remap (0xAAE4 appears to be the identity
	// remap encoding used across the cache - confirm against gl::texture::get_view)
	gl::texture_view* get_raw_view()
	{
		return vram_texture->get_view(0xAAE4, rsx::default_remap_vector);
	}

	bool is_depth_texture() const
	{
		return !!(vram_texture->aspect() & gl::image_aspect::depth);
	}

	bool has_compatible_format(gl::texture* tex) const
	{
		//TODO
		return (tex->get_internal_format() == vram_texture->get_internal_format());
	}
};
2018-10-28 14:59:39 +01:00
class texture_cache : public rsx : : texture_cache < gl : : texture_cache , gl : : texture_cache_traits >
2018-04-07 12:19:49 +02:00
{
private :
2018-10-28 14:59:39 +01:00
using baseclass = rsx : : texture_cache < gl : : texture_cache , gl : : texture_cache_traits > ;
friend baseclass ;
2018-04-07 12:19:49 +02:00
2018-10-28 14:59:39 +01:00
private :
2018-04-07 12:19:49 +02:00
// Owning holder for a temporary image and/or view pending deferred deletion.
// Entries live in m_temporary_surfaces until released or the cache is cleared.
struct discardable_storage
{
	std::unique_ptr<gl::texture> image;
	std::unique_ptr<gl::texture_view> view;

	discardable_storage() = default;

	// Takes ownership of a bare image
	discardable_storage(std::unique_ptr<gl::texture>& tex)
	{
		image = std::move(tex);
	}

	// Takes ownership of a bare view
	discardable_storage(std::unique_ptr<gl::texture_view>& _view)
	{
		view = std::move(_view);
	}

	// Takes ownership of an image together with its view
	discardable_storage(std::unique_ptr<gl::texture>& tex, std::unique_ptr<gl::texture_view>& _view)
	{
		image = std::move(tex);
		view = std::move(_view);
	}
};
2018-02-09 15:49:37 +01:00
2017-02-13 15:22:25 +01:00
private :
2017-03-29 21:27:29 +02:00
blitter m_hw_blitter ;
2018-04-07 12:19:49 +02:00
std : : vector < discardable_storage > m_temporary_surfaces ;
2017-02-16 19:29:56 +01:00
2017-02-13 15:22:25 +01:00
// Full cache reset: clears the shared base-class state first, then drops
// any temporary surfaces held locally by the GL backend.
void clear()
{
	baseclass::clear();
	clear_temporary_subresources();
}
2018-02-09 15:49:37 +01:00
2017-09-08 16:52:13 +02:00
// Destroys all temporary images/views created for subresource reads.
// unique_ptr members release the GL objects as the vector is cleared.
void clear_temporary_subresources()
{
	m_temporary_surfaces.clear();
}
2019-03-29 20:04:54 +01:00
gl : : texture_view * create_temporary_subresource_impl ( gl : : command_context & cmd , gl : : texture * src , GLenum sized_internal_fmt , GLenum dst_type , u32 gcm_format ,
2021-03-23 20:32:50 +01:00
u16 x , u16 y , u16 width , u16 height , u16 depth , u8 mipmaps , const rsx : : texture_channel_remap_t & remap , bool copy ) ;
2017-12-07 13:08:11 +01:00
2021-05-28 00:44:07 +02:00
std : : array < GLenum , 4 > get_component_mapping ( u32 gcm_format , rsx : : component_order flags ) const
2017-12-07 13:08:11 +01:00
{
2018-04-22 21:08:53 +02:00
switch ( gcm_format )
{
case CELL_GCM_TEXTURE_DEPTH24_D8 :
case CELL_GCM_TEXTURE_DEPTH24_D8_FLOAT :
case CELL_GCM_TEXTURE_DEPTH16 :
case CELL_GCM_TEXTURE_DEPTH16_FLOAT :
//Dont bother letting this propagate
return { GL_RED , GL_RED , GL_RED , GL_RED } ;
default :
break ;
}
2017-12-07 13:08:11 +01:00
switch ( flags )
{
2021-05-28 00:44:07 +02:00
case rsx : : component_order : : default_ :
2017-12-07 13:08:11 +01:00
{
2018-04-07 12:19:49 +02:00
return gl : : get_swizzle_remap ( gcm_format ) ;
2017-12-07 13:08:11 +01:00
}
2021-05-28 00:44:07 +02:00
case rsx : : component_order : : native :
2017-12-07 13:08:11 +01:00
{
2018-04-07 12:19:49 +02:00
return { GL_ALPHA , GL_RED , GL_GREEN , GL_BLUE } ;
2017-12-07 13:08:11 +01:00
}
2021-05-28 00:44:07 +02:00
case rsx : : component_order : : swapped_native :
2017-12-07 13:08:11 +01:00
{
2018-04-07 12:19:49 +02:00
return { GL_BLUE , GL_ALPHA , GL_RED , GL_GREEN } ;
2017-12-07 13:08:11 +01:00
}
2018-04-07 12:19:49 +02:00
default :
2020-12-09 16:04:52 +01:00
fmt : : throw_exception ( " Unknown texture create flags " ) ;
2017-12-07 13:08:11 +01:00
}
}
2018-02-09 15:49:37 +01:00
2020-12-18 15:43:34 +01:00
void copy_transfer_regions_impl ( gl : : command_context & cmd , gl : : texture * dst_image , const std : : vector < copy_region_descriptor > & sources ) const ;
2019-02-02 20:44:18 +01:00
2019-02-25 16:03:14 +01:00
// Picks a representative source image from a set of copy regions to use as a
// creation template. Returns nullptr when the sources disagree on component
// layout, since those cannot be merged by simple copies.
gl::texture* get_template_from_collection_impl(const std::vector<copy_region_descriptor>& sections_to_transfer) const
{
	if (sections_to_transfer.size() == 1) [[likely]]
	{
		// Single source - use it directly
		return sections_to_transfer.front().src;
	}

	gl::texture* result = nullptr;
	for (const auto& section : sections_to_transfer)
	{
		if (!section.src)
		{
			continue;
		}

		if (!result)
		{
			// First valid candidate becomes the reference
			result = section.src;
			continue;
		}

		// Every further candidate must match the reference's component layout
		const auto ref_layout = result->get_native_component_layout();
		const auto src_layout = section.src->get_native_component_layout();

		for (int channel = 0; channel < 4; ++channel)
		{
			if (ref_layout[channel] != src_layout[channel])
			{
				// TODO
				// This requires a far more complex setup as its not always possible to mix and match without compute assistance
				return nullptr;
			}
		}
	}

	return result;
}
2017-09-08 16:52:13 +02:00
protected :
2018-02-09 15:49:37 +01:00
2019-03-29 20:04:54 +01:00
// Creates a temporary 2D view over a subregion of *src, letting the
// implementation pick the internal format (GL_NONE) and forcing a copy.
gl::texture_view* create_temporary_subresource_view(gl::command_context& cmd, gl::texture** src, u32 gcm_format, u16 x, u16 y, u16 w, u16 h,
	const rsx::texture_channel_remap_t& remap_vector) override
{
	return create_temporary_subresource_impl(cmd, *src, GL_NONE, GL_TEXTURE_2D, gcm_format, x, y, w, h, 1, 1, remap_vector, true);
}
2019-03-29 20:04:54 +01:00
// Creates a temporary 2D view over a subregion of src, reusing the source's
// own internal format for the copy target.
gl::texture_view* create_temporary_subresource_view(gl::command_context& cmd, gl::texture* src, u32 gcm_format, u16 x, u16 y, u16 w, u16 h,
	const rsx::texture_channel_remap_t& remap_vector) override
{
	return create_temporary_subresource_impl(cmd, src, static_cast<GLenum>(src->get_internal_format()),
		GL_TEXTURE_2D, gcm_format, x, y, w, h, 1, 1, remap_vector, true);
}
2020-08-09 15:28:13 +02:00
// Assembles a cubemap from a set of 2D source regions.
// Creates a size*size cubemap scratch image, then copies each source
// region into it.
// @return view over the newly created temporary cubemap
gl::texture_view* generate_cubemap_from_images(gl::command_context& cmd, u32 gcm_format, u16 size, const std::vector<copy_region_descriptor>& sources, const rsx::texture_channel_remap_t& remap_vector) override
{
	auto _template = get_template_from_collection_impl(sources);

	// NOTE: The destination must be created as a cubemap (GL_TEXTURE_CUBE_MAP),
	// not GL_TEXTURE_3D - the 3D target is for generate_3d_from_2d_images.
	auto result = create_temporary_subresource_impl(cmd, _template, GL_NONE, GL_TEXTURE_CUBE_MAP, gcm_format, 0, 0, size, size, 1, 1, remap_vector, false);

	copy_transfer_regions_impl(cmd, result->image(), sources);
	return result;
}
2020-08-09 15:28:13 +02:00
// Assembles a 3D texture from a set of 2D slices: allocates a
// width*height*depth GL_TEXTURE_3D scratch image, then copies each
// source region into its slice.
gl::texture_view* generate_3d_from_2d_images(gl::command_context& cmd, u32 gcm_format, u16 width, u16 height, u16 depth, const std::vector<copy_region_descriptor>& sources, const rsx::texture_channel_remap_t& remap_vector) override
{
	auto _template = get_template_from_collection_impl(sources);
	auto result = create_temporary_subresource_impl(cmd, _template, GL_NONE, GL_TEXTURE_3D, gcm_format, 0, 0, width, height, depth, 1, remap_vector, false);

	copy_transfer_regions_impl(cmd, result->image(), sources);
	return result;
}
2019-02-02 20:44:18 +01:00
// Builds a single 2D atlas image from multiple source regions by copying
// each section into a freshly created temporary 2D texture.
gl::texture_view* generate_atlas_from_images(gl::command_context& cmd, u32 gcm_format, u16 width, u16 height, const std::vector<copy_region_descriptor>& sections_to_copy,
	const rsx::texture_channel_remap_t& remap_vector) override
{
	auto _template = get_template_from_collection_impl(sections_to_copy);
	auto result = create_temporary_subresource_impl(cmd, _template, GL_NONE, GL_TEXTURE_2D, gcm_format, 0, 0, width, height, 1, 1, remap_vector, false);

	copy_transfer_regions_impl(cmd, result->image(), sections_to_copy);
	return result;
}
2020-08-09 15:28:13 +02:00
// Builds a mipmapped 2D texture where each source section supplies one mip
// level (mip count = number of sections).
gl::texture_view* generate_2d_mipmaps_from_images(gl::command_context& cmd, u32 gcm_format, u16 width, u16 height, const std::vector<copy_region_descriptor>& sections_to_copy,
	const rsx::texture_channel_remap_t& remap_vector) override
{
	const auto mipmaps = ::narrow<u8>(sections_to_copy.size());
	auto _template = get_template_from_collection_impl(sections_to_copy);
	auto result = create_temporary_subresource_impl(cmd, _template, GL_NONE, GL_TEXTURE_2D, gcm_format, 0, 0, width, height, 1, mipmaps, remap_vector, false);

	copy_transfer_regions_impl(cmd, result->image(), sections_to_copy);
	return result;
}
2019-10-17 20:35:04 +02:00
void release_temporary_subresource ( gl : : texture_view * view ) override
{
for ( auto & e : m_temporary_surfaces )
{
if ( e . image . get ( ) = = view - > image ( ) )
{
e . view . reset ( ) ;
e . image . reset ( ) ;
return ;
}
}
}
2019-04-05 13:39:43 +02:00
// Copies the full width*height contents of src into the image behind dst,
// expressed as a single identity-transform copy region.
void update_image_contents(gl::command_context& cmd, gl::texture_view* dst, gl::texture* src, u16 width, u16 height) override
{
	std::vector<copy_region_descriptor> region =
	{{
		src,
		rsx::surface_transform::identity,
		0,
		0, 0, 0, 0, 0,
		width, height, width, height
	}};

	copy_transfer_regions_impl(cmd, dst->image(), region);
}
2022-01-09 21:07:18 +01:00
// Allocates (or recycles) a cache section covering rsx_range with a backing
// image of the requested dimensions/format. Existing section storage is
// reused when its image type matches; otherwise a fresh texture is created.
// Also configures memory protection / readback format depending on context.
cached_texture_section* create_new_texture(gl::command_context& cmd, const utils::address_range& rsx_range, u16 width, u16 height, u16 depth, u16 mipmaps, u32 pitch,
	u32 gcm_format, rsx::texture_upload_context context, rsx::texture_dimension_extended type, bool swizzled, rsx::component_order swizzle_flags, rsx::flags32_t /*flags*/) override
{
	const rsx::image_section_attributes_t search_desc = { .gcm_format = gcm_format, .width = width, .height = height, .depth = depth, .mipmaps = mipmaps };
	// Dirty sections may be recycled unless we need pristine framebuffer storage
	const bool allow_dirty = (context != rsx::texture_upload_context::framebuffer_storage);
	auto& cached = *find_cached_texture(rsx_range, search_desc, true, true, allow_dirty);
	ensure(!cached.is_locked());

	gl::viewable_image* image = nullptr;
	if (cached.exists())
	{
		// Try and reuse this image data. It is very likely to match our needs
		image = dynamic_cast<gl::viewable_image*>(cached.get_raw_texture());

		if (!image || cached.get_image_type() != type)
		{
			// Type mismatch, discard
			cached.destroy();
			image = nullptr;
		}
		else
		{
			ensure(cached.is_managed());
			cached.set_dimensions(width, height, depth, pitch);
			cached.set_format(texture::format::rgba, texture::type::ubyte, true);

			// Clear the image before use if it is not going to be uploaded wholly from CPU
			if (context != rsx::texture_upload_context::shader_read)
			{
				if (image->format_class() == RSX_FORMAT_CLASS_COLOR)
				{
					g_hw_blitter->fast_clear_image(cmd, image, color4f{});
				}
				else
				{
					// Depth/stencil clear (depth = 1.0, stencil = 0)
					g_hw_blitter->fast_clear_image(cmd, image, 1.f, 0);
				}
			}
		}
	}

	if (!image)
	{
		ensure(!cached.exists());
		image = gl::create_texture(gcm_format, width, height, depth, mipmaps, type);

		// Prepare section
		cached.reset(rsx_range);
		cached.set_image_type(type);
		cached.set_gcm_format(gcm_format);
		cached.create(width, height, depth, mipmaps, image, pitch, true);
	}

	cached.set_view_flags(swizzle_flags);
	cached.set_context(context);
	cached.set_swizzled(swizzled);
	cached.set_dirty(false);

	const auto swizzle = get_component_mapping(gcm_format, swizzle_flags);
	image->set_native_component_layout(swizzle);

	if (context != rsx::texture_upload_context::blit_engine_dst)
	{
		AUDIT(cached.get_memory_read_flags() != rsx::memory_read_flags::flush_always);
		read_only_range = cached.get_min_max(read_only_range, rsx::section_bounds::locked_range); // TODO ruipin: This was outside the if, but is inside the if in Vulkan. Ask kd-11
		cached.protect(utils::protection::ro);
	}
	else
	{
		//TODO: More tests on byte order
		//ARGB8+native+unswizzled is confirmed with Dark Souls II character preview
		switch (gcm_format)
		{
		case CELL_GCM_TEXTURE_A8R8G8B8:
		{
			cached.set_format(gl::texture::format::bgra, gl::texture::type::uint_8_8_8_8, false);
			break;
		}
		case CELL_GCM_TEXTURE_R5G6B5:
		{
			cached.set_format(gl::texture::format::rgb, gl::texture::type::ushort_5_6_5, true);
			break;
		}
		case CELL_GCM_TEXTURE_DEPTH24_D8:
		{
			cached.set_format(gl::texture::format::depth_stencil, gl::texture::type::uint_24_8, false);
			break;
		}
		case CELL_GCM_TEXTURE_DEPTH16:
		{
			cached.set_format(gl::texture::format::depth, gl::texture::type::ushort, true);
			break;
		}
		default:
			fmt::throw_exception("Unexpected gcm format 0x%X", gcm_format);
		}

		//NOTE: Protection is handled by the caller
		cached.set_dimensions(width, height, depth, (rsx_range.length() / height));
		no_access_range = cached.get_min_max(no_access_range, rsx::section_bounds::locked_range);
	}

	update_cache_tag();
	return &cached;
}
2019-12-26 21:01:48 +01:00
// Creates a GPU-less placeholder section used only to track a DMA memory range.
// No backing image is attached; the section merely participates in range bookkeeping.
cached_texture_section* create_nul_section(gl::command_context& /*cmd*/, const utils::address_range& rsx_range, bool /*memory_load*/) override
{
	auto* const section = find_cached_texture(rsx_range, { .gcm_format = RSX_GCM_FORMAT_IGNORED }, true, false, false);
	ensure(!section->is_locked());

	// Initialize tracking state for a DMA-only section.
	section->reset(rsx_range);
	section->set_context(rsx::texture_upload_context::dma);
	section->set_dirty(false);

	no_access_range = section->get_min_max(no_access_range, rsx::section_bounds::locked_range);
	update_cache_tag();
	return section;
}
2022-01-09 21:07:18 +01:00
// Allocates a new cache section for the given range and fills it with guest (CPU) data.
cached_texture_section* upload_image_from_cpu(gl::command_context& cmd, const utils::address_range& rsx_range, u16 width, u16 height, u16 depth, u16 mipmaps, u32 pitch, u32 gcm_format,
	rsx::texture_upload_context context, const std::vector<rsx::subresource_layout>& subresource_layout, rsx::texture_dimension_extended type, bool input_swizzled) override
{
	// Create the backing section with default component ordering.
	auto uploaded = create_new_texture(cmd, rsx_range, width, height, depth, mipmaps, pitch, gcm_format, context, type, input_swizzled,
		rsx::component_order::default_, 0);

	// Copy the guest subresources into the backing GL texture.
	gl::upload_texture(uploaded->get_raw_texture(), gcm_format, input_swizzled, subresource_layout);

	// Stamp with a fresh shared tag so other backends can order against this write.
	uploaded->last_write_tag = rsx::get_shared_tag();
	return uploaded;
}
2021-05-28 00:44:07 +02:00
void set_component_order ( cached_texture_section & section , u32 gcm_format , rsx : : component_order flags ) override
2017-02-16 19:29:56 +01:00
{
2017-09-14 13:37:14 +02:00
if ( flags = = section . get_view_flags ( ) )
return ;
2018-07-17 18:42:51 +02:00
const auto swizzle = get_component_mapping ( gcm_format , flags ) ;
2018-09-18 20:06:34 +02:00
auto image = static_cast < gl : : viewable_image * > ( section . get_raw_texture ( ) ) ;
2020-12-09 08:47:45 +01:00
ensure ( image ) ;
2018-09-18 20:06:34 +02:00
image - > set_native_component_layout ( swizzle ) ;
2017-09-14 13:37:14 +02:00
section . set_view_flags ( flags ) ;
2017-12-07 10:09:07 +01:00
}
2020-12-14 18:24:28 +01:00
void insert_texture_barrier ( gl : : command_context & , gl : : texture * , bool ) override
2017-02-13 15:22:25 +01:00
{
2017-09-08 16:52:13 +02:00
auto & caps = gl : : get_driver_caps ( ) ;
2016-02-15 10:50:14 +01:00
2017-09-08 16:52:13 +02:00
if ( caps . ARB_texture_barrier_supported )
glTextureBarrier ( ) ;
else if ( caps . NV_texture_barrier_supported )
glTextureBarrierNV ( ) ;
2017-08-07 23:54:40 +02:00
}
2018-02-23 20:49:59 +01:00
bool render_target_format_is_compatible ( gl : : texture * tex , u32 gcm_format ) override
{
2018-04-07 12:19:49 +02:00
auto ifmt = tex - > get_internal_format ( ) ;
switch ( gcm_format )
2018-02-23 20:49:59 +01:00
{
2018-04-07 12:19:49 +02:00
default :
//TODO
2022-05-15 14:24:52 +02:00
// warn_once("Format incompatibility detected, reporting failure to force data copy (GL_INTERNAL_FORMAT=0x%X, GCM_FORMAT=0x%X)", static_cast<u32>(ifmt), gcm_format);
2018-04-07 12:19:49 +02:00
return false ;
case CELL_GCM_TEXTURE_W16_Z16_Y16_X16_FLOAT :
return ( ifmt = = gl : : texture : : internal_format : : rgba16f ) ;
case CELL_GCM_TEXTURE_W32_Z32_Y32_X32_FLOAT :
return ( ifmt = = gl : : texture : : internal_format : : rgba32f ) ;
case CELL_GCM_TEXTURE_X32_FLOAT :
return ( ifmt = = gl : : texture : : internal_format : : r32f ) ;
case CELL_GCM_TEXTURE_R5G6B5 :
2019-08-13 15:29:30 +02:00
return ( ifmt = = gl : : texture : : internal_format : : rgb565 ) ;
2018-04-07 12:19:49 +02:00
case CELL_GCM_TEXTURE_A8R8G8B8 :
2020-06-21 15:41:26 +02:00
case CELL_GCM_TEXTURE_D8R8G8B8 :
2018-04-07 12:19:49 +02:00
return ( ifmt = = gl : : texture : : internal_format : : rgba8 | |
ifmt = = gl : : texture : : internal_format : : depth24_stencil8 | |
ifmt = = gl : : texture : : internal_format : : depth32f_stencil8 ) ;
case CELL_GCM_TEXTURE_B8 :
return ( ifmt = = gl : : texture : : internal_format : : r8 ) ;
case CELL_GCM_TEXTURE_G8B8 :
return ( ifmt = = gl : : texture : : internal_format : : rg8 ) ;
case CELL_GCM_TEXTURE_DEPTH24_D8 :
case CELL_GCM_TEXTURE_DEPTH24_D8_FLOAT :
return ( ifmt = = gl : : texture : : internal_format : : depth24_stencil8 | |
2020-08-15 13:07:18 +02:00
ifmt = = gl : : texture : : internal_format : : depth32f_stencil8 ) ;
2019-02-25 16:03:14 +01:00
case CELL_GCM_TEXTURE_X16 :
2018-04-07 12:19:49 +02:00
case CELL_GCM_TEXTURE_DEPTH16 :
case CELL_GCM_TEXTURE_DEPTH16_FLOAT :
return ( ifmt = = gl : : texture : : internal_format : : depth16 | |
2020-08-15 13:07:18 +02:00
ifmt = = gl : : texture : : internal_format : : depth32f ) ;
2018-02-23 20:49:59 +01:00
}
}
2019-03-16 10:14:11 +01:00
// Nothing to do on the GL backend before DMA transfers (intentionally empty override).
void prepare_for_dma_transfers(gl::command_context&) override
{}
// Nothing to do on the GL backend after DMA transfers (intentionally empty override).
void cleanup_after_dma_transfers(gl::command_context&) override
{}
2017-09-08 16:52:13 +02:00
public:

	// Inherit constructors from the shared rsx::texture_cache base.
	using baseclass::texture_cache;
2017-09-08 16:52:13 +02:00
// One-time setup; must run before any blit operation is issued.
void initialize()
{
	m_hw_blitter.init();

	// Publish the blitter globally only after it has been initialized.
	g_hw_blitter = &m_hw_blitter;
}
2017-03-29 21:27:29 +02:00
2017-09-08 16:52:13 +02:00
// Tears down all cached resources and the hardware blitter.
void destroy() override
{
	clear();

	// Unpublish the global pointer before destroying the blitter it refers to.
	g_hw_blitter = nullptr;
	m_hw_blitter.destroy();
}
2017-09-28 20:32:00 +02:00
2017-12-18 10:02:19 +01:00
// Checks whether the memory at [rsx_address, rsx_address + rsx_size) is backed
// by a cached depth texture. Only non-dirty sections that fully contain the
// queried range are considered.
bool is_depth_texture(u32 rsx_address, u32 rsx_size) override
{
	reader_lock lock(m_cache_mutex);

	auto& block = m_storage.block_for(rsx_address);
	if (!block.get_locked_count())
	{
		return false;
	}

	for (auto& tex : block)
	{
		// Skip stale sections and sections that do not cover the queried address.
		if (tex.is_dirty() || !tex.overlaps(rsx_address, rsx::section_bounds::full_range))
		{
			continue;
		}

		// Match only if the requested range fits entirely inside this section.
		if ((rsx_address + rsx_size - tex.get_section_base()) <= tex.get_section_size())
		{
			return tex.is_depth_texture();
		}
	}

	return false;
}
2017-09-08 16:52:13 +02:00
// End-of-frame maintenance: trims sections, reclaims zombie objects past the
// configured threshold, drops temporary subresources, then defers to the base.
void on_frame_end() override
{
	trim_sections();

	// Purge only when the number of unreleased (zombie) objects grows too large.
	if (m_storage.m_unreleased_texture_objects >= m_max_zombie_objects)
	{
		purge_unreleased_sections();
	}

	clear_temporary_subresources();

	// Base-class end-of-frame bookkeeping runs last.
	baseclass::on_frame_end();
}
2017-03-29 21:27:29 +02:00
2018-12-29 14:28:12 +01:00
bool blit ( gl : : command_context & cmd , rsx : : blit_src_info & src , rsx : : blit_dst_info & dst , bool linear_interpolate , gl_render_targets & m_rtts )
2017-09-08 16:52:13 +02:00
{
2018-12-29 14:28:12 +01:00
auto result = upload_scaled_image ( src , dst , linear_interpolate , cmd , m_rtts , m_hw_blitter ) ;
2018-02-10 17:21:16 +01:00
if ( result . succeeded )
{
2018-02-12 09:26:57 +01:00
if ( result . real_dst_size )
2018-02-10 17:21:16 +01:00
{
2018-12-29 14:28:12 +01:00
flush_if_cache_miss_likely ( cmd , result . to_address_range ( ) ) ;
2018-02-10 17:21:16 +01:00
}
return true ;
}
return false ;
2017-09-08 16:52:13 +02:00
}
2016-02-15 10:50:14 +01:00
} ;
2017-04-04 18:14:36 +02:00
}