2020-12-05 13:08:24 +01:00
# include "stdafx.h"
2016-09-18 07:19:26 +02:00
# include "GLTexture.h"
2019-10-02 02:47:19 +02:00
# include "GLCompute.h"
2020-08-10 21:13:59 +02:00
# include "GLRenderTargets.h"
2022-05-31 00:47:55 +02:00
# include "GLOverlays.h"
2015-12-21 03:14:56 +01:00
# include "../GCM.h"
# include "../RSXThread.h"
# include "../RSXTexture.h"
2020-12-18 15:43:34 +01:00
# include "util/asm.hpp"
2017-02-16 19:29:56 +01:00
namespace gl
{
	namespace debug
	{
		// Debug hook: hand a texture to the visualizer. Defined elsewhere in the project.
		extern void set_vis_texture(texture*);
	}

	// Shared scratch buffers for texture transfer/decode paths.
	// Lazily (re)allocated on demand and torn down by destroy_global_texture_resources().
	buffer g_typeless_transfer_buffer;
	buffer g_upload_transfer_buffer;   // host-visible upload staging (see prepare_compute_resources)
	buffer g_compute_decode_buffer;    // device-local output for compute decode jobs
	buffer g_deswizzle_scratch_buffer;
2022-05-27 19:02:45 +02:00
std : : pair < buffer * , buffer * > prepare_compute_resources ( usz staging_data_length )
{
2022-05-27 22:42:57 +02:00
if ( g_upload_transfer_buffer . size ( ) < static_cast < GLsizeiptr > ( staging_data_length ) )
2022-05-27 19:02:45 +02:00
{
g_upload_transfer_buffer . remove ( ) ;
2022-05-28 18:38:29 +02:00
g_upload_transfer_buffer . create ( gl : : buffer : : target : : pixel_unpack , staging_data_length , nullptr , buffer : : memory_type : : host_visible , GL_STREAM_DRAW ) ;
2022-05-27 19:02:45 +02:00
}
2022-05-27 22:42:57 +02:00
if ( g_compute_decode_buffer . size ( ) < static_cast < GLsizeiptr > ( staging_data_length ) * 3 )
2022-05-27 19:02:45 +02:00
{
g_compute_decode_buffer . remove ( ) ;
2022-05-28 18:38:29 +02:00
g_compute_decode_buffer . create ( gl : : buffer : : target : : pixel_pack , std : : max < GLsizeiptr > ( 512 , staging_data_length * 3 ) , nullptr , buffer : : memory_type : : local , GL_STATIC_COPY ) ;
2022-05-27 19:02:45 +02:00
}
return { & g_upload_transfer_buffer , & g_compute_decode_buffer } ;
}
void destroy_global_texture_resources ( )
{
g_typeless_transfer_buffer . remove ( ) ;
g_upload_transfer_buffer . remove ( ) ;
g_compute_decode_buffer . remove ( ) ;
2022-05-28 18:38:29 +02:00
g_deswizzle_scratch_buffer . remove ( ) ;
2022-05-27 19:02:45 +02:00
}
2018-04-07 17:16:52 +02:00
2022-05-27 22:42:57 +02:00
// Dispatch a 3D deswizzle compute job from src into dst, selecting the kernel
// specialization by texel block size (4, 8 or 16 bytes per block).
// WordType/SwapBytes parameterize the per-word element type and endian handling
// of the cs_deswizzle_3d shader.
template <typename WordType, bool SwapBytes>
void do_deswizzle_transformation(gl::command_context& cmd, u32 block_size, buffer* dst, buffer* src, u32 data_length, u16 width, u16 height, u16 depth)
{
	switch (block_size)
	{
	case 4:
		gl::get_compute_task<gl::cs_deswizzle_3d<u32, WordType, SwapBytes>>()->run(
			cmd, dst, 0, src, 0,
			data_length, width, height, depth, 1);
		break;
	case 8:
		gl::get_compute_task<gl::cs_deswizzle_3d<u64, WordType, SwapBytes>>()->run(
			cmd, dst, 0, src, 0,
			data_length, width, height, depth, 1);
		break;
	case 16:
		gl::get_compute_task<gl::cs_deswizzle_3d<u128, WordType, SwapBytes>>()->run(
			cmd, dst, 0, src, 0,
			data_length, width, height, depth, 1);
		break;
	default:
		fmt::throw_exception("Unreachable");
	}
}
2018-01-31 17:11:03 +01:00
// Translate an RSX texture dimensionality into the matching GL texture target.
GLenum get_target(rsx::texture_dimension_extended type)
{
	switch (type)
	{
	case rsx::texture_dimension_extended::texture_dimension_1d:
		return GL_TEXTURE_1D;
	case rsx::texture_dimension_extended::texture_dimension_2d:
		return GL_TEXTURE_2D;
	case rsx::texture_dimension_extended::texture_dimension_3d:
		return GL_TEXTURE_3D;
	case rsx::texture_dimension_extended::texture_dimension_cubemap:
		return GL_TEXTURE_CUBE_MAP;
	}

	fmt::throw_exception("Unknown texture target");
}
2016-03-13 20:11:19 +01:00
// Map a CELL_GCM texture format to the GL sized internal format used for storage.
// Component-order differences (e.g. B8 stored as single-channel R8) are handled
// later via get_swizzle_remap()/upload conversion, not here.
GLenum get_sized_internal_format(u32 texture_format)
{
	switch (texture_format)
	{
	case CELL_GCM_TEXTURE_B8: return GL_R8;
	case CELL_GCM_TEXTURE_A1R5G5B5: return GL_RGB5_A1;
	case CELL_GCM_TEXTURE_A4R4G4B4: return GL_RGBA4;
	case CELL_GCM_TEXTURE_R5G6B5: return GL_RGB565;
	case CELL_GCM_TEXTURE_A8R8G8B8: return GL_RGBA8;
	case CELL_GCM_TEXTURE_G8B8: return GL_RG8;
	case CELL_GCM_TEXTURE_R6G5B5: return GL_RGB565; // no native 6_5_5; closest sized format
	case CELL_GCM_TEXTURE_DEPTH24_D8: return GL_DEPTH24_STENCIL8;
	case CELL_GCM_TEXTURE_DEPTH24_D8_FLOAT: return GL_DEPTH32F_STENCIL8;
	case CELL_GCM_TEXTURE_DEPTH16: return GL_DEPTH_COMPONENT16;
	case CELL_GCM_TEXTURE_DEPTH16_FLOAT: return GL_DEPTH_COMPONENT32F; // GL has no 16-bit float depth
	case CELL_GCM_TEXTURE_X16: return GL_R16;
	case CELL_GCM_TEXTURE_Y16_X16: return GL_RG16;
	case CELL_GCM_TEXTURE_R5G5B5A1: return GL_RGB5_A1;
	case CELL_GCM_TEXTURE_W16_Z16_Y16_X16_FLOAT: return GL_RGBA16F;
	case CELL_GCM_TEXTURE_W32_Z32_Y32_X32_FLOAT: return GL_RGBA32F;
	case CELL_GCM_TEXTURE_X32_FLOAT: return GL_R32F;
	case CELL_GCM_TEXTURE_D1R5G5B5: return GL_RGB5_A1;
	case CELL_GCM_TEXTURE_D8R8G8B8: return GL_RGBA8;
	case CELL_GCM_TEXTURE_Y16_X16_FLOAT: return GL_RG16F;
	case CELL_GCM_TEXTURE_COMPRESSED_DXT1: return GL_COMPRESSED_RGBA_S3TC_DXT1_EXT;
	case CELL_GCM_TEXTURE_COMPRESSED_DXT23: return GL_COMPRESSED_RGBA_S3TC_DXT3_EXT;
	case CELL_GCM_TEXTURE_COMPRESSED_DXT45: return GL_COMPRESSED_RGBA_S3TC_DXT5_EXT;
	case CELL_GCM_TEXTURE_COMPRESSED_HILO8: return GL_RG8;
	case CELL_GCM_TEXTURE_COMPRESSED_HILO_S8: return GL_RG8;
	// B8R8_G8R8 / R8B8_R8G8 are decoded to plain RGBA8 on upload
	case CELL_GCM_TEXTURE_COMPRESSED_B8R8_G8R8: return GL_RGBA8;
	case CELL_GCM_TEXTURE_COMPRESSED_R8B8_R8G8: return GL_RGBA8;
	}

	fmt::throw_exception("Unknown texture format 0x%x", texture_format);
}
// Return the GL { pixel format, pixel type } pair used when transferring data
// for the given CELL_GCM texture format. Throws for unhandled formats.
std::tuple<GLenum, GLenum> get_format_type(u32 texture_format)
{
	switch (texture_format)
	{
	case CELL_GCM_TEXTURE_B8: return std::make_tuple(GL_RED, GL_UNSIGNED_BYTE);
	case CELL_GCM_TEXTURE_A1R5G5B5: return std::make_tuple(GL_BGRA, GL_UNSIGNED_SHORT_1_5_5_5_REV);
	case CELL_GCM_TEXTURE_A4R4G4B4: return std::make_tuple(GL_BGRA, GL_UNSIGNED_SHORT_4_4_4_4);
	case CELL_GCM_TEXTURE_R5G6B5: return std::make_tuple(GL_RGB, GL_UNSIGNED_SHORT_5_6_5);
	case CELL_GCM_TEXTURE_A8R8G8B8: return std::make_tuple(GL_BGRA, GL_UNSIGNED_INT_8_8_8_8_REV);
	case CELL_GCM_TEXTURE_G8B8: return std::make_tuple(GL_RG, GL_UNSIGNED_BYTE);
	case CELL_GCM_TEXTURE_R6G5B5: return std::make_tuple(GL_RGB, GL_UNSIGNED_SHORT_5_6_5); // approximated as 5_6_5
	case CELL_GCM_TEXTURE_DEPTH24_D8: return std::make_tuple(GL_DEPTH_STENCIL, GL_UNSIGNED_INT_24_8);
	case CELL_GCM_TEXTURE_DEPTH24_D8_FLOAT: return std::make_tuple(GL_DEPTH_STENCIL, GL_FLOAT_32_UNSIGNED_INT_24_8_REV);
	case CELL_GCM_TEXTURE_DEPTH16: return std::make_tuple(GL_DEPTH_COMPONENT, GL_UNSIGNED_SHORT);
	case CELL_GCM_TEXTURE_DEPTH16_FLOAT: return std::make_tuple(GL_DEPTH_COMPONENT, GL_FLOAT);
	case CELL_GCM_TEXTURE_X16: return std::make_tuple(GL_RED, GL_UNSIGNED_SHORT);
	case CELL_GCM_TEXTURE_Y16_X16: return std::make_tuple(GL_RG, GL_UNSIGNED_SHORT);
	case CELL_GCM_TEXTURE_R5G5B5A1: return std::make_tuple(GL_RGBA, GL_UNSIGNED_SHORT_5_5_5_1);
	case CELL_GCM_TEXTURE_W16_Z16_Y16_X16_FLOAT: return std::make_tuple(GL_RGBA, GL_HALF_FLOAT);
	case CELL_GCM_TEXTURE_W32_Z32_Y32_X32_FLOAT: return std::make_tuple(GL_RGBA, GL_FLOAT);
	case CELL_GCM_TEXTURE_X32_FLOAT: return std::make_tuple(GL_RED, GL_FLOAT);
	case CELL_GCM_TEXTURE_D1R5G5B5: return std::make_tuple(GL_BGRA, GL_UNSIGNED_SHORT_1_5_5_5_REV);
	case CELL_GCM_TEXTURE_D8R8G8B8: return std::make_tuple(GL_BGRA, GL_UNSIGNED_INT_8_8_8_8_REV);
	case CELL_GCM_TEXTURE_Y16_X16_FLOAT: return std::make_tuple(GL_RG, GL_HALF_FLOAT);
	// For compressed formats the "format" slot carries the compressed internal format.
	case CELL_GCM_TEXTURE_COMPRESSED_DXT1: return std::make_tuple(GL_COMPRESSED_RGBA_S3TC_DXT1_EXT, GL_UNSIGNED_BYTE);
	case CELL_GCM_TEXTURE_COMPRESSED_DXT23: return std::make_tuple(GL_COMPRESSED_RGBA_S3TC_DXT3_EXT, GL_UNSIGNED_BYTE);
	case CELL_GCM_TEXTURE_COMPRESSED_DXT45: return std::make_tuple(GL_COMPRESSED_RGBA_S3TC_DXT5_EXT, GL_UNSIGNED_BYTE);
	case CELL_GCM_TEXTURE_COMPRESSED_HILO8: return std::make_tuple(GL_RG, GL_UNSIGNED_BYTE);
	case CELL_GCM_TEXTURE_COMPRESSED_HILO_S8: return std::make_tuple(GL_RG, GL_BYTE);
	case CELL_GCM_TEXTURE_COMPRESSED_B8R8_G8R8: return std::make_tuple(GL_BGRA, GL_UNSIGNED_BYTE);
	case CELL_GCM_TEXTURE_COMPRESSED_R8B8_R8G8: return std::make_tuple(GL_BGRA, GL_UNSIGNED_BYTE);
	}

	fmt::throw_exception("Compressed or unknown texture format 0x%x", texture_format);
}
2017-03-29 21:27:29 +02:00
2019-10-02 02:47:19 +02:00
// Describe how texel data for a GL internal format is laid out in a pixel
// buffer: { transfer format, transfer type, element size in bytes, whether the
// elements must be byte-swapped relative to RSX memory order }.
pixel_buffer_layout get_format_type(texture::internal_format format)
{
	switch (format)
	{
	case texture::internal_format::compressed_rgba_s3tc_dxt1:
	case texture::internal_format::compressed_rgba_s3tc_dxt3:
	case texture::internal_format::compressed_rgba_s3tc_dxt5:
		// Compressed block data is transferred verbatim.
		return { GL_RGBA, GL_UNSIGNED_BYTE, 1, false };
	case texture::internal_format::r8:
		return { GL_RED, GL_UNSIGNED_BYTE, 1, false };
	case texture::internal_format::r16:
		return { GL_RED, GL_UNSIGNED_SHORT, 2, true };
	case texture::internal_format::r32f:
		return { GL_RED, GL_FLOAT, 4, true };
	case texture::internal_format::rg8:
		// NOTE(review): rg8 is transferred as swapped 16-bit units (GL_UNSIGNED_SHORT)
		// rather than per-byte GL_RG/GL_UNSIGNED_BYTE — presumably to match G8B8
		// component ordering in RSX memory; confirm against the upload path.
		return { GL_RG, GL_UNSIGNED_SHORT, 2, true };
	case texture::internal_format::rg16:
		return { GL_RG, GL_UNSIGNED_SHORT, 2, true };
	case texture::internal_format::rg16f:
		return { GL_RG, GL_HALF_FLOAT, 2, true };
	case texture::internal_format::rgb565:
		return { GL_RGB, GL_UNSIGNED_SHORT_5_6_5, 2, true };
	case texture::internal_format::rgb5a1:
		return { GL_RGB, GL_UNSIGNED_SHORT_5_5_5_1, 2, true };
	case texture::internal_format::rgba4:
		return { GL_BGRA, GL_UNSIGNED_SHORT_4_4_4_4, 2, false };
	case texture::internal_format::rgba8:
		return { GL_BGRA, GL_UNSIGNED_INT_8_8_8_8, 4, false };
	case texture::internal_format::rgba16f:
		return { GL_RGBA, GL_HALF_FLOAT, 2, true };
	case texture::internal_format::rgba32f:
		return { GL_RGBA, GL_FLOAT, 4, true };
	case texture::internal_format::depth16:
		return { GL_DEPTH_COMPONENT, GL_UNSIGNED_SHORT, 2, true };
	case texture::internal_format::depth32f:
		// NOTE(review): size=2 with GL_FLOAT looks inconsistent (f32 is 4 bytes);
		// likely reflects the 16-bit-per-texel RSX source data (DEPTH16_FLOAT) — verify.
		return { GL_DEPTH_COMPONENT, GL_FLOAT, 2, true };
	case texture::internal_format::depth24_stencil8:
	case texture::internal_format::depth32f_stencil8:
		// depth32f_stencil8 transfers are corrected to
		// GL_FLOAT_32_UNSIGNED_INT_24_8_REV by the texture-aware overload below.
		return { GL_DEPTH_STENCIL, GL_UNSIGNED_INT_24_8, 4, true };
	default:
		fmt::throw_exception("Unexpected internal format 0x%X", static_cast<u32>(format));
	}
}
2020-08-10 21:13:59 +02:00
// Resolve the pixel transfer layout for a concrete texture object.
// Render targets that alias rgba8 with an ABGR channel order get a corrected
// component layout, and the packed depth-float class overrides the data type.
pixel_buffer_layout get_format_type(const gl::texture* tex)
{
	const auto ifmt = tex->get_internal_format();

	if (ifmt == gl::texture::internal_format::rgba8)
	{
		// Multiple RTT layouts can map to this format. Override ABGR formats.
		if (auto rtt = dynamic_cast<const gl::render_target*>(tex))
		{
			switch (rtt->format_info.gcm_color_format)
			{
			case rsx::surface_color_format::x8b8g8r8_z8b8g8r8:
			case rsx::surface_color_format::x8b8g8r8_o8b8g8r8:
			case rsx::surface_color_format::a8b8g8r8:
				return { GL_RGBA, GL_UNSIGNED_INT_8_8_8_8, 4, false };
			default:
				break;
			}
		}
	}

	auto layout = get_format_type(ifmt);
	if (tex->format_class() == RSX_FORMAT_CLASS_DEPTH24_FLOAT_X8_PACK32)
	{
		layout.type = GL_FLOAT_32_UNSIGNED_INT_24_8_REV;
	}

	return layout;
}
2018-03-13 11:46:32 +01:00
// Return the sRGB-encoded sibling of a linear color format.
// Formats without a gamma-corrected counterpart are passed through unchanged.
GLenum get_srgb_format(GLenum in_format)
{
	switch (in_format)
	{
	case GL_COMPRESSED_RGBA_S3TC_DXT1_EXT: return GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT1_EXT;
	case GL_COMPRESSED_RGBA_S3TC_DXT3_EXT: return GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT3_EXT;
	case GL_COMPRESSED_RGBA_S3TC_DXT5_EXT: return GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT5_EXT;
	case GL_RGBA8: return GL_SRGB8_ALPHA8;
	default:
		//rsx_log.error("No gamma conversion for format 0x%X", in_format);
		return in_format;
	}
}
2017-03-29 21:27:29 +02:00
// Translate an RSX wrap mode into the equivalent GL sampler wrap enum.
// Unknown values are logged and fall back to GL_REPEAT.
GLenum wrap_mode(rsx::texture_wrap_mode wrap)
{
	switch (wrap)
	{
	case rsx::texture_wrap_mode::wrap: return GL_REPEAT;
	case rsx::texture_wrap_mode::mirror: return GL_MIRRORED_REPEAT;
	case rsx::texture_wrap_mode::clamp_to_edge: return GL_CLAMP_TO_EDGE;
	case rsx::texture_wrap_mode::border: return GL_CLAMP_TO_BORDER;
	case rsx::texture_wrap_mode::clamp: return GL_CLAMP_TO_EDGE; // core GL dropped legacy CLAMP; edge-clamp is the closest match
	case rsx::texture_wrap_mode::mirror_once_clamp_to_edge: return GL_MIRROR_CLAMP_TO_EDGE_EXT;
	case rsx::texture_wrap_mode::mirror_once_border: return GL_MIRROR_CLAMP_TO_BORDER_EXT;
	case rsx::texture_wrap_mode::mirror_once_clamp: return GL_MIRROR_CLAMP_EXT;
	}

	rsx_log.error("Texture wrap error: bad wrap (%d)", static_cast<u32>(wrap));
	return GL_REPEAT;
}
// Convert an RSX anisotropy setting into the float level expected by
// GL_TEXTURE_MAX_ANISOTROPY_EXT. Unknown values are logged and disable
// anisotropic filtering (1.0).
float max_aniso(rsx::texture_max_anisotropy aniso)
{
	switch (aniso)
	{
	case rsx::texture_max_anisotropy::x1: return 1.0f;
	case rsx::texture_max_anisotropy::x2: return 2.0f;
	case rsx::texture_max_anisotropy::x4: return 4.0f;
	case rsx::texture_max_anisotropy::x6: return 6.0f;
	case rsx::texture_max_anisotropy::x8: return 8.0f;
	case rsx::texture_max_anisotropy::x10: return 10.0f;
	case rsx::texture_max_anisotropy::x12: return 12.0f;
	case rsx::texture_max_anisotropy::x16: return 16.0f;
	}

	rsx_log.error("Texture anisotropy error: bad max aniso (%d)", static_cast<u32>(aniso));
	return 1.0f;
}
// Translate an RSX minification filter into the GL equivalent.
// convolution_min has no GL counterpart and degrades to trilinear.
int tex_min_filter(rsx::texture_minify_filter min_filter)
{
	switch (min_filter)
	{
	case rsx::texture_minify_filter::nearest: return GL_NEAREST;
	case rsx::texture_minify_filter::linear: return GL_LINEAR;
	case rsx::texture_minify_filter::nearest_nearest: return GL_NEAREST_MIPMAP_NEAREST;
	case rsx::texture_minify_filter::linear_nearest: return GL_LINEAR_MIPMAP_NEAREST;
	case rsx::texture_minify_filter::nearest_linear: return GL_NEAREST_MIPMAP_LINEAR;
	case rsx::texture_minify_filter::linear_linear: return GL_LINEAR_MIPMAP_LINEAR;
	case rsx::texture_minify_filter::convolution_min: return GL_LINEAR_MIPMAP_LINEAR;
	}

	fmt::throw_exception("Unknown min filter");
}
int tex_mag_filter ( rsx : : texture_magnify_filter mag_filter )
{
switch ( mag_filter )
{
case rsx : : texture_magnify_filter : : nearest : return GL_NEAREST ;
case rsx : : texture_magnify_filter : : linear : return GL_LINEAR ;
case rsx : : texture_magnify_filter : : convolution_mag : return GL_LINEAR ;
}
2020-12-09 16:04:52 +01:00
fmt : : throw_exception ( " Unknown mag filter " ) ;
2017-03-29 21:27:29 +02:00
}
2020-04-14 19:13:52 +02:00
// Apply sampler state settings
// Configure this GL sampler object from an RSX fragment texture descriptor.
void sampler_state::apply(const rsx::fragment_texture& tex, const rsx::sampled_image_descriptor_base* sampled_image)
{
	set_parameteri(GL_TEXTURE_WRAP_S, wrap_mode(tex.wrap_s()));
	set_parameteri(GL_TEXTURE_WRAP_T, wrap_mode(tex.wrap_t()));
	set_parameteri(GL_TEXTURE_WRAP_R, wrap_mode(tex.wrap_r()));

	// Border color is cached on the raw register value; only decode and
	// re-upload when it actually changed.
	if (const auto color = tex.border_color();
		get_parameteri(GL_TEXTURE_BORDER_COLOR) != color)
	{
		m_propertiesi[GL_TEXTURE_BORDER_COLOR] = color;
		const color4f border_color = rsx::decode_border_color(color);
		glSamplerParameterfv(samplerHandle, GL_TEXTURE_BORDER_COLOR, border_color.rgba);
	}

	// Images not sourced through the shader-read upload path, or with a single
	// mip level, have no usable mip chain: demote the min filter to its
	// non-mipmap variant and neutralize the LOD range/bias.
	if (sampled_image->upload_context != rsx::texture_upload_context::shader_read ||
		tex.get_exact_mipmap_count() == 1)
	{
		GLint min_filter = tex_min_filter(tex.min_filter());

		if (min_filter != GL_LINEAR && min_filter != GL_NEAREST)
		{
			switch (min_filter)
			{
			case GL_NEAREST_MIPMAP_NEAREST:
			case GL_NEAREST_MIPMAP_LINEAR:
				min_filter = GL_NEAREST; break;
			case GL_LINEAR_MIPMAP_NEAREST:
			case GL_LINEAR_MIPMAP_LINEAR:
				min_filter = GL_LINEAR; break;
			default:
				rsx_log.error("No mipmap fallback defined for rsx_min_filter = 0x%X", static_cast<u32>(tex.min_filter()));
				min_filter = GL_NEAREST;
			}
		}

		set_parameteri(GL_TEXTURE_MIN_FILTER, min_filter);
		set_parameterf(GL_TEXTURE_LOD_BIAS, 0.f);
		set_parameterf(GL_TEXTURE_MIN_LOD, -1000.f);
		set_parameterf(GL_TEXTURE_MAX_LOD, 1000.f);
	}
	else
	{
		// Full mip chain available: honor the game's filter, bias and LOD range.
		set_parameteri(GL_TEXTURE_MIN_FILTER, tex_min_filter(tex.min_filter()));
		set_parameterf(GL_TEXTURE_LOD_BIAS, tex.bias());
		set_parameterf(GL_TEXTURE_MIN_LOD, tex.min_lod());
		set_parameterf(GL_TEXTURE_MAX_LOD, tex.max_lod());
	}

	const f32 af_level = max_aniso(tex.max_aniso());
	set_parameterf(GL_TEXTURE_MAX_ANISOTROPY_EXT, af_level);
	set_parameteri(GL_TEXTURE_MAG_FILTER, tex_mag_filter(tex.mag_filter()));

	// Depth formats enable shadow comparison; strip the UN/LN layout bits first.
	const u32 texture_format = tex.format() & ~(CELL_GCM_TEXTURE_UN | CELL_GCM_TEXTURE_LN);
	if (texture_format == CELL_GCM_TEXTURE_DEPTH16 || texture_format == CELL_GCM_TEXTURE_DEPTH24_D8 ||
		texture_format == CELL_GCM_TEXTURE_DEPTH16_FLOAT || texture_format == CELL_GCM_TEXTURE_DEPTH24_D8_FLOAT)
	{
		//NOTE: The stored texture function is reversed wrt the textureProj compare function
		// zfunc values are offset onto the GL_NEVER..GL_ALWAYS range by OR-ing GL_NEVER (0x0200).
		GLenum compare_mode = static_cast<GLenum>(tex.zfunc()) | GL_NEVER;

		switch (compare_mode)
		{
		case GL_GREATER: compare_mode = GL_LESS; break;
		case GL_GEQUAL: compare_mode = GL_LEQUAL; break;
		case GL_LESS: compare_mode = GL_GREATER; break;
		case GL_LEQUAL: compare_mode = GL_GEQUAL; break;
		}

		set_parameteri(GL_TEXTURE_COMPARE_MODE, GL_COMPARE_REF_TO_TEXTURE);
		set_parameteri(GL_TEXTURE_COMPARE_FUNC, compare_mode);
	}
	else
		set_parameteri(GL_TEXTURE_COMPARE_MODE, GL_NONE);
}
2016-03-05 16:55:17 +01:00
2018-07-09 20:31:31 +02:00
// Configure this GL sampler object from an RSX vertex texture descriptor.
// Vertex textures always sample with NEAREST filtering and no depth compare;
// the descriptor parameter is unused here.
void sampler_state::apply(const rsx::vertex_texture& tex, const rsx::sampled_image_descriptor_base* /*sampled_image*/)
{
	// Border color is cached on the raw register value; only decode and
	// re-upload when it actually changed.
	if (const auto color = tex.border_color();
		get_parameteri(GL_TEXTURE_BORDER_COLOR) != color)
	{
		m_propertiesi[GL_TEXTURE_BORDER_COLOR] = color;
		const color4f border_color = rsx::decode_border_color(color);
		glSamplerParameterfv(samplerHandle, GL_TEXTURE_BORDER_COLOR, border_color.rgba);
	}

	set_parameteri(GL_TEXTURE_WRAP_S, wrap_mode(tex.wrap_s()));
	set_parameteri(GL_TEXTURE_WRAP_T, wrap_mode(tex.wrap_t()));
	set_parameteri(GL_TEXTURE_WRAP_R, wrap_mode(tex.wrap_r()));
	set_parameteri(GL_TEXTURE_MIN_FILTER, GL_NEAREST);
	set_parameteri(GL_TEXTURE_MAG_FILTER, GL_NEAREST);
	set_parameterf(GL_TEXTURE_LOD_BIAS, tex.bias());
	set_parameterf(GL_TEXTURE_MIN_LOD, tex.min_lod());
	set_parameterf(GL_TEXTURE_MAX_LOD, tex.max_lod());
	set_parameteri(GL_TEXTURE_COMPARE_MODE, GL_NONE);
}
2019-02-04 14:42:43 +01:00
void sampler_state : : apply_defaults ( GLenum default_filter )
2018-11-24 13:54:46 +01:00
{
2019-06-18 15:49:01 +02:00
set_parameteri ( GL_TEXTURE_WRAP_S , GL_REPEAT ) ;
set_parameteri ( GL_TEXTURE_WRAP_T , GL_REPEAT ) ;
set_parameteri ( GL_TEXTURE_WRAP_R , GL_REPEAT ) ;
set_parameteri ( GL_TEXTURE_MIN_FILTER , default_filter ) ;
set_parameteri ( GL_TEXTURE_MAG_FILTER , default_filter ) ;
set_parameterf ( GL_TEXTURE_LOD_BIAS , 0.f ) ;
set_parameteri ( GL_TEXTURE_MIN_LOD , 0 ) ;
set_parameteri ( GL_TEXTURE_MAX_LOD , 0 ) ;
set_parameteri ( GL_TEXTURE_COMPARE_MODE , GL_NONE ) ;
2018-11-24 13:54:46 +01:00
}
2016-03-05 16:55:17 +01:00
// Return the component remap vector (in A,R,G,B order) used to present a
// CELL_GCM format's channels correctly through GL texture swizzling.
std::array<GLenum, 4> get_swizzle_remap(u32 texture_format)
{
	// NOTE: This must be in ARGB order in all forms below.
	switch (texture_format)
	{
	case CELL_GCM_TEXTURE_A1R5G5B5:
	case CELL_GCM_TEXTURE_R5G5B5A1:
	case CELL_GCM_TEXTURE_R6G5B5:
	case CELL_GCM_TEXTURE_R5G6B5:
	case CELL_GCM_TEXTURE_A8R8G8B8:
	case CELL_GCM_TEXTURE_COMPRESSED_DXT1:
	case CELL_GCM_TEXTURE_COMPRESSED_DXT23:
	case CELL_GCM_TEXTURE_COMPRESSED_DXT45:
	case CELL_GCM_TEXTURE_COMPRESSED_B8R8_G8R8:
	case CELL_GCM_TEXTURE_COMPRESSED_R8B8_R8G8:
		// Identity mapping.
		return { GL_ALPHA, GL_RED, GL_GREEN, GL_BLUE };

	case CELL_GCM_TEXTURE_DEPTH24_D8:
	case CELL_GCM_TEXTURE_DEPTH24_D8_FLOAT:
	case CELL_GCM_TEXTURE_DEPTH16:
	case CELL_GCM_TEXTURE_DEPTH16_FLOAT:
		// Depth: broadcast the single depth channel everywhere.
		return { GL_RED, GL_RED, GL_RED, GL_RED };

	case CELL_GCM_TEXTURE_A4R4G4B4:
		return { GL_BLUE, GL_GREEN, GL_RED, GL_ALPHA };

	case CELL_GCM_TEXTURE_B8:
		// Single channel with opaque alpha.
		return { GL_ONE, GL_RED, GL_RED, GL_RED };

	case CELL_GCM_TEXTURE_X16:
		return { GL_RED, GL_ONE, GL_RED, GL_ONE };

	case CELL_GCM_TEXTURE_X32_FLOAT:
		return { GL_RED, GL_RED, GL_RED, GL_RED };

	case CELL_GCM_TEXTURE_G8B8:
		return { GL_GREEN, GL_RED, GL_GREEN, GL_RED };

	case CELL_GCM_TEXTURE_Y16_X16:
		return { GL_GREEN, GL_RED, GL_GREEN, GL_RED };

	case CELL_GCM_TEXTURE_Y16_X16_FLOAT:
		return { GL_RED, GL_GREEN, GL_RED, GL_GREEN };

	case CELL_GCM_TEXTURE_W16_Z16_Y16_X16_FLOAT:
	case CELL_GCM_TEXTURE_W32_Z32_Y32_X32_FLOAT:
		return { GL_ALPHA, GL_RED, GL_GREEN, GL_BLUE };

	case CELL_GCM_TEXTURE_D1R5G5B5:
	case CELL_GCM_TEXTURE_D8R8G8B8:
		// D* formats ignore the stored alpha/depth bit; force opaque alpha.
		return { GL_ONE, GL_RED, GL_GREEN, GL_BLUE };

	case CELL_GCM_TEXTURE_COMPRESSED_HILO8:
	case CELL_GCM_TEXTURE_COMPRESSED_HILO_S8:
		return { GL_RED, GL_GREEN, GL_RED, GL_GREEN };
	}

	fmt::throw_exception("Unknown format 0x%x", texture_format);
}
2020-09-06 17:17:08 +02:00
// Pick the byte-swap compute job matching the element size of the layout.
// Returns nullptr when the data is usable as-is (no swap requested, or
// single-byte elements where swapping is a no-op).
cs_shuffle_base* get_trivial_transform_job(const pixel_buffer_layout& pack_info)
{
	if (!pack_info.swap_bytes || pack_info.size == 1)
	{
		return nullptr;
	}

	if (pack_info.size == 2)
	{
		return get_compute_task<gl::cs_shuffle_16>();
	}

	if (pack_info.size == 4)
	{
		return get_compute_task<gl::cs_shuffle_32>();
	}

	fmt::throw_exception("Unsupported format");
}
2022-05-26 00:50:42 +02:00
// Download one mip level/region of `src` into the buffer `dst` (bound as
// PIXEL_PACK), then run any compute fixup needed where the GL pack layout does
// not match the expected RSX memory layout (byte swap, f32 -> f16, or
// d32f_s8 -> x8d24f repacking).
// Returns the byte offset inside `dst` where the converted data starts,
// encoded as a pointer (nullptr when the packed data is already final).
// `mem_info->memory_required` is updated in place when extra scratch space is
// needed for the conversion output.
void* copy_image_to_buffer(gl::command_context& cmd, const pixel_buffer_layout& pack_info, const gl::texture* src, gl::buffer* dst,
	const int src_level, const coord3u& src_region, image_memory_requirements* mem_info)
{
	// Grow `dst` if needed, bind it for pixel pack, and queue the GPU readback into it.
	auto initialize_scratch_mem = [&]()
	{
		const u64 max_mem = (mem_info->memory_required) ? mem_info->memory_required : mem_info->image_size_in_bytes;
		if (!(*dst) || max_mem > static_cast<u64>(dst->size()))
		{
			if (*dst) dst->remove();
			dst->create(buffer::target::pixel_pack, max_mem, nullptr, buffer::memory_type::local, GL_STATIC_COPY);
		}

		dst->bind(buffer::target::pixel_pack);
		src->copy_to(nullptr, static_cast<texture::format>(pack_info.format), static_cast<texture::type>(pack_info.type), src_level, src_region, {});
	};

	void* result = nullptr;

	if (src->aspect() == image_aspect::color ||
		pack_info.type == GL_UNSIGNED_SHORT ||
		pack_info.type == GL_UNSIGNED_INT_24_8)
	{
		// Color data and 16-bit / 24_8 depth need at most an in-place byte swap.
		initialize_scratch_mem();
		if (auto job = get_trivial_transform_job(pack_info))
		{
			job->run(cmd, dst, static_cast<u32>(mem_info->image_size_in_bytes));
		}
	}
	else if (pack_info.type == GL_FLOAT)
	{
		// 32-bit float depth is converted to swapped f16; output appended after
		// the raw data, so 1.5x the image size is required in total.
		ensure(mem_info->image_size_in_bytes == (mem_info->image_size_in_texels * 4));

		mem_info->memory_required = (mem_info->image_size_in_texels * 6);
		initialize_scratch_mem();

		get_compute_task<cs_fconvert_task<f32, f16, false, true>>()->run(cmd, dst, 0,
			static_cast<u32>(mem_info->image_size_in_bytes), static_cast<u32>(mem_info->image_size_in_bytes));
		result = reinterpret_cast<void*>(mem_info->image_size_in_bytes);
	}
	else if (pack_info.type == GL_FLOAT_32_UNSIGNED_INT_24_8_REV)
	{
		// Depth-float + stencil pairs are repacked d32fx8 -> x8d24f; output
		// appended after the raw data (1.5x total).
		ensure(mem_info->image_size_in_bytes == (mem_info->image_size_in_texels * 8));

		mem_info->memory_required = (mem_info->image_size_in_texels * 12);
		initialize_scratch_mem();

		get_compute_task<cs_shuffle_d32fx8_to_x8d24f>()->run(cmd, dst, 0,
			static_cast<u32>(mem_info->image_size_in_bytes), static_cast<u32>(mem_info->image_size_in_texels));
		result = reinterpret_cast<void*>(mem_info->image_size_in_bytes);
	}
	else
	{
		fmt::throw_exception("Invalid depth/stencil type 0x%x", pack_info.type);
	}

	// Make the compute writes visible to subsequent buffer reads / PBO consumers.
	glMemoryBarrier(GL_SHADER_STORAGE_BARRIER_BIT | GL_PIXEL_BUFFER_BARRIER_BIT);
	return result;
}
2022-05-26 00:50:42 +02:00
// Upload texel data from `src` (at byte offset `src_offset`) into one mip
// level/region of `dst`, running the inverse conversions of
// copy_image_to_buffer first where the source layout does not match what GL
// expects (byte swap, f16 -> f32, x8d24f -> d32fx8 repacking).
// Conversion output is written either into spare room in `src` or into a
// temporary scratch buffer when `src` is too small.
void copy_buffer_to_image(gl::command_context& cmd, const pixel_buffer_layout& unpack_info, gl::buffer* src, gl::texture* dst,
	const void* src_offset, const int dst_level, const coord3u& dst_region, image_memory_requirements* mem_info)
{
	buffer scratch_mem;
	buffer* transfer_buf = src;
	bool skip_barrier = false;
	u32 in_offset = static_cast<u32>(reinterpret_cast<u64>(src_offset));
	u32 out_offset = in_offset;

	// Decide where conversion output goes: reuse tail space in `src` if it
	// fits, otherwise copy the input into a fresh scratch buffer.
	auto initialize_scratch_mem = [&]()
	{
		if (in_offset >= mem_info->memory_required)
		{
			// Input already lives past the conversion output region; write in place.
			return;
		}

		const u64 max_mem = mem_info->memory_required + mem_info->image_size_in_bytes;
		if ((max_mem + in_offset) <= static_cast<u64>(src->size()))
		{
			// Enough headroom in `src`: place output directly after the input.
			out_offset = static_cast<u32>(in_offset + mem_info->image_size_in_bytes);
			return;
		}

		// Fall back to a dedicated scratch buffer holding input + output.
		scratch_mem.create(buffer::target::pixel_pack, max_mem, nullptr, buffer::memory_type::local, GL_STATIC_COPY);

		glMemoryBarrier(GL_BUFFER_UPDATE_BARRIER_BIT);
		src->copy_to(&scratch_mem, in_offset, 0, mem_info->image_size_in_bytes);

		in_offset = 0;
		out_offset = static_cast<u32>(mem_info->image_size_in_bytes);
		transfer_buf = &scratch_mem;
	};

	if (dst->aspect() == image_aspect::color ||
		unpack_info.type == GL_UNSIGNED_SHORT ||
		unpack_info.type == GL_UNSIGNED_INT_24_8)
	{
		// Color data and 16-bit / 24_8 depth need at most an in-place byte swap.
		if (auto job = get_trivial_transform_job(unpack_info))
		{
			job->run(cmd, src, static_cast<u32>(mem_info->image_size_in_bytes), in_offset);
		}
		else
		{
			// Nothing was dispatched, so no compute barrier is needed later.
			skip_barrier = true;
		}
	}
	else if (unpack_info.type == GL_FLOAT)
	{
		// f16 source expanded to f32 depth.
		mem_info->memory_required = (mem_info->image_size_in_texels * 4);
		initialize_scratch_mem();

		if (unpack_info.swap_bytes)
		{
			get_compute_task<cs_fconvert_task<f16, f32, true, false>>()->run(cmd, transfer_buf, in_offset, static_cast<u32>(mem_info->image_size_in_bytes), out_offset);
		}
		else
		{
			get_compute_task<cs_fconvert_task<f16, f32, false, false>>()->run(cmd, transfer_buf, in_offset, static_cast<u32>(mem_info->image_size_in_bytes), out_offset);
		}
	}
	else if (unpack_info.type == GL_FLOAT_32_UNSIGNED_INT_24_8_REV)
	{
		// x8d24f pairs repacked to d32f + stencil.
		mem_info->memory_required = (mem_info->image_size_in_texels * 8);
		initialize_scratch_mem();

		get_compute_task<cs_shuffle_x8d24f_to_d32fx8>()->run(cmd, transfer_buf, in_offset, out_offset, static_cast<u32>(mem_info->image_size_in_texels));
	}
	else
	{
		fmt::throw_exception("Invalid depth/stencil type 0x%x", unpack_info.type);
	}

	const auto caps = gl::get_driver_caps();
	if (dst->get_internal_format() == gl::texture::internal_format::depth24_stencil8 &&
		dst->get_target() == gl::texture::target::texture2D && // Only 2D output supported for the moment.
		!caps.vendor_NVIDIA && // NVIDIA has native support for D24X8 data as they introduced this extension.
		caps.ARB_shader_stencil_export_supported) // The driver needs to support stencil export at the very least
	{
		// This optimized path handles the data load on the GPU without context switching to compute.
		// The upside is that it is very fast if you have headroom.
		// The downside is that it is linear. Not that it matters that much as most drivers seem to be downloading the entire data source and doing really slow things with it.
		if (!skip_barrier)
		{
			glMemoryBarrier(GL_SHADER_STORAGE_BARRIER_BIT);
		}

		auto pass = gl::get_overlay_pass<gl::rp_ssbo_to_d24x8_texture>();
		pass->run(cmd, transfer_buf, dst, out_offset, { { dst_region.x, dst_region.y }, { dst_region.width, dst_region.height } }, {});
	}
	else
	{
		// Generic path: hand the converted data to the driver via PIXEL_UNPACK.
		if (!skip_barrier)
		{
			glMemoryBarrier(GL_PIXEL_BUFFER_BARRIER_BIT);
		}

		glBindBuffer(GL_SHADER_STORAGE_BUFFER, GL_NONE);
		transfer_buf->bind(buffer::target::pixel_unpack);

		dst->copy_from(reinterpret_cast<void*>(u64(out_offset)), static_cast<texture::format>(unpack_info.format),
			static_cast<texture::type>(unpack_info.type), dst_level, dst_region, {});
	}

	if (scratch_mem) scratch_mem.remove();
}
2018-07-17 18:42:51 +02:00
gl::viewable_image* create_texture(u32 gcm_format, u16 width, u16 height, u16 depth, u16 mipmaps,
	rsx::texture_dimension_extended type)
{
	// Translate the GCM texture description into GL terms and allocate the image.
	// The caller assumes ownership of the returned object.
	return new gl::viewable_image(
		get_target(type),                       // GL texture target (1D/2D/3D/cube)
		width, height, depth, mipmaps,
		get_sized_internal_format(gcm_format),  // sized GL internal format
		rsx::classify_format(gcm_format));      // color vs depth/stencil class
}
2022-05-26 00:50:42 +02:00
// Uploads all subresource levels/layers described by input_layouts into dst.
// Compressed host formats go straight through glCompressedTextureSubImage*;
// everything else is either uploaded via the classic pixel-unpack path or
// routed through compute shaders (byteswap/deswizzle) when required.
void fill_texture(gl::command_context& cmd, texture* dst, int format,
	const std::vector<rsx::subresource_layout>& input_layouts,
	bool is_swizzled, GLenum gl_format, GLenum gl_type, std::vector<std::byte>& staging_buffer)
{
	const auto driver_caps = gl::get_driver_caps();
	rsx::texture_uploader_capabilities caps
	{
		.supports_byteswap = true,
		.supports_vtc_decoding = false,
		// Hardware deswizzle needs compute shader support
		.supports_hw_deswizzle = driver_caps.ARB_compute_shader_supported,
		.supports_zero_copy = false,
		.alignment = 4
	};

	pixel_unpack_settings unpack_settings;
	unpack_settings.row_length(0).alignment(4);

	// Make sure no stale PBO bindings interfere with client-memory uploads
	glBindBuffer(GL_PIXEL_UNPACK_BUFFER, GL_NONE);
	glBindBuffer(GL_PIXEL_PACK_BUFFER, GL_NONE);

	if (rsx::is_compressed_host_format(format)) [[likely]]
	{
		// VTC-encoded 3D textures decode natively only on NVIDIA hardware
		caps.supports_vtc_decoding = driver_caps.vendor_NVIDIA;
		unpack_settings.apply();

		// DXT1 blocks are 8 bytes, DXT3/DXT5 blocks are 16 bytes
		const GLsizei format_block_size = (format == CELL_GCM_TEXTURE_COMPRESSED_DXT1) ? 8 : 16;

		for (const rsx::subresource_layout& layout : input_layouts)
		{
			// Decode/copy the subresource into the staging buffer
			upload_texture_subresource(staging_buffer, layout, format, is_swizzled, caps);
			switch (dst->get_target())
			{
			case texture::target::texture1D:
			{
				const GLsizei size = layout.width_in_block * format_block_size;
				ensure(usz(size) <= staging_buffer.size());
				DSA_CALL(CompressedTextureSubImage1D, dst->id(), GL_TEXTURE_1D, layout.level, 0, layout.width_in_texel, gl_format, size, staging_buffer.data());
				break;
			}
			case texture::target::texture2D:
			{
				const GLsizei size = layout.width_in_block * layout.height_in_block * format_block_size;
				ensure(usz(size) <= staging_buffer.size());
				DSA_CALL(CompressedTextureSubImage2D, dst->id(), GL_TEXTURE_2D, layout.level, 0, 0, layout.width_in_texel, layout.height_in_texel, gl_format, size, staging_buffer.data());
				break;
			}
			case texture::target::textureCUBE:
			{
				const GLsizei size = layout.width_in_block * layout.height_in_block * format_block_size;
				ensure(usz(size) <= staging_buffer.size());
				if (gl::get_driver_caps().ARB_dsa_supported)
				{
					// Core DSA addresses cube faces as 3D layers
					glCompressedTextureSubImage3D(dst->id(), layout.level, 0, 0, layout.layer, layout.width_in_texel, layout.height_in_texel, 1, gl_format, size, staging_buffer.data());
				}
				else
				{
					// EXT_dsa path addresses each face via its own 2D target enum
					glCompressedTextureSubImage2DEXT(dst->id(), GL_TEXTURE_CUBE_MAP_POSITIVE_X + layout.layer, layout.level, 0, 0, layout.width_in_texel, layout.height_in_texel, gl_format, size, staging_buffer.data());
				}
				break;
			}
			case texture::target::texture3D:
			{
				const GLsizei size = layout.width_in_block * layout.height_in_block * layout.depth * format_block_size;
				ensure(usz(size) <= staging_buffer.size());
				DSA_CALL(CompressedTextureSubImage3D, dst->id(), GL_TEXTURE_3D, layout.level, 0, 0, 0, layout.width_in_texel, layout.height_in_texel, layout.depth, gl_format, size, staging_buffer.data());
				break;
			}
			default:
			{
				fmt::throw_exception("Unreachable");
			}
			}
		}
	}
	else
	{
		bool apply_settings = true;
		// Swizzled sources are deswizzled on the GPU when possible
		bool use_compute_transform = is_swizzled;
		buffer* upload_scratch_mem = nullptr, * compute_scratch_mem = nullptr;
		image_memory_requirements mem_info;
		pixel_buffer_layout mem_layout;

		std::span<std::byte> dst_buffer = staging_buffer;
		void* out_pointer = staging_buffer.data();
		u8 block_size_in_bytes = rsx::get_format_block_size_in_bytes(format);
		u64 image_linear_size;

		switch (gl_type)
		{
		case GL_BYTE:
		case GL_UNSIGNED_BYTE:
			// Multi-channel format uploaded one byte at a time. This is due to poor driver support for formats like GL_UNSIGNED SHORT_8_8
			// Do byteswapping in software for now until compute acceleration is available
			apply_settings = (gl_format == GL_RED);
			caps.supports_byteswap = apply_settings;
			break;
		case GL_FLOAT:
		case GL_UNSIGNED_INT_24_8:
		case GL_FLOAT_32_UNSIGNED_INT_24_8_REV:
			// Depth/float formats always take the compute path (byteswap + repacking)
			mem_layout.swap_bytes = true;
			mem_layout.size = 4;
			use_compute_transform = true;
			apply_settings = false;
			break;
		}

		if (use_compute_transform)
		{
			// Grab (and grow if needed) the shared upload/decode scratch buffers
			std::tie(upload_scratch_mem, compute_scratch_mem) = prepare_compute_resources(staging_buffer.size());
			out_pointer = nullptr;
		}

		for (const rsx::subresource_layout& layout : input_layouts)
		{
			if (use_compute_transform)
			{
				// Write the decoded subresource directly into the mapped upload buffer
				const u64 row_pitch = rsx::align2<u64, u64>(layout.width_in_block * block_size_in_bytes, caps.alignment);
				image_linear_size = row_pitch * layout.height_in_block * layout.depth;
				dst_buffer = { reinterpret_cast<std::byte*>(upload_scratch_mem->map(0, image_linear_size, gl::buffer::access::write)), image_linear_size };
			}

			// op reports what post-processing (swap/deswizzle) is still pending
			auto op = upload_texture_subresource(dst_buffer, layout, format, is_swizzled, caps);

			// Define upload region
			coord3u region;
			region.x = 0;
			region.y = 0;
			region.z = layout.layer;
			region.width = layout.width_in_texel;
			region.height = layout.height_in_texel;
			region.depth = layout.depth;

			if (use_compute_transform)
			{
				// 0. Preconf
				mem_layout.swap_bytes = op.require_swap;
				mem_layout.format = gl_format;
				mem_layout.type = gl_type;

				// 1. Unmap buffer
				upload_scratch_mem->unmap();

				// 2. Upload memory to GPU
				if (!op.require_deswizzle)
				{
					upload_scratch_mem->copy_to(compute_scratch_mem, 0, 0, image_linear_size);
				}
				else
				{
					// 2.1 Copy data to deswizzle buf
					if (g_deswizzle_scratch_buffer.size() < static_cast<GLsizeiptr>(image_linear_size))
					{
						g_deswizzle_scratch_buffer.remove();
						g_deswizzle_scratch_buffer.create(gl::buffer::target::ssbo, image_linear_size, nullptr, gl::buffer::memory_type::local);
					}

					upload_scratch_mem->copy_to(&g_deswizzle_scratch_buffer, 0, 0, image_linear_size);

					// 2.2 Apply compute transform to deswizzle input and dump it in compute_scratch_mem
					ensure(op.element_size == 2 || op.element_size == 4);
					const auto block_size = op.element_size * op.block_length;

					if (op.require_swap)
					{
						// Swap is fused into the deswizzle dispatch; clear the flag so it is not applied twice
						mem_layout.swap_bytes = false;

						if (op.element_size == 4) [[likely]]
						{
							do_deswizzle_transformation<u32, true>(cmd, block_size, compute_scratch_mem, &g_deswizzle_scratch_buffer, static_cast<u32>(image_linear_size), layout.width_in_texel, layout.height_in_texel, layout.depth);
						}
						else
						{
							do_deswizzle_transformation<u16, true>(cmd, block_size, compute_scratch_mem, &g_deswizzle_scratch_buffer, static_cast<u32>(image_linear_size), layout.width_in_texel, layout.height_in_texel, layout.depth);
						}
					}
					else
					{
						if (op.element_size == 4) [[likely]]
						{
							do_deswizzle_transformation<u32, false>(cmd, block_size, compute_scratch_mem, &g_deswizzle_scratch_buffer, static_cast<u32>(image_linear_size), layout.width_in_texel, layout.height_in_texel, layout.depth);
						}
						else
						{
							do_deswizzle_transformation<u16, false>(cmd, block_size, compute_scratch_mem, &g_deswizzle_scratch_buffer, static_cast<u32>(image_linear_size), layout.width_in_texel, layout.height_in_texel, layout.depth);
						}
					}
				}

				// 3. Update configuration
				mem_info.image_size_in_texels = image_linear_size / block_size_in_bytes;
				mem_info.image_size_in_bytes = image_linear_size;
				mem_info.memory_required = 0;

				// 4. Dispatch compute routines
				copy_buffer_to_image(cmd, mem_layout, compute_scratch_mem, dst, nullptr, layout.level, region, &mem_info);
			}
			else
			{
				if (apply_settings)
				{
					// Byteswap setting is identical for all subresources; configure once
					unpack_settings.swap_bytes(op.require_swap);
					apply_settings = false;
				}

				dst->copy_from(out_pointer, static_cast<texture::format>(gl_format), static_cast<texture::type>(gl_type), layout.level, region, unpack_settings);
			}
		}
	}
}
2015-12-21 03:14:56 +01:00
2018-04-07 12:19:49 +02:00
std : : array < GLenum , 4 > apply_swizzle_remap ( const std : : array < GLenum , 4 > & swizzle_remap , const std : : pair < std : : array < u8 , 4 > , std : : array < u8 , 4 > > & decoded_remap )
2017-12-07 10:09:07 +01:00
{
//Remapping tables; format is A-R-G-B
//Remap input table. Contains channel index to read color from
const auto remap_inputs = decoded_remap . first ;
//Remap control table. Controls whether the remap value is used, or force either 0 or 1
const auto remap_lookup = decoded_remap . second ;
2018-04-07 12:19:49 +02:00
std : : array < GLenum , 4 > remap_values ;
2017-12-07 10:09:07 +01:00
for ( u8 channel = 0 ; channel < 4 ; + + channel )
{
switch ( remap_lookup [ channel ] )
{
default :
2020-02-01 09:07:25 +01:00
rsx_log . error ( " Unknown remap function 0x%X " , remap_lookup [ channel ] ) ;
2018-09-06 13:28:12 +02:00
[[fallthrough]] ;
2017-12-07 10:09:07 +01:00
case CELL_GCM_TEXTURE_REMAP_REMAP :
remap_values [ channel ] = swizzle_remap [ remap_inputs [ channel ] ] ;
break ;
case CELL_GCM_TEXTURE_REMAP_ZERO :
remap_values [ channel ] = GL_ZERO ;
break ;
case CELL_GCM_TEXTURE_REMAP_ONE :
remap_values [ channel ] = GL_ONE ;
break ;
}
}
2018-04-07 12:19:49 +02:00
return remap_values ;
2017-12-07 10:09:07 +01:00
}
2022-05-26 00:50:42 +02:00
void upload_texture ( gl : : command_context & cmd , texture * dst , u32 gcm_format , bool is_swizzled , const std : : vector < rsx : : subresource_layout > & subresources_layout )
2017-09-08 16:52:13 +02:00
{
2019-03-30 12:33:14 +01:00
// Calculate staging buffer size
2022-01-02 10:01:31 +01:00
std : : vector < std : : byte > data_upload_buf ;
if ( rsx : : is_compressed_host_format ( gcm_format ) )
{
const auto & desc = subresources_layout [ 0 ] ;
const usz texture_data_sz = desc . width_in_block * desc . height_in_block * desc . depth * rsx : : get_format_block_size_in_bytes ( gcm_format ) ;
data_upload_buf . resize ( texture_data_sz ) ;
}
else
{
const auto aligned_pitch = utils : : align < u32 > ( dst - > pitch ( ) , 4 ) ;
const usz texture_data_sz = dst - > depth ( ) * dst - > height ( ) * aligned_pitch ;
data_upload_buf . resize ( texture_data_sz ) ;
}
2015-12-21 03:14:56 +01:00
2019-08-27 16:01:36 +02:00
// TODO: GL drivers support byteswapping and this should be used instead of doing so manually
2017-09-14 13:37:14 +02:00
const auto format_type = get_format_type ( gcm_format ) ;
2018-07-17 18:42:51 +02:00
const GLenum gl_format = std : : get < 0 > ( format_type ) ;
2017-09-14 13:37:14 +02:00
const GLenum gl_type = std : : get < 1 > ( format_type ) ;
2022-05-26 00:50:42 +02:00
fill_texture ( cmd , dst , gcm_format , subresources_layout , is_swizzled , gl_format , gl_type , data_upload_buf ) ;
2015-12-21 03:14:56 +01:00
}
2018-04-07 17:16:52 +02:00
2019-04-05 13:39:43 +02:00
// Returns the per-texel transport width in bytes for the given internal format.
// NOTE(review): depth formats report their *transport* size, not storage size —
// GL_DEPTH_COMPONENT32F maps to 2, presumably because it is moved as half-float
// elsewhere in this backend; confirm against the depth transform rules.
u32 get_format_texel_width(GLenum format)
{
	switch (format)
	{
	// 1-byte formats
	case GL_R8:
		return 1;
	// 2-byte formats
	case GL_R16:
	case GL_RG8:
	case GL_RGB565:
	case GL_DEPTH_COMPONENT16:
	case GL_DEPTH_COMPONENT32F:
		return 2;
	// 4-byte formats (compressed formats report their decode texel width)
	case GL_R32F:
	case GL_RG16:
	case GL_RG16F:
	case GL_RGBA8:
	case GL_COMPRESSED_RGBA_S3TC_DXT1_EXT:
	case GL_COMPRESSED_RGBA_S3TC_DXT3_EXT:
	case GL_COMPRESSED_RGBA_S3TC_DXT5_EXT:
	case GL_DEPTH24_STENCIL8:
	case GL_DEPTH32F_STENCIL8:
		return 4;
	// Wide formats
	case GL_RGBA16F:
		return 8;
	case GL_RGBA32F:
		return 16;
	default:
		fmt::throw_exception("Unexpected internal format 0x%X", static_cast<u32>(format));
	}
}
std : : pair < bool , u32 > get_format_convert_flags ( GLenum format )
{
2022-02-13 11:53:17 +01:00
const auto texel_width = get_format_texel_width ( format ) ;
return { ( texel_width > 1 ) , texel_width } ;
2019-04-05 13:39:43 +02:00
}
// Formats are compatible if the following conditions are met:
// 1. Texel sizes must match
// 2. Both formats require no transforms (basic memcpy) or...
// 3. Both formats have the same transform (e.g RG16_UNORM to RG16_SFLOAT, both are down and uploaded with a 2-byte byteswap)
bool formats_are_bitcast_compatible(GLenum format1, GLenum format2)
{
	if (format1 == format2) [[likely]]
	{
		return true;
	}

	if (get_format_texel_width(format1) != get_format_texel_width(format2))
	{
		return false;
	}

	const auto [needs_transform_a, width_a] = get_format_convert_flags(format1);
	const auto [needs_transform_b, width_b] = get_format_convert_flags(format2);

	if (needs_transform_a != needs_transform_b)
	{
		return false;
	}

	// Either both are plain memcpy, or both apply the same-width transform
	return !needs_transform_a || (width_a == width_b);
}
2022-01-25 19:53:53 +01:00
bool formats_are_bitcast_compatible ( const texture * texture1 , const texture * texture2 )
{
if ( const u32 transfer_class = texture1 - > format_class ( ) | texture2 - > format_class ( ) ;
2022-05-25 20:05:53 +02:00
transfer_class > RSX_FORMAT_CLASS_COLOR )
2022-01-25 19:53:53 +01:00
{
2022-05-25 20:05:53 +02:00
// If any one of the two images is a depth format, the other must match exactly or bust
2022-01-25 19:53:53 +01:00
return ( texture1 - > format_class ( ) = = texture2 - > format_class ( ) ) ;
}
return formats_are_bitcast_compatible ( static_cast < GLenum > ( texture1 - > get_internal_format ( ) ) , static_cast < GLenum > ( texture2 - > get_internal_format ( ) ) ) ;
}
2022-05-26 00:50:42 +02:00
// Copies src into dst across incompatible internal formats by round-tripping
// the texel data through a buffer. Prefers a compute-shader transform when
// available; otherwise falls back to classic PBO pack/unpack with manual
// byteswap compensation.
void copy_typeless(gl::command_context& cmd, texture* dst, const texture* src, const coord3u& dst_region, const coord3u& src_region)
{
	// Bytes-per-texel derived from pitch/width; assumes pitch has no extra row padding — TODO confirm
	const auto src_bpp = src->pitch() / src->width();
	const auto dst_bpp = dst->pitch() / dst->width();
	image_memory_requirements src_mem = { src_region.width * src_region.height, src_region.width * src_bpp * src_region.height, 0ull };
	image_memory_requirements dst_mem = { dst_region.width * dst_region.height, dst_region.width * dst_bpp * dst_region.height, 0ull };

	const auto& caps = gl::get_driver_caps();
	auto pack_info = get_format_type(src);
	auto unpack_info = get_format_type(dst);

	// D32FS8 can be read back as D24S8 or D32S8X24. In case of the latter, double memory requirements
	if (pack_info.type == GL_FLOAT_32_UNSIGNED_INT_24_8_REV)
	{
		src_mem.image_size_in_bytes *= 2;
	}

	if (unpack_info.type == GL_FLOAT_32_UNSIGNED_INT_24_8_REV)
	{
		dst_mem.image_size_in_bytes *= 2;
	}

	if (caps.ARB_compute_shader_supported) [[likely]]
	{
		// If both sides are plain color with identical layout, the compute
		// transform can be a straight copy.
		bool skip_transform = false;
		if ((src->aspect() | dst->aspect()) == gl::image_aspect::color)
		{
			skip_transform = (pack_info.format == unpack_info.format &&
				pack_info.type == unpack_info.type &&
				pack_info.swap_bytes == unpack_info.swap_bytes &&
				pack_info.size == unpack_info.size);
		}

		if (skip_transform) [[likely]]
		{
			// Disable byteswap to make the transport operation passthrough
			pack_info.swap_bytes = false;
			unpack_info.swap_bytes = false;
		}

		// Pack src into the shared transfer buffer, then unpack into dst
		void* data_ptr = copy_image_to_buffer(cmd, pack_info, src, &g_typeless_transfer_buffer, 0, src_region, &src_mem);
		copy_buffer_to_image(cmd, unpack_info, &g_typeless_transfer_buffer, dst, data_ptr, 0, dst_region, &dst_mem);

		// Cleanup
		// NOTE: glBindBufferRange also binds the buffer to the old-school target.
		// Unbind it to avoid glitching later
		glBindBuffer(GL_SHADER_STORAGE_BUFFER, GL_NONE);
		glBindBuffer(GL_PIXEL_PACK_BUFFER, GL_NONE);
		glBindBuffer(GL_PIXEL_UNPACK_BUFFER, GL_NONE);
	}
	else
	{
		// No compute; make sure the shared transfer buffer can hold the larger side
		const u64 max_mem = std::max(src_mem.image_size_in_bytes, dst_mem.image_size_in_bytes);
		if (!g_typeless_transfer_buffer || max_mem > static_cast<u64>(g_typeless_transfer_buffer.size()))
		{
			if (g_typeless_transfer_buffer) g_typeless_transfer_buffer.remove();
			g_typeless_transfer_buffer.create(buffer::target::pixel_pack, max_mem, nullptr, buffer::memory_type::local, GL_STATIC_COPY);
		}

		// Simplify pack/unpack information to something OpenGL can natively digest
		auto remove_depth_transformation = [](const texture* tex, pixel_buffer_layout& pack_info)
		{
			if (tex->aspect() & image_aspect::depth)
			{
				switch (pack_info.type)
				{
				case GL_UNSIGNED_INT_24_8:
					pack_info.swap_bytes = false;
					break;
				case GL_FLOAT_32_UNSIGNED_INT_24_8_REV:
					// Transport D32FS8 as packed 24.8 to avoid the 64-bit texel path
					pack_info.type = GL_UNSIGNED_INT_24_8;
					pack_info.swap_bytes = false;
					break;
				case GL_FLOAT:
					// Depth floats are transported as half-float
					pack_info.type = GL_HALF_FLOAT;
					break;
				default: break;
				}
			}
		};
		remove_depth_transformation(src, pack_info);
		remove_depth_transformation(dst, unpack_info);

		// Attempt to compensate for the lack of compute shader modifiers
		// If crossing the aspect boundary between color and depth
		// and one image is depth, invert byteswap for the other one to compensate
		const auto cross_aspect_test = (image_aspect::color | image_aspect::depth);
		const auto test = (src->aspect() | dst->aspect()) & cross_aspect_test;
		if (test == cross_aspect_test)
		{
			if (src->aspect() & image_aspect::depth)
			{
				// Source is depth, modify unpack rule
				if (pack_info.size == 4 && unpack_info.size == 4)
				{
					unpack_info.swap_bytes = !unpack_info.swap_bytes;
				}
			}
			else
			{
				// Dest is depth, modify pack rule
				if (pack_info.size == 4 && unpack_info.size == 4)
				{
					pack_info.swap_bytes = !pack_info.swap_bytes;
				}
			}
		}

		// Start pack operation
		pixel_pack_settings pack_settings{};
		pack_settings.swap_bytes(pack_info.swap_bytes);
		g_typeless_transfer_buffer.bind(buffer::target::pixel_pack);
		src->copy_to(nullptr, static_cast<texture::format>(pack_info.format), static_cast<texture::type>(pack_info.type), 0, src_region, pack_settings);
		glBindBuffer(GL_PIXEL_PACK_BUFFER, GL_NONE);

		// Start unpack operation
		pixel_unpack_settings unpack_settings{};
		unpack_settings.swap_bytes(unpack_info.swap_bytes);
		g_typeless_transfer_buffer.bind(buffer::target::pixel_unpack);
		dst->copy_from(nullptr, static_cast<texture::format>(unpack_info.format), static_cast<texture::type>(unpack_info.type), 0, dst_region, unpack_settings);
		glBindBuffer(GL_PIXEL_UNPACK_BUFFER, GL_NONE);
	}
}
2019-10-12 00:05:05 +02:00
2022-05-26 00:50:42 +02:00
void copy_typeless ( gl : : command_context & cmd , texture * dst , const texture * src )
2019-10-12 00:05:05 +02:00
{
const coord3u src_area = { { } , src - > size3D ( ) } ;
const coord3u dst_area = { { } , dst - > size3D ( ) } ;
2022-05-26 00:50:42 +02:00
copy_typeless ( cmd , dst , src , dst_area , src_area ) ;
2019-10-12 00:05:05 +02:00
}
2015-12-21 04:35:56 +01:00
}