2019-06-28 20:35:43 +02:00
# pragma once
2016-02-21 16:50:49 +01:00
# include "stdafx.h"
# include "VKRenderTargets.h"
# include "VKGSRender.h"
2018-06-22 21:09:20 +02:00
# include "VKCompute.h"
2019-06-28 19:33:03 +02:00
# include "VKResourceManager.h"
2019-08-17 13:06:38 +02:00
# include "VKDMA.h"
2020-03-10 12:05:50 +01:00
# include "VKRenderPass.h"
2016-02-21 16:50:49 +01:00
# include "../Common/TextureUtils.h"
2017-07-24 19:50:32 +02:00
# include "Utilities/mutex.h"
2017-09-08 16:52:13 +02:00
# include "../Common/texture_cache.h"
2016-02-21 16:50:49 +01:00
2017-08-14 17:50:50 +02:00
extern u64 get_system_time ( ) ;
2016-02-21 16:50:49 +01:00
namespace vk
{
2018-10-28 14:59:39 +01:00
class cached_texture_section ;
class texture_cache ;
// Trait bundle that binds the generic rsx::texture_cache template to the
// Vulkan backend types. Consumed by rsx::texture_cache<texture_cache_type, texture_cache_traits>.
struct texture_cache_traits
{
	// Cache/section types for the VK implementation
	using texture_cache_type = vk::texture_cache;
	using texture_cache_base_type = rsx::texture_cache<texture_cache_type, texture_cache_traits>;
	using section_storage_type = vk::cached_texture_section;

	// Backend primitives used by the generic cache code
	using commandbuffer_type = vk::command_buffer;
	using image_resource_type = vk::image*;
	using image_view_type = vk::image_view*;
	using image_storage_type = vk::image;
	using texture_format = VkFormat;
};
// A single cached texture section backed by a Vulkan image.
// Extends the generic rsx cached section with VK-specific resource ownership
// (optional managed image) and DMA write-back machinery (event-fenced copies
// from the GPU image into the DMA heap).
class cached_texture_section : public rsx::cached_texture_section<vk::cached_texture_section, vk::texture_cache_traits>
{
	using baseclass = typename rsx::cached_texture_section<vk::cached_texture_section, vk::texture_cache_traits>;
	friend baseclass;

	// Owned image when this section manages its own VRAM copy; null when the
	// section merely references an externally owned image (e.g. a render target)
	std::unique_ptr<vk::viewable_image> managed_texture = nullptr;

	//DMA relevant data
	// Event signalled when the queued GPU->host copy has executed
	std::unique_ptr<vk::event> dma_fence;
	vk::render_device* m_device = nullptr;
	// The image actually sampled/copied from; equals managed_texture.get() when managed
	vk::viewable_image* vram_texture = nullptr;

public:
	using baseclass::cached_texture_section;

	// (Re)initializes this section's dimensions, pitch and backing image.
	// 'managed' transfers ownership of 'image' to this section.
	// NOTE(review): takes ownership via unique_ptr::reset — caller must not free 'image' when managed == true.
	void create(u16 w, u16 h, u16 depth, u16 mipmaps, vk::image* image, u32 rsx_pitch, bool managed, u32 gcm_format, bool pack_swap_bytes = false)
	{
		auto new_texture = static_cast<vk::viewable_image*>(image);
		ASSERT(!exists() || !is_managed() || vram_texture == new_texture);
		vram_texture = new_texture;

		verify(HERE), rsx_pitch;

		width = w;
		height = h;
		this->depth = depth;
		this->mipmaps = mipmaps;
		this->rsx_pitch = rsx_pitch;

		this->gcm_format = gcm_format;
		this->pack_unpack_swap_bytes = pack_swap_bytes;

		if (managed)
		{
			managed_texture.reset(vram_texture);
		}

		if (synchronized)
		{
			// Even if we are managing the same vram section, we cannot guarantee contents are static
			// The create method is only invoked when a new managed session is required
			release_dma_resources();
			synchronized = false;
			flushed = false;
			sync_timestamp = 0ull;
		}

		// Notify baseclass
		baseclass::on_section_resources_created();
	}

	// Hands the DMA sync event to the resource manager for deferred destruction
	// (the GPU may still reference it in-flight).
	void release_dma_resources()
	{
		if (dma_fence)
		{
			auto gc = vk::get_resource_manager();
			gc->dispose(dma_fence);
		}
	}

	void dma_abort() override
	{
		// Called if a reset occurs, usually via reprotect path after a bad prediction.
		// Discard the sync event, the next sync, if any, will properly recreate this.
		verify(HERE), synchronized, !flushed, dma_fence;
		vk::get_resource_manager()->dispose(dma_fence);
	}

	// Tears down section resources and notifies the owning cache.
	// The managed image must already have been taken by the cache
	// (see texture_cache::on_section_destroyed).
	void destroy()
	{
		if (!exists() && context != rsx::texture_upload_context::dma)
			return;

		m_tex_cache->on_section_destroyed(*this);

		vram_texture = nullptr;
		ASSERT(!managed_texture);
		release_dma_resources();

		baseclass::on_section_resources_destroyed();
	}

	// True while a backing image is attached
	bool exists() const
	{
		return (vram_texture != nullptr);
	}

	// "Managed" means we own the image (or no image is attached yet)
	bool is_managed() const
	{
		return !exists() || managed_texture;
	}

	vk::image_view* get_view(u32 remap_encoding, const std::pair<std::array<u8, 4>, std::array<u8, 4>>& remap)
	{
		ASSERT(vram_texture != nullptr);
		return vram_texture->get_view(remap_encoding, remap);
	}

	// View with the identity remap (0xAAE4 is the default RSX channel encoding)
	vk::image_view* get_raw_view()
	{
		ASSERT(vram_texture != nullptr);
		return vram_texture->get_view(0xAAE4, rsx::default_remap_vector);
	}

	// NOTE(review): returns the managed image only — null for unmanaged
	// (e.g. framebuffer-backed) sections; verify callers expect that.
	vk::image* get_raw_texture()
	{
		return managed_texture.get();
	}

	// Exposes ownership of the managed image (moved from when the cache
	// relocates the resource into temporary storage / the disposal queue)
	std::unique_ptr<vk::viewable_image>& get_texture()
	{
		return managed_texture;
	}

	VkFormat get_format() const
	{
		// DMA-only sections have no image; report a raw 32-bit view
		if (context == rsx::texture_upload_context::dma)
		{
			return VK_FORMAT_R32_UINT;
		}

		ASSERT(vram_texture != nullptr);
		return vram_texture->format();
	}

	bool is_flushed() const
	{
		//This memory section was flushable, but a flush has already removed protection
		return flushed;
	}

	// Records a GPU->host copy of 'src' into the DMA heap covering 'valid_range',
	// handling byteswap/depth-stencil packing via compute where needed, then
	// queues an event signal so imp_flush() can wait for completion.
	void dma_transfer(vk::command_buffer& cmd, vk::image* src, const areai& src_area, const utils::address_range& valid_range, u32 pitch)
	{
		verify(HERE), src->samples() == 1;

		if (!m_device)
		{
			m_device = &cmd.get_command_pool().get_owner();
		}

		if (dma_fence)
		{
			// NOTE: This can be reached if previously synchronized, or a special path happens.
			// If a hard flush occurred while this surface was flush_always the cache would have reset its protection afterwards.
			// DMA resource would still be present but already used to flush previously.
			vk::get_resource_manager()->dispose(dma_fence);
		}

		// Transfer commands are illegal inside a render pass
		if (vk::is_renderpass_open(cmd))
		{
			vk::end_renderpass(cmd);
		}

		src->push_layout(cmd, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);

		const auto internal_bpp = vk::get_format_texel_width(src->format());
		const auto transfer_width = static_cast<u32>(src_area.width());
		const auto transfer_height = static_cast<u32>(src_area.height());
		real_pitch = internal_bpp * transfer_width;
		rsx_pitch = pitch;

		const bool is_depth_stencil = !!(src->aspect() & VK_IMAGE_ASPECT_STENCIL_BIT);
		if (is_depth_stencil || pack_unpack_swap_bytes)
		{
			// Slow path: stage through a scratch buffer so the data can be
			// byteswapped/interleaved before landing in the DMA mapping
			const auto section_length = valid_range.length();
			const auto transfer_pitch = real_pitch;
			const auto task_length = transfer_pitch * src_area.height();

			auto working_buffer = vk::get_scratch_buffer(task_length);
			auto final_mapping = vk::map_dma(cmd, valid_range.start, section_length);

			VkBufferImageCopy region = {};
			region.imageSubresource = { src->aspect(), 0, 0, 1 };
			region.imageOffset = { src_area.x1, src_area.y1, 0 };
			region.imageExtent = { transfer_width, transfer_height, 1 };
			vk::copy_image_to_buffer(cmd, src, working_buffer, region, (is_depth_stencil && pack_unpack_swap_bytes));

			// NOTE: For depth-stencil formats, copying to buffer and byteswap are combined into one step above
			if (pack_unpack_swap_bytes && !is_depth_stencil)
			{
				const auto texel_layout = vk::get_format_element_size(src->format());
				const auto elem_size = texel_layout.first;
				vk::cs_shuffle_base* shuffle_kernel;

				if (elem_size == 2)
				{
					shuffle_kernel = vk::get_compute_task<vk::cs_shuffle_16>();
				}
				else if (elem_size == 4)
				{
					shuffle_kernel = vk::get_compute_task<vk::cs_shuffle_32>();
				}
				else
				{
					fmt::throw_exception("Unreachable" HERE);
				}

				// transfer-write -> compute-read hazard before the shuffle
				vk::insert_buffer_memory_barrier(cmd, working_buffer->value, 0, task_length,
					VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
					VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT);

				shuffle_kernel->run(cmd, working_buffer, task_length);

				// compute-write -> transfer-read hazard before the final buffer copy
				vk::insert_buffer_memory_barrier(cmd, working_buffer->value, 0, task_length,
					VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
					VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT);
			}

			if (rsx_pitch == real_pitch) [[likely]]
			{
				// Tightly packed: single buffer-to-buffer copy
				VkBufferCopy copy = {};
				copy.dstOffset = final_mapping.first;
				copy.size = section_length;
				vkCmdCopyBuffer(cmd, working_buffer->value, final_mapping.second->value, 1, &copy);
			}
			else
			{
				if (context != rsx::texture_upload_context::dma)
				{
					// Partial load for the bits outside the existing image
					// NOTE: A true DMA section would have been prepped beforehand
					// TODO: Parial range load/flush
					vk::load_dma(valid_range.start, section_length);
				}

				// Pitch mismatch: copy row by row into the destination pitch
				std::vector<VkBufferCopy> copy;
				copy.reserve(transfer_height);

				u32 dst_offset = final_mapping.first;
				u32 src_offset = 0;

				for (unsigned row = 0; row < transfer_height; ++row)
				{
					copy.push_back({ src_offset, dst_offset, transfer_pitch });
					src_offset += real_pitch;
					dst_offset += rsx_pitch;
				}

				vkCmdCopyBuffer(cmd, working_buffer->value, final_mapping.second->value, transfer_height, copy.data());
			}
		}
		else
		{
			// Fast path: direct image-to-DMA-buffer copy, letting
			// bufferRowLength express the destination pitch
			VkBufferImageCopy region = {};
			region.bufferRowLength = (rsx_pitch / internal_bpp);
			region.imageSubresource = { src->aspect(), 0, 0, 1 };
			region.imageOffset = { src_area.x1, src_area.y1, 0 };
			region.imageExtent = { transfer_width, transfer_height, 1 };

			auto mapping = vk::map_dma(cmd, valid_range.start, valid_range.length());
			region.bufferOffset = mapping.first;
			vkCmdCopyImageToBuffer(cmd, src->value, src->current_layout, mapping.second->value, 1, &region);
		}

		src->pop_layout(cmd);

		// Create event object for this transfer and queue signal op
		dma_fence = std::make_unique<vk::event>(*m_device);
		dma_fence->signal(cmd, VK_PIPELINE_STAGE_TRANSFER_BIT);

		// Set cb flag for queued dma operations
		cmd.set_flag(vk::command_buffer::cb_has_dma_transfer);

		synchronized = true;
		sync_timestamp = get_system_time();
	}

	// Resolves the source image (including MSAA render targets), clips the
	// transfer to the confirmed range, then records the DMA copy.
	// 'miss' selects miss vs speculative-flush statistics on the baseclass.
	void copy_texture(vk::command_buffer& cmd, bool miss)
	{
		ASSERT(exists());

		if (!miss) [[likely]]
		{
			verify(HERE), !synchronized;
			baseclass::on_speculative_flush();
		}
		else
		{
			baseclass::on_miss();
		}

		if (m_device == nullptr)
		{
			m_device = &cmd.get_command_pool().get_owner();
		}

		vk::image* locked_resource = vram_texture;
		u32 transfer_width = width;
		u32 transfer_height = height;
		u32 transfer_x = 0, transfer_y = 0;

		if (context == rsx::texture_upload_context::framebuffer_storage)
		{
			// Render targets may be multisampled; scale dimensions to pixel space
			auto surface = vk::as_rtt(vram_texture);
			surface->read_barrier(cmd);
			locked_resource = surface->get_surface(rsx::surface_access::read);
			transfer_width *= surface->samples_x;
			transfer_height *= surface->samples_y;
		}

		vk::image* target = locked_resource;
		if (transfer_width != locked_resource->width() || transfer_height != locked_resource->height())
		{
			// TODO: Synchronize access to typeles textures
			target = vk::get_typeless_helper(vram_texture->info.format, transfer_width, transfer_height);
			target->change_layout(cmd, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);

			// Allow bilinear filtering on color textures where compatibility is likely
			const auto filter = (target->aspect() == VK_IMAGE_ASPECT_COLOR_BIT) ? VK_FILTER_LINEAR : VK_FILTER_NEAREST;

			vk::copy_scaled_image(cmd, locked_resource->value, target->value, locked_resource->current_layout, target->current_layout,
				{ 0, 0, static_cast<s32>(locked_resource->width()), static_cast<s32>(locked_resource->height()) }, { 0, 0, static_cast<s32>(transfer_width), static_cast<s32>(transfer_height) },
				1, target->aspect(), true, filter, vram_texture->format(), target->format());

			target->change_layout(cmd, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
		}

		const auto internal_bpp = vk::get_format_texel_width(vram_texture->format());
		const auto valid_range = get_confirmed_range();

		if (const auto section_range = get_section_range(); section_range != valid_range)
		{
			// Only part of the section is confirmed dirty; shrink the transfer window
			if (const auto offset = (valid_range.start - get_section_base()))
			{
				transfer_y = offset / rsx_pitch;
				transfer_x = (offset % rsx_pitch) / internal_bpp;

				verify(HERE), transfer_width >= transfer_x, transfer_height >= transfer_y;
				transfer_width -= transfer_x;
				transfer_height -= transfer_y;
			}

			if (const auto tail = (section_range.end - valid_range.end))
			{
				const auto row_count = tail / rsx_pitch;

				verify(HERE), transfer_height >= row_count;
				transfer_height -= row_count;
			}
		}

		areai src_area;
		src_area.x1 = static_cast<s32>(transfer_x);
		src_area.y1 = static_cast<s32>(transfer_y);
		src_area.x2 = s32(transfer_x + transfer_width);
		src_area.y2 = s32(transfer_y + transfer_height);
		dma_transfer(cmd, target, src_area, valid_range, rsx_pitch);
	}

	/**
	 * Flush
	 */
	// Blocks until the queued DMA copy has executed, then commits the data
	// to guest-visible memory.
	void imp_flush() override
	{
		AUDIT(synchronized);

		// Synchronize, reset dma_fence after waiting
		vk::wait_for_event(dma_fence.get(), GENERAL_WAIT_TIMEOUT);

		const auto range = get_confirmed_range();
		vk::flush_dma(range.start, range.length());

		if (context == rsx::texture_upload_context::framebuffer_storage)
		{
			// Update memory tag
			static_cast<vk::render_target*>(vram_texture)->sync_tag();
		}
	}

	// No host-visible staging pointer in this backend; data goes through the DMA heap
	void* map_synchronized(u32, u32)
	{ return nullptr; }

	void finish_flush()
	{ }

	/**
	 * Misc
	 */
	void set_unpack_swap_bytes(bool swap_bytes)
	{
		pack_unpack_swap_bytes = swap_bytes;
	}

	bool is_synchronized() const
	{
		return synchronized;
	}

	// Exact VkFormat match only; bitcast compatibility is handled elsewhere
	bool has_compatible_format(vk::image* tex) const
	{
		return vram_texture->info.format == tex->info.format;
	}

	bool is_depth_texture() const
	{
		switch (vram_texture->info.format)
		{
		case VK_FORMAT_D16_UNORM:
		case VK_FORMAT_D32_SFLOAT_S8_UINT:
		case VK_FORMAT_D24_UNORM_S8_UINT:
			return true;
		default:
			return false;
		}
	}
};
2018-08-25 00:47:36 +02:00
2019-07-01 13:09:44 +02:00
struct temporary_storage
2017-09-22 19:29:12 +02:00
{
2018-07-17 18:42:51 +02:00
std : : unique_ptr < vk : : viewable_image > combined_image ;
2019-10-17 20:35:04 +02:00
bool can_reuse = false ;
2017-09-22 19:29:12 +02:00
2019-07-01 13:09:44 +02:00
// Memory held by this temp storage object
2017-10-24 14:58:53 +02:00
u32 block_size = 0 ;
2019-07-01 13:09:44 +02:00
// Frame id tag
2017-09-22 19:29:12 +02:00
const u64 frame_tag = vk : : get_current_frame_id ( ) ;
2019-07-01 13:09:44 +02:00
temporary_storage ( std : : unique_ptr < vk : : viewable_image > & _img )
2017-09-22 19:29:12 +02:00
{
2019-07-01 13:09:44 +02:00
combined_image = std : : move ( _img ) ;
2017-09-22 19:29:12 +02:00
}
2019-07-01 13:09:44 +02:00
temporary_storage ( vk : : cached_texture_section & tex )
2017-09-22 19:29:12 +02:00
{
2018-07-17 18:42:51 +02:00
combined_image = std : : move ( tex . get_texture ( ) ) ;
2017-10-24 14:58:53 +02:00
block_size = tex . get_section_size ( ) ;
2017-09-22 19:29:12 +02:00
}
const bool test ( u64 ref_frame ) const
{
return ref_frame > 0 & & frame_tag < = ref_frame ;
}
2019-07-01 13:09:44 +02:00
2019-10-09 21:29:23 +02:00
bool matches ( VkFormat format , u16 w , u16 h , u16 d , u16 mipmaps , VkFlags flags ) const
2019-07-01 13:09:44 +02:00
{
if ( combined_image & &
combined_image - > info . flags = = flags & &
combined_image - > format ( ) = = format & &
combined_image - > width ( ) = = w & &
combined_image - > height ( ) = = h & &
2019-10-09 21:29:23 +02:00
combined_image - > depth ( ) = = d & &
combined_image - > mipmaps ( ) = = mipmaps )
2019-07-01 13:09:44 +02:00
{
return true ;
}
return false ;
}
2017-09-22 19:29:12 +02:00
} ;
2017-02-16 19:29:56 +01:00
2018-10-28 14:59:39 +01:00
class texture_cache : public rsx : : texture_cache < vk : : texture_cache , vk : : texture_cache_traits >
2017-02-16 19:29:56 +01:00
{
2018-10-28 14:59:39 +01:00
private :
using baseclass = rsx : : texture_cache < vk : : texture_cache , vk : : texture_cache_traits > ;
friend baseclass ;
2018-09-22 02:14:26 +02:00
public :
2018-10-28 14:59:39 +01:00
// Called by the section teardown path. For sections that own their image,
// hand the image to the resource manager for deferred destruction so the
// GPU can finish any in-flight work first.
void on_section_destroyed(cached_texture_section& tex) override
{
	if (!tex.is_managed())
	{
		// Unmanaged sections reference externally owned images; nothing to release
		return;
	}

	vk::get_resource_manager()->dispose(tex.get_texture());
}
2017-02-16 19:29:56 +01:00
private :
2018-09-22 02:14:26 +02:00
2017-09-08 16:52:13 +02:00
//Vulkan internals
vk : : render_device * m_device ;
vk : : memory_type_mapping m_memory_types ;
vk : : gpu_formats_support m_formats_support ;
VkQueue m_submit_queue ;
2018-10-20 16:43:00 +02:00
vk : : data_heap * m_texture_upload_heap ;
2017-07-24 19:50:32 +02:00
2017-08-04 23:11:14 +02:00
//Stuff that has been dereferenced goes into these
2019-07-01 13:09:44 +02:00
std : : list < temporary_storage > m_temporary_storage ;
std : : atomic < u32 > m_temporary_memory_size = { 0 } ;
2017-11-03 12:16:55 +01:00
2018-09-22 02:14:26 +02:00
void clear ( )
2016-08-24 02:50:07 +02:00
{
2018-09-22 02:14:26 +02:00
baseclass : : clear ( ) ;
2016-08-24 02:50:07 +02:00
2019-07-01 13:09:44 +02:00
m_temporary_storage . clear ( ) ;
m_temporary_memory_size = 0 ;
2016-08-24 02:50:07 +02:00
}
2017-12-07 10:09:07 +01:00
2019-10-08 17:07:36 +02:00
// Builds the VkComponentMapping for a texture view from its GCM format,
// the requested component-order flags and the RSX channel remap vector.
// Depth formats always broadcast the R channel regardless of flags.
VkComponentMapping apply_component_mapping_flags(u32 gcm_format, rsx::texture_create_flags flags, const rsx::texture_channel_remap_t& remap_vector) const
{
	switch (gcm_format)
	{
	case CELL_GCM_TEXTURE_DEPTH24_D8:
	case CELL_GCM_TEXTURE_DEPTH24_D8_FLOAT:
	case CELL_GCM_TEXTURE_DEPTH16:
	case CELL_GCM_TEXTURE_DEPTH16_FLOAT:
		//Dont bother letting this propagate
		return { VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_R };
	default:
		break;
	}

	if (flags == rsx::texture_create_flags::default_component_order)
	{
		// Apply the guest-provided channel remap on top of the format's native mapping
		return vk::apply_swizzle_remap(vk::get_component_mapping(gcm_format), remap_vector);
	}

	if (flags == rsx::texture_create_flags::native_component_order)
	{
		return { VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G, VK_COMPONENT_SWIZZLE_B, VK_COMPONENT_SWIZZLE_A };
	}

	if (flags == rsx::texture_create_flags::swapped_native_component_order)
	{
		return { VK_COMPONENT_SWIZZLE_A, VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G, VK_COMPONENT_SWIZZLE_B };
	}

	// Unknown flag combination: identity-initialized mapping (same as original fallthrough)
	return {};
}
2018-08-25 00:47:36 +02:00
2019-02-25 16:03:14 +01:00
// Copies a list of source regions into the destination image, handling
// coordinate-space conversion, MSAA sample scaling, typeless (bitcast)
// transfers, scaling and ARGB<->BGRA shuffles via compute as needed.
// Statement/barrier order is significant throughout.
void copy_transfer_regions_impl(vk::command_buffer& cmd, vk::image* dst, const std::vector<copy_region_descriptor>& sections_to_transfer) const
{
	const auto dst_aspect = dst->aspect();
	const auto dst_bpp = vk::get_format_texel_width(dst->format());

	for (const auto& section : sections_to_transfer)
	{
		if (!section.src)
			continue;

		// A direct vkCmdCopyImage is only legal between bitcast-compatible
		// formats with matching aspects; otherwise go through the typeless path
		const bool typeless = section.src->aspect() != dst_aspect ||
			!formats_are_bitcast_compatible(dst->format(), section.src->format());

		// Avoid inserting unnecessary barrier GENERAL->TRANSFER_SRC->GENERAL in active render targets
		const auto preferred_layout = (section.src->current_layout != VK_IMAGE_LAYOUT_GENERAL) ?
			VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL;

		section.src->push_layout(cmd, preferred_layout);

		auto src_image = section.src;
		auto src_x = section.src_x;
		auto src_y = section.src_y;
		auto src_w = section.src_w;
		auto src_h = section.src_h;

		rsx::flags32_t transform = section.xform;
		if (section.xform == rsx::surface_transform::coordinate_transform)
		{
			// Dimensions were given in 'dst' space. Work out the real source coordinates
			const auto src_bpp = vk::get_format_texel_width(section.src->format());
			src_x = (src_x * dst_bpp) / src_bpp;
			src_w = (src_w * dst_bpp) / src_bpp;

			transform &= ~(rsx::surface_transform::coordinate_transform);
		}

		if (auto surface = dynamic_cast<vk::render_target*>(section.src))
		{
			// MSAA surfaces store coordinates in sample space; convert to pixels
			surface->transform_samples_to_pixels(src_x, src_w, src_y, src_h);
		}

		if (typeless) [[unlikely]]
		{
			// Convert the horizontal extent into dst-bpp units for the bitcast
			const auto src_bpp = vk::get_format_texel_width(section.src->format());
			const u16 convert_w = u16(src_w * src_bpp) / dst_bpp;
			const u16 convert_x = u16(src_x * src_bpp) / dst_bpp;

			if (convert_w == section.dst_w && src_h == section.dst_h &&
				transform == rsx::surface_transform::identity &&
				section.level == 0 && section.dst_z == 0)
			{
				// Optimization to avoid double transfer
				// TODO: Handle level and layer offsets
				const areai src_rect = coordi{ { src_x, src_y }, { src_w, src_h } };
				const areai dst_rect = coordi{ { section.dst_x, section.dst_y }, { section.dst_w, section.dst_h } };
				vk::copy_image_typeless(cmd, section.src, dst, src_rect, dst_rect, 1, section.src->aspect(), dst_aspect);

				section.src->pop_layout(cmd);
				continue;
			}

			// Bitcast into a helper image first; subsequent steps treat it as the source
			src_image = vk::get_typeless_helper(dst->info.format, convert_x + convert_w, src_y + src_h);
			src_image->change_layout(cmd, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);

			const areai src_rect = coordi{ { src_x, src_y }, { src_w, src_h } };
			const areai dst_rect = coordi{ { convert_x, src_y }, { convert_w, src_h } };
			vk::copy_image_typeless(cmd, section.src, src_image, src_rect, dst_rect, 1, section.src->aspect(), dst_aspect);
			src_image->change_layout(cmd, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);

			src_x = convert_x;
			src_w = convert_w;
		}

		verify(HERE), src_image->current_layout == VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL || src_image->current_layout == VK_IMAGE_LAYOUT_GENERAL;

		// Final aspect mask of the 'final' transfer source
		const auto new_src_aspect = src_image->aspect();

		if (src_w == section.dst_w && src_h == section.dst_h && transform == rsx::surface_transform::identity) [[likely]]
		{
			// 1:1 copy; select layer vs depth-slice addressing by image type
			VkImageCopy copy_rgn;
			copy_rgn.srcOffset = { src_x, src_y, 0 };
			copy_rgn.dstOffset = { section.dst_x, section.dst_y, 0 };
			copy_rgn.dstSubresource = { dst_aspect, 0, 0, 1 };
			copy_rgn.srcSubresource = { new_src_aspect, 0, 0, 1 };
			copy_rgn.extent = { src_w, src_h, 1 };

			if (dst->info.imageType == VK_IMAGE_TYPE_3D)
			{
				copy_rgn.dstOffset.z = section.dst_z;
			}
			else
			{
				copy_rgn.dstSubresource.baseArrayLayer = section.dst_z;
				copy_rgn.dstSubresource.mipLevel = section.level;
			}

			vkCmdCopyImage(cmd, src_image->value, src_image->current_layout, dst->value, dst->current_layout, 1, &copy_rgn);
		}
		else
		{
			verify(HERE), section.dst_z == 0;

			u16 dst_x = section.dst_x, dst_y = section.dst_y;
			vk::image* _dst;

			if (src_image->info.format == dst->info.format && section.level == 0) [[likely]]
			{
				_dst = dst;
			}
			else
			{
				// Either a bitcast is required or a scale+copy to mipmap level
				_dst = vk::get_typeless_helper(src_image->info.format, dst->width(), dst->height() * 2);
				_dst->change_layout(cmd, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
			}

			if (transform == rsx::surface_transform::identity)
			{
				// Scaled blit straight into _dst
				vk::copy_scaled_image(cmd, src_image->value, _dst->value, section.src->current_layout, _dst->current_layout,
					coordi{ { src_x, src_y }, { src_w, src_h } },
					coordi{ { section.dst_x, section.dst_y }, { section.dst_w, section.dst_h } },
					1, src_image->aspect(), src_image->info.format == _dst->info.format,
					VK_FILTER_NEAREST, src_image->info.format, _dst->info.format);
			}
			else if (transform == rsx::surface_transform::argb_to_bgra)
			{
				// Channel shuffle: round-trip through a scratch buffer and a
				// compute shuffle kernel, then back into a helper image
				VkBufferImageCopy copy{};
				copy.imageExtent = { src_w, src_h, 1 };
				copy.imageOffset = { src_x, src_y, 0 };
				copy.imageSubresource = { src_image->aspect(), 0, 0, 1 };

				const auto mem_length = src_w * src_h * dst_bpp;
				auto scratch_buf = vk::get_scratch_buffer(mem_length);
				vkCmdCopyImageToBuffer(cmd, src_image->value, src_image->current_layout, scratch_buf->value, 1, &copy);

				vk::insert_buffer_memory_barrier(cmd, scratch_buf->value, 0, mem_length, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
					VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT);

				auto shuffle_kernel = vk::get_compute_task<vk::cs_shuffle_32>();
				shuffle_kernel->run(cmd, scratch_buf, mem_length);

				vk::insert_buffer_memory_barrier(cmd, scratch_buf->value, 0, mem_length, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
					VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT);

				auto tmp = vk::get_typeless_helper(src_image->info.format, section.dst_x + section.dst_w, section.dst_y + section.dst_h);
				tmp->change_layout(cmd, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);

				copy.imageOffset = { 0, 0, 0 };
				vkCmdCopyBufferToImage(cmd, scratch_buf->value, tmp->value, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &copy);

				dst_x = 0;
				dst_y = 0;

				if (src_w != section.dst_w || src_h != section.dst_h)
				{
					// Optionally scale if needed
					if (tmp == _dst) [[unlikely]]
					{
						// In-place scale within the same helper; offset vertically to avoid overlap
						dst_y = src_h;
					}

					vk::copy_scaled_image(cmd, tmp->value, _dst->value, tmp->current_layout, _dst->current_layout,
						areai{ 0, 0, src_w, static_cast<s32>(src_h) },
						coordi{ { dst_x, dst_y }, { section.dst_w, section.dst_h } },
						1, new_src_aspect, tmp->info.format == _dst->info.format,
						VK_FILTER_NEAREST, tmp->info.format, _dst->info.format);
				}
				else
				{
					_dst = tmp;
				}
			}
			else
			{
				fmt::throw_exception("Unreachable" HERE);
			}

			if (_dst != dst) [[unlikely]]
			{
				// Casting comes after the scaling!
				VkImageCopy copy_rgn;
				copy_rgn.srcOffset = { s32(dst_x), s32(dst_y), 0 };
				copy_rgn.dstOffset = { section.dst_x, section.dst_y, 0 };
				copy_rgn.dstSubresource = { dst_aspect, section.level, 0, 1 };
				copy_rgn.srcSubresource = { _dst->aspect(), 0, 0, 1 };
				copy_rgn.extent = { section.dst_w, section.dst_h, 1 };

				_dst->change_layout(cmd, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
				vkCmdCopyImage(cmd, _dst->value, _dst->current_layout, dst->value, dst->current_layout, 1, &copy_rgn);
			}
		}

		section.src->pop_layout(cmd);
	}
}
2019-02-25 16:03:14 +01:00
vk : : image * get_template_from_collection_impl ( const std : : vector < copy_region_descriptor > & sections_to_transfer ) const
{
2020-02-05 08:00:08 +01:00
if ( sections_to_transfer . size ( ) = = 1 ) [ [ likely ] ]
2019-02-26 11:31:32 +01:00
{
return sections_to_transfer . front ( ) . src ;
}
2019-02-25 16:03:14 +01:00
vk : : image * result = nullptr ;
for ( const auto & section : sections_to_transfer )
{
if ( ! section . src )
continue ;
if ( ! result )
{
result = section . src ;
}
else
{
if ( section . src - > native_component_map . a ! = result - > native_component_map . a | |
section . src - > native_component_map . r ! = result - > native_component_map . r | |
section . src - > native_component_map . g ! = result - > native_component_map . g | |
section . src - > native_component_map . b ! = result - > native_component_map . b )
{
// TODO
// This requires a far more complex setup as its not always possible to mix and match without compute assistance
return nullptr ;
}
}
}
return result ;
}
2019-10-09 21:29:23 +02:00
std : : unique_ptr < vk : : viewable_image > find_temporary_image ( VkFormat format , u16 w , u16 h , u16 d , u8 mipmaps )
2019-07-01 13:09:44 +02:00
{
const auto current_frame = vk : : get_current_frame_id ( ) ;
for ( auto & e : m_temporary_storage )
{
2019-10-17 20:35:04 +02:00
if ( e . can_reuse & & e . matches ( format , w , h , d , mipmaps , 0 ) )
2019-07-01 13:09:44 +02:00
{
m_temporary_memory_size - = e . block_size ;
e . block_size = 0 ;
return std : : move ( e . combined_image ) ;
}
}
return { } ;
}
std : : unique_ptr < vk : : viewable_image > find_temporary_cubemap ( VkFormat format , u16 size )
{
const auto current_frame = vk : : get_current_frame_id ( ) ;
for ( auto & e : m_temporary_storage )
{
2019-10-17 20:35:04 +02:00
if ( e . can_reuse & & e . matches ( format , size , size , 1 , 1 , VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT ) )
2019-07-01 13:09:44 +02:00
{
m_temporary_memory_size - = e . block_size ;
e . block_size = 0 ;
return std : : move ( e . combined_image ) ;
}
}
return { } ;
}
2017-09-08 16:52:13 +02:00
protected :
2018-03-18 12:40:26 +01:00
	// Allocates (or recycles) a temporary device-local image in the sampler-compatible format
	// for 'gcm_format', optionally copies the (x, y, w, h) region from 'source' into it, and
	// returns a view using 'remap_vector'. The image is registered in m_temporary_storage so a
	// later release can recycle it.
	vk::image_view* create_temporary_subresource_view_impl(vk::command_buffer& cmd, vk::image* source, VkImageType image_type, VkImageViewType view_type,
		u32 gcm_format, u16 x, u16 y, u16 w, u16 h, const rsx::texture_channel_remap_t& remap_vector, bool copy)
	{
		std::unique_ptr<vk::viewable_image> image;

		VkImageCreateFlags image_flags = (view_type == VK_IMAGE_VIEW_TYPE_CUBE) ? VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT : 0;
		VkFormat dst_format = vk::get_compatible_sampler_format(m_formats_support, gcm_format);

		// Try to recycle a previously released temporary of matching shape/format first
		if (!image_flags) [[likely]]
		{
			image = find_temporary_image(dst_format, w, h, 1, 1);
		}
		else
		{
			image = find_temporary_cubemap(dst_format, w);
		}

		if (!image)
		{
			image = std::make_unique<vk::viewable_image>(*vk::get_current_renderer(), m_memory_types.device_local, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
				image_type,
				dst_format,
				w, h, 1, 1, 1, VK_SAMPLE_COUNT_1_BIT, VK_IMAGE_LAYOUT_UNDEFINED,
				VK_IMAGE_TILING_OPTIMAL, VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT, image_flags);
		}

		// This method is almost exclusively used to work on framebuffer resources.
		// Keep the original swizzle layout unless there is data format conversion.
		VkComponentMapping view_swizzle;
		if (!source || dst_format != source->info.format)
		{
			// This is a data cast operation
			// Use native mapping for the new type
			// TODO: Also simulate the readback+reupload step (very tricky)
			const auto remap = get_component_mapping(gcm_format);
			view_swizzle = { remap[1], remap[2], remap[3], remap[0] };
		}
		else
		{
			view_swizzle = source->native_component_map;
		}

		image->set_native_component_layout(view_swizzle);
		auto view = image->get_view(get_remap_encoding(remap_vector), remap_vector);

		if (copy)
		{
			std::vector<copy_region_descriptor> region =
			{ {
				source,
				rsx::surface_transform::coordinate_transform,
				0,
				x, y, 0, 0, 0,
				w, h, w, h
			} };

			vk::change_image_layout(cmd, image.get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
			copy_transfer_regions_impl(cmd, image.get(), region);
			vk::change_image_layout(cmd, image.get(), VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
		}

		// Track the temporary so it can be recycled later; size is a coarse estimate only
		const u32 resource_memory = w * h * 4; //Rough approximate
		m_temporary_storage.emplace_back(image);
		m_temporary_storage.back().block_size = resource_memory;
		m_temporary_memory_size += resource_memory;

		return view;
	}
2018-03-18 12:40:26 +01:00
vk : : image_view * create_temporary_subresource_view ( vk : : command_buffer & cmd , vk : : image * source , u32 gcm_format ,
2019-10-08 17:07:36 +02:00
u16 x , u16 y , u16 w , u16 h , const rsx : : texture_channel_remap_t & remap_vector ) override
2017-11-02 16:54:57 +01:00
{
2018-03-18 12:40:26 +01:00
return create_temporary_subresource_view_impl ( cmd , source , source - > info . imageType , VK_IMAGE_VIEW_TYPE_2D ,
gcm_format , x , y , w , h , remap_vector , true ) ;
2017-11-02 16:54:57 +01:00
}
2018-03-18 12:40:26 +01:00
vk : : image_view * create_temporary_subresource_view ( vk : : command_buffer & cmd , vk : : image * * source , u32 gcm_format ,
2019-10-08 17:07:36 +02:00
u16 x , u16 y , u16 w , u16 h , const rsx : : texture_channel_remap_t & remap_vector ) override
2017-08-27 15:22:59 +02:00
{
2018-03-18 12:40:26 +01:00
return create_temporary_subresource_view ( cmd , * source , gcm_format , x , y , w , h , remap_vector ) ;
2017-08-27 15:22:59 +02:00
}
2018-03-18 12:40:26 +01:00
	// Assembles a cubemap (6 layers, 'size' x 'size') from the given source regions.
	// The destination is cleared first so faces without a source read as a defined value.
	vk::image_view* generate_cubemap_from_images(vk::command_buffer& cmd, u32 gcm_format, u16 size,
		const std::vector<copy_region_descriptor>& sections_to_copy, const rsx::texture_channel_remap_t& /*remap_vector*/) override
	{
		std::unique_ptr<vk::viewable_image> image;
		VkFormat dst_format = vk::get_compatible_sampler_format(m_formats_support, gcm_format);
		VkImageAspectFlags dst_aspect = vk::get_aspect_flags(dst_format);

		if (image = find_temporary_cubemap(dst_format, size); !image)
		{
			image = std::make_unique<vk::viewable_image>(*vk::get_current_renderer(), m_memory_types.device_local, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
				VK_IMAGE_TYPE_2D,
				dst_format,
				size, size, 1, 1, 6, VK_SAMPLE_COUNT_1_BIT, VK_IMAGE_LAYOUT_UNDEFINED,
				VK_IMAGE_TILING_OPTIMAL, VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT);
		}
		// NOTE(review): the component layout is only (re)applied on the recycle paths below;
		// a freshly created image keeps its constructor-assigned mapping — confirm intended.
		else if (auto src = sections_to_copy[0].src; src && src->format() == dst_format)
		{
			image->set_native_component_layout(src->native_component_map);
		}
		else
		{
			image->set_native_component_layout({ VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G, VK_COMPONENT_SWIZZLE_B, VK_COMPONENT_SWIZZLE_A });
		}

		// 0xAAE4 presumably encodes the default remap vector passed alongside it — see get_remap_encoding
		auto view = image->get_view(0xAAE4, rsx::default_remap_vector);

		// Clear all 6 faces before compositing the source regions
		VkImageSubresourceRange dst_range = { dst_aspect, 0, 1, 0, 6 };
		vk::change_image_layout(cmd, image.get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, dst_range);

		if (!(dst_aspect & VK_IMAGE_ASPECT_DEPTH_BIT))
		{
			VkClearColorValue clear = {};
			vkCmdClearColorImage(cmd, image->value, image->current_layout, &clear, 1, &dst_range);
		}
		else
		{
			VkClearDepthStencilValue clear = { 1.f, 0 };
			vkCmdClearDepthStencilImage(cmd, image->value, image->current_layout, &clear, 1, &dst_range);
		}

		copy_transfer_regions_impl(cmd, image.get(), sections_to_copy);

		vk::change_image_layout(cmd, image.get(), VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, dst_range);

		// Track the temporary so it can be recycled later; size is a coarse estimate only
		const u32 resource_memory = size * size * 6 * 4; //Rough approximate
		m_temporary_storage.emplace_back(image);
		m_temporary_storage.back().block_size = resource_memory;
		m_temporary_memory_size += resource_memory;

		return view;
	}
	// Assembles a 3D texture from a stack of 2D source regions (one per depth slice).
	// The destination is cleared first so any uncovered region reads as a defined value.
	vk::image_view* generate_3d_from_2d_images(vk::command_buffer& cmd, u32 gcm_format, u16 width, u16 height, u16 depth,
		const std::vector<copy_region_descriptor>& sections_to_copy, const rsx::texture_channel_remap_t& /*remap_vector*/) override
	{
		std::unique_ptr<vk::viewable_image> image;
		VkFormat dst_format = vk::get_compatible_sampler_format(m_formats_support, gcm_format);
		VkImageAspectFlags dst_aspect = vk::get_aspect_flags(dst_format);

		if (image = find_temporary_image(dst_format, width, height, depth, 1); !image)
		{
			image = std::make_unique<vk::viewable_image>(*vk::get_current_renderer(), m_memory_types.device_local, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
				VK_IMAGE_TYPE_3D,
				dst_format,
				width, height, depth, 1, 1, VK_SAMPLE_COUNT_1_BIT, VK_IMAGE_LAYOUT_UNDEFINED,
				VK_IMAGE_TILING_OPTIMAL, VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT, 0);
		}
		// NOTE(review): the component layout is only (re)applied on the recycle paths below;
		// a freshly created image keeps its constructor-assigned mapping — confirm intended.
		else if (auto src = sections_to_copy[0].src; src && src->format() == dst_format)
		{
			image->set_native_component_layout(src->native_component_map);
		}
		else
		{
			image->set_native_component_layout({ VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G, VK_COMPONENT_SWIZZLE_B, VK_COMPONENT_SWIZZLE_A });
		}

		// 0xAAE4 presumably encodes the default remap vector passed alongside it — see get_remap_encoding
		auto view = image->get_view(0xAAE4, rsx::default_remap_vector);

		VkImageSubresourceRange dst_range = { dst_aspect, 0, 1, 0, 1 };
		vk::change_image_layout(cmd, image.get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, dst_range);

		if (!(dst_aspect & VK_IMAGE_ASPECT_DEPTH_BIT))
		{
			VkClearColorValue clear = {};
			vkCmdClearColorImage(cmd, image->value, image->current_layout, &clear, 1, &dst_range);
		}
		else
		{
			VkClearDepthStencilValue clear = { 1.f, 0 };
			vkCmdClearDepthStencilImage(cmd, image->value, image->current_layout, &clear, 1, &dst_range);
		}

		copy_transfer_regions_impl(cmd, image.get(), sections_to_copy);

		vk::change_image_layout(cmd, image.get(), VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, dst_range);

		// Track the temporary so it can be recycled later; size is a coarse estimate only
		const u32 resource_memory = width * height * depth * 4; //Rough approximate
		m_temporary_storage.emplace_back(image);
		m_temporary_storage.back().block_size = resource_memory;
		m_temporary_memory_size += resource_memory;

		return view;
	}
2018-03-18 12:40:26 +01:00
	// Composites multiple source regions into a single 2D atlas. A template image chosen
	// from the sources (if their component maps agree) drives swizzle selection inside
	// create_temporary_subresource_view_impl; the atlas is cleared before compositing.
	vk::image_view* generate_atlas_from_images(vk::command_buffer& cmd, u32 gcm_format, u16 width, u16 height,
		const std::vector<copy_region_descriptor>& sections_to_copy, const rsx::texture_channel_remap_t& remap_vector) override
	{
		auto _template = get_template_from_collection_impl(sections_to_copy);
		// copy=false: the regions are composited below instead of copied from '_template'
		auto result = create_temporary_subresource_view_impl(cmd, _template, VK_IMAGE_TYPE_2D,
			VK_IMAGE_VIEW_TYPE_2D, gcm_format, 0, 0, width, height, remap_vector, false);

		const auto image = result->image();
		VkImageAspectFlags dst_aspect = vk::get_aspect_flags(result->info.format);
		VkImageSubresourceRange dst_range = { dst_aspect, 0, 1, 0, 1 };
		vk::change_image_layout(cmd, image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, dst_range);

		// Clear the whole destination first so gaps between regions read as a defined value
		if (!(dst_aspect & VK_IMAGE_ASPECT_DEPTH_BIT))
		{
			VkClearColorValue clear = {};
			vkCmdClearColorImage(cmd, image->value, image->current_layout, &clear, 1, &dst_range);
		}
		else
		{
			VkClearDepthStencilValue clear = { 1.f, 0 };
			vkCmdClearDepthStencilImage(cmd, image->value, image->current_layout, &clear, 1, &dst_range);
		}

		copy_transfer_regions_impl(cmd, image, sections_to_copy);

		vk::change_image_layout(cmd, image, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, dst_range);
		return result;
	}
2019-12-26 21:01:48 +01:00
	// Builds a mipmapped 2D texture where each entry of 'sections_to_copy' supplies one
	// mip level; format and image type are taken from the first section's source.
	vk::image_view* generate_2d_mipmaps_from_images(vk::command_buffer& cmd, u32 /*gcm_format*/, u16 width, u16 height,
		const std::vector<copy_region_descriptor>& sections_to_copy, const rsx::texture_channel_remap_t& remap_vector) override
	{
		const auto _template = sections_to_copy.front().src;
		// One section per mip level
		const auto mipmaps = ::narrow<u8>(sections_to_copy.size());

		std::unique_ptr<vk::viewable_image> image;
		if (image = find_temporary_image(_template->format(), width, height, 1, mipmaps); !image)
		{
			image = std::make_unique<vk::viewable_image>(*vk::get_current_renderer(), m_memory_types.device_local, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
				_template->info.imageType,
				_template->info.format,
				width, height, 1, mipmaps, 1, VK_SAMPLE_COUNT_1_BIT, VK_IMAGE_LAYOUT_UNDEFINED,
				VK_IMAGE_TILING_OPTIMAL, VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT, 0);

			// NOTE(review): the component layout is only set on newly created images here; a
			// recycled image keeps whatever layout its previous user assigned — confirm intended.
			image->set_native_component_layout(_template->native_component_map);
		}

		auto view = image->get_view(get_remap_encoding(remap_vector), remap_vector);

		VkImageSubresourceRange dst_range = { _template->aspect(), 0, mipmaps, 0, 1 };
		vk::change_image_layout(cmd, image.get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, dst_range);
		copy_transfer_regions_impl(cmd, image.get(), sections_to_copy);
		vk::change_image_layout(cmd, image.get(), VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, dst_range);

		// Track the temporary so it can be recycled later; the x2 factor over-approximates
		// the additional memory consumed by the mip chain
		const u32 resource_memory = width * height * 2 * 4; // Rough approximate
		m_temporary_storage.emplace_back(image);
		m_temporary_storage.back().block_size = resource_memory;
		m_temporary_memory_size += resource_memory;
		return view;
	}
2019-10-17 20:35:04 +02:00
void release_temporary_subresource ( vk : : image_view * view ) override
{
auto handle = dynamic_cast < vk : : viewable_image * > ( view - > image ( ) ) ;
for ( auto & e : m_temporary_storage )
{
if ( e . combined_image . get ( ) = = handle )
{
e . can_reuse = true ;
return ;
}
}
}
2018-02-21 11:46:23 +01:00
void update_image_contents ( vk : : command_buffer & cmd , vk : : image_view * dst_view , vk : : image * src , u16 width , u16 height ) override
{
2019-04-05 13:39:43 +02:00
std : : vector < copy_region_descriptor > region =
2019-10-08 17:07:36 +02:00
{ {
2019-04-05 13:39:43 +02:00
src ,
2019-10-08 17:07:36 +02:00
rsx : : surface_transform : : identity ,
2019-10-09 21:29:23 +02:00
0 ,
2019-04-05 13:39:43 +02:00
0 , 0 , 0 , 0 , 0 ,
width , height , width , height
} } ;
auto dst = dst_view - > image ( ) ;
dst - > push_layout ( cmd , VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL ) ;
copy_transfer_regions_impl ( cmd , dst , region ) ;
dst - > pop_layout ( cmd ) ;
2018-02-21 11:46:23 +01:00
}
2018-12-18 18:04:03 +01:00
	// Creates a new cached texture section backed by a freshly allocated device-local image.
	// The image is left in TRANSFER_DST layout ready for upload, and the section is locked or
	// tracked according to the upload context. Returns the prepared section.
	cached_texture_section* create_new_texture(vk::command_buffer& cmd, const utils::address_range& rsx_range, u16 width, u16 height, u16 depth, u16 mipmaps, u16 pitch,
		u32 gcm_format, rsx::texture_upload_context context, rsx::texture_dimension_extended type, rsx::texture_create_flags flags) override
	{
		// Preserve the caller's depth for section bookkeeping; 'depth' itself is
		// normalized per dimension type below
		const u16 section_depth = depth;
		const bool is_cubemap = type == rsx::texture_dimension_extended::texture_dimension_cubemap;
		VkFormat vk_format;
		VkImageAspectFlags aspect_flags;
		VkImageType image_type;
		VkImageViewType image_view_type;
		VkImageUsageFlags usage_flags = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
		u8 layer = 0;

		// Map the RSX texture dimension onto Vulkan image/view types and layer counts
		switch (type)
		{
		case rsx::texture_dimension_extended::texture_dimension_1d:
			image_type = VK_IMAGE_TYPE_1D;
			image_view_type = VK_IMAGE_VIEW_TYPE_1D;
			height = 1;
			depth = 1;
			layer = 1;
			break;
		case rsx::texture_dimension_extended::texture_dimension_2d:
			image_type = VK_IMAGE_TYPE_2D;
			image_view_type = VK_IMAGE_VIEW_TYPE_2D;
			depth = 1;
			layer = 1;
			break;
		case rsx::texture_dimension_extended::texture_dimension_cubemap:
			image_type = VK_IMAGE_TYPE_2D;
			image_view_type = VK_IMAGE_VIEW_TYPE_CUBE;
			depth = 1;
			layer = 6;
			break;
		case rsx::texture_dimension_extended::texture_dimension_3d:
			image_type = VK_IMAGE_TYPE_3D;
			image_view_type = VK_IMAGE_VIEW_TYPE_3D;
			layer = 1;
			break;
		default:
			ASSUME(0);
			break;
		}

		// Choose the Vulkan format and aspect; depth formats also gain DS attachment usage
		// so the image can later serve as a render target
		switch (gcm_format)
		{
		case CELL_GCM_TEXTURE_DEPTH24_D8:
		case CELL_GCM_TEXTURE_DEPTH24_D8_FLOAT:
			aspect_flags = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
			usage_flags |= VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
			// Fall back to D32_SFLOAT_S8 when the device lacks D24_UNORM_S8 support
			vk_format = m_formats_support.d24_unorm_s8 ? VK_FORMAT_D24_UNORM_S8_UINT : VK_FORMAT_D32_SFLOAT_S8_UINT;
			break;
		case CELL_GCM_TEXTURE_DEPTH16:
		case CELL_GCM_TEXTURE_DEPTH16_FLOAT:
			aspect_flags = VK_IMAGE_ASPECT_DEPTH_BIT;
			usage_flags |= VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
			vk_format = VK_FORMAT_D16_UNORM;
			break;
		default:
			aspect_flags = VK_IMAGE_ASPECT_COLOR_BIT;
			vk_format = get_compatible_sampler_format(m_formats_support, gcm_format);
			break;
		}

		// Ownership of this raw image is handed to the section via region.create() below
		auto* image = new vk::viewable_image(*m_device, m_memory_types.device_local, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
			image_type,
			vk_format,
			width, height, depth, mipmaps, layer, VK_SAMPLE_COUNT_1_BIT, VK_IMAGE_LAYOUT_UNDEFINED,
			VK_IMAGE_TILING_OPTIMAL, usage_flags, is_cubemap ? VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT : 0);

		image->native_component_map = apply_component_mapping_flags(gcm_format, flags, rsx::default_remap_vector);

		// Leave the image ready to receive uploaded data
		change_image_layout(cmd, image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, { aspect_flags, 0, mipmaps, 0, layer });

		cached_texture_section& region = *find_cached_texture(rsx_range, gcm_format, true, true, width, height, section_depth);
		ASSERT(!region.is_locked());

		// New section, we must prepare it
		region.reset(rsx_range);
		region.set_context(context);
		region.set_gcm_format(gcm_format);
		region.set_image_type(type);

		region.create(width, height, section_depth, mipmaps, image, pitch, true, gcm_format);
		region.set_dirty(false);

		// Its not necessary to lock blit dst textures as they are just reused as necessary
		switch (context)
		{
		case rsx::texture_upload_context::shader_read:
		case rsx::texture_upload_context::blit_engine_src:
			region.protect(utils::protection::ro);
			read_only_range = region.get_min_max(read_only_range, rsx::section_bounds::locked_range);
			break;
		case rsx::texture_upload_context::blit_engine_dst:
			region.set_unpack_swap_bytes(true);
			no_access_range = region.get_min_max(no_access_range, rsx::section_bounds::locked_range);
			break;
		case rsx::texture_upload_context::dma:
		case rsx::texture_upload_context::framebuffer_storage:
			// Should not be initialized with this method
		default:
			fmt::throw_exception("Unexpected upload context 0x%x", u32(context));
		}

		update_cache_tag();
		return &region;
	}
	// Creates a data-less ("nul") section used purely for DMA write tracking over 'rsx_range'.
	// No GPU image is attached. When 'memory_load' is set, the range is mapped for DMA and
	// pre-loaded with its current guest memory contents.
	cached_texture_section* create_nul_section(vk::command_buffer& cmd, const utils::address_range& rsx_range, bool memory_load) override
	{
		auto& region = *find_cached_texture(rsx_range, RSX_GCM_FORMAT_IGNORED, true, false);
		ASSERT(!region.is_locked());

		// Prepare section
		region.reset(rsx_range);
		region.set_context(rsx::texture_upload_context::dma);
		region.set_dirty(false);
		region.set_unpack_swap_bytes(true);

		if (memory_load)
		{
			vk::map_dma(cmd, rsx_range.start, rsx_range.length());
			vk::load_dma(rsx_range.start, rsx_range.length());
		}

		no_access_range = region.get_min_max(no_access_range, rsx::section_bounds::locked_range);
		update_cache_tag();
		return &region;
	}
2018-12-13 11:23:58 +01:00
	// Allocates a new section via create_new_texture and fills it with CPU-side data from
	// 'subresource_layout', then transitions the image into the layout preferred by its
	// upload context. Returns the populated section.
	cached_texture_section* upload_image_from_cpu(vk::command_buffer& cmd, const utils::address_range& rsx_range, u16 width, u16 height, u16 depth, u16 mipmaps, u16 pitch, u32 gcm_format,
		rsx::texture_upload_context context, const std::vector<rsx_subresource_layout>& subresource_layout, rsx::texture_dimension_extended type, bool swizzled) override
	{
		auto section = create_new_texture(cmd, rsx_range, width, height, depth, mipmaps, pitch, gcm_format, context, type,
			rsx::texture_create_flags::default_component_order);

		auto image = section->get_raw_texture();
		auto subres_range = section->get_raw_view()->info.subresourceRange;

		// Combined depth-stencil formats must select both aspects for the transfer
		switch (image->info.format)
		{
		case VK_FORMAT_D32_SFLOAT_S8_UINT:
		case VK_FORMAT_D24_UNORM_S8_UINT:
			subres_range.aspectMask |= VK_IMAGE_ASPECT_STENCIL_BIT;
			break;
		default:
			break;
		}

		change_image_layout(cmd, image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, subres_range);

		vk::enter_uninterruptible();

		bool input_swizzled = swizzled;
		if (context == rsx::texture_upload_context::blit_engine_src)
		{
			// Swizzling is ignored for blit engine copy and emulated using remapping
			input_swizzled = false;
		}

		vk::copy_mipmaped_image_using_buffer(cmd, image, subresource_layout, gcm_format, input_swizzled, mipmaps, subres_range.aspectMask,
			*m_texture_upload_heap);

		vk::leave_uninterruptible();

		// Insert appropriate barrier depending on use
		VkImageLayout preferred_layout;
		switch (context)
		{
		default:
			preferred_layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
			break;
		case rsx::texture_upload_context::blit_engine_dst:
			preferred_layout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
			break;
		case rsx::texture_upload_context::blit_engine_src:
			preferred_layout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
			break;
		}

		if (preferred_layout != image->current_layout)
		{
			change_image_layout(cmd, image, preferred_layout, subres_range);
		}
		else
		{
			// Insert ordering barrier: the layout already matches, but the upload's writes
			// must still be made visible to the following transfer writes
			verify(HERE), preferred_layout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
			insert_image_memory_barrier(cmd, image->value, image->current_layout, preferred_layout,
				VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
				VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_TRANSFER_WRITE_BIT,
				subres_range);
		}

		section->last_write_tag = rsx::get_shared_tag();
		return section;
	}
2017-12-18 10:02:19 +01:00
void enforce_surface_creation_type ( cached_texture_section & section , u32 gcm_format , rsx : : texture_create_flags expected_flags ) override
2017-04-21 21:55:05 +02:00
{
2017-12-07 13:08:11 +01:00
if ( expected_flags = = section . get_view_flags ( ) )
return ;
2018-07-17 18:42:51 +02:00
const VkComponentMapping mapping = apply_component_mapping_flags ( gcm_format , expected_flags , rsx : : default_remap_vector ) ;
2018-09-18 20:06:34 +02:00
auto image = static_cast < vk : : viewable_image * > ( section . get_raw_texture ( ) ) ;
verify ( HERE ) , image ! = nullptr ;
image - > set_native_component_layout ( mapping ) ;
2017-09-21 20:00:42 +02:00
section . set_view_flags ( expected_flags ) ;
2017-12-07 10:09:07 +01:00
}
2018-02-01 08:44:57 +01:00
void insert_texture_barrier ( vk : : command_buffer & cmd , vk : : image * tex ) override
{
2019-05-20 09:27:09 +02:00
vk : : insert_texture_barrier ( cmd , tex , VK_IMAGE_LAYOUT_GENERAL ) ;
2018-02-01 08:44:57 +01:00
}
2017-07-24 19:50:32 +02:00
2018-02-23 20:49:59 +01:00
bool render_target_format_is_compatible ( vk : : image * tex , u32 gcm_format ) override
{
auto vk_format = tex - > info . format ;
switch ( gcm_format )
{
default :
//TODO
2019-12-03 23:34:23 +01:00
warn_once ( " Format incompatibility detected, reporting failure to force data copy (VK_FORMAT=0x%X, GCM_FORMAT=0x%X) " , static_cast < u32 > ( vk_format ) , gcm_format ) ;
2018-02-23 20:49:59 +01:00
return false ;
case CELL_GCM_TEXTURE_W16_Z16_Y16_X16_FLOAT :
return ( vk_format = = VK_FORMAT_R16G16B16A16_SFLOAT ) ;
case CELL_GCM_TEXTURE_W32_Z32_Y32_X32_FLOAT :
return ( vk_format = = VK_FORMAT_R32G32B32A32_SFLOAT ) ;
case CELL_GCM_TEXTURE_X32_FLOAT :
return ( vk_format = = VK_FORMAT_R32_SFLOAT ) ;
case CELL_GCM_TEXTURE_R5G6B5 :
return ( vk_format = = VK_FORMAT_R5G6B5_UNORM_PACK16 ) ;
case CELL_GCM_TEXTURE_A8R8G8B8 :
return ( vk_format = = VK_FORMAT_B8G8R8A8_UNORM | | vk_format = = VK_FORMAT_D24_UNORM_S8_UINT | | vk_format = = VK_FORMAT_D32_SFLOAT_S8_UINT ) ;
2018-02-25 10:48:32 +01:00
case CELL_GCM_TEXTURE_B8 :
return ( vk_format = = VK_FORMAT_R8_UNORM ) ;
case CELL_GCM_TEXTURE_G8B8 :
return ( vk_format = = VK_FORMAT_R8G8_UNORM ) ;
case CELL_GCM_TEXTURE_DEPTH24_D8 :
case CELL_GCM_TEXTURE_DEPTH24_D8_FLOAT :
return ( vk_format = = VK_FORMAT_D24_UNORM_S8_UINT | | vk_format = = VK_FORMAT_D32_SFLOAT_S8_UINT ) ;
2019-02-25 16:03:14 +01:00
case CELL_GCM_TEXTURE_X16 :
2018-02-25 10:48:32 +01:00
case CELL_GCM_TEXTURE_DEPTH16 :
case CELL_GCM_TEXTURE_DEPTH16_FLOAT :
return ( vk_format = = VK_FORMAT_D16_UNORM ) ;
2018-02-23 20:49:59 +01:00
}
}
2019-03-16 10:14:11 +01:00
void prepare_for_dma_transfers ( vk : : command_buffer & cmd ) override
{
if ( ! cmd . is_recording ( ) )
{
cmd . begin ( ) ;
}
}
	// Finalizes the command buffer used for DMA transfers: closes any interrupted occlusion
	// query, submits the buffer (synchronously on the primary queue), then re-opens it for
	// further recording where applicable.
	void cleanup_after_dma_transfers(vk::command_buffer& cmd) override
	{
		bool occlusion_query_active = !!(cmd.flags & vk::command_buffer::cb_has_open_query);
		if (occlusion_query_active)
		{
			// We really stepped in it
			vk::do_query_cleanup(cmd);
		}

		// End recording
		cmd.end();

		if (cmd.access_hint != vk::command_buffer::access_type_hint::all)
		{
			// Primary access command queue, must restart it after
			vk::fence submit_fence(*m_device);
			cmd.submit(m_submit_queue, VK_NULL_HANDLE, VK_NULL_HANDLE, &submit_fence, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_TRUE);

			// Block until the GPU has consumed the buffer before resetting it
			vk::wait_for_fence(&submit_fence, GENERAL_WAIT_TIMEOUT);

			CHECK_RESULT(vkResetCommandBuffer(cmd, 0));
			cmd.begin();
		}
		else
		{
			// Auxilliary command queue with auto-restart capability
			cmd.submit(m_submit_queue, VK_NULL_HANDLE, VK_NULL_HANDLE, VK_NULL_HANDLE, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_TRUE);
		}

		verify(HERE), cmd.flags == 0;

		if (occlusion_query_active)
		{
			// The flush interrupted an open query; flag the next pass to re-arm it
			verify(HERE), cmd.is_recording();
			cmd.flags |= vk::command_buffer::cb_load_occluson_task;
		}
	}
2017-09-08 16:52:13 +02:00
public :
2018-09-22 02:14:26 +02:00
using baseclass : : texture_cache ;
2017-04-21 15:35:13 +02:00
2018-10-20 16:43:00 +02:00
void initialize ( vk : : render_device & device , VkQueue submit_queue , vk : : data_heap & upload_heap )
2017-09-08 16:52:13 +02:00
{
m_device = & device ;
2018-04-22 13:22:40 +02:00
m_memory_types = device . get_memory_mapping ( ) ;
m_formats_support = device . get_formats_support ( ) ;
2017-09-08 16:52:13 +02:00
m_submit_queue = submit_queue ;
m_texture_upload_heap = & upload_heap ;
2017-04-21 15:35:13 +02:00
}
2017-09-08 16:52:13 +02:00
// Tears down the cache by releasing every cached section and resource.
void destroy() override
{
	clear();
}
2017-12-18 10:02:19 +01:00
// Returns true if the given address range is fully contained in a locked
// cache section whose backing format is a depth(+stencil) format.
bool is_depth_texture(u32 rsx_address, u32 rsx_size) override
{
	reader_lock lock(m_cache_mutex);

	auto& block = m_storage.block_for(rsx_address);
	if (block.get_locked_count() == 0)
	{
		return false;
	}

	for (auto& section : block)
	{
		// Skip sections that cannot answer for this range
		if (section.is_dirty() || !section.overlaps(rsx_address, rsx::section_bounds::full_range))
		{
			continue;
		}

		if ((rsx_address + rsx_size - section.get_section_base()) > section.get_section_size())
		{
			continue;
		}

		// The first section fully containing the range decides the answer
		const auto format = section.get_format();
		return (format == VK_FORMAT_D16_UNORM ||
			format == VK_FORMAT_D24_UNORM_S8_UINT ||
			format == VK_FORMAT_D32_SFLOAT_S8_UINT);
	}

	// No covering section was found
	return false;
}
2017-09-08 16:52:13 +02:00
// Per-frame housekeeping: trims zombie sections, expires temporary scratch
// images, and clears per-frame caches/statistics.
void on_frame_end() override
{
	// If already holding over 64M in discardable memory, be frugal with memory resources
	if (m_storage.m_unreleased_texture_objects >= m_max_zombie_objects ||
		m_temporary_memory_size > 0x4000000)
	{
		purge_unreleased_sections();
	}

	// Drop temporary images that are zero-sized or whose last use predates
	// the most recently completed frame (GPU can no longer reference them)
	const u64 last_complete_frame = vk::get_last_completed_frame_id();
	m_temporary_storage.remove_if([&](const temporary_storage& o)
	{
		if (!o.block_size || o.test(last_complete_frame))
		{
			m_temporary_memory_size -= o.block_size;
			return true;
		}
		return false;
	});

	// Subresource views are only valid within a frame
	m_temporary_subresource_cache.clear();
	reset_frame_statistics();

	baseclass::on_frame_end();
}
2017-07-23 15:34:17 +02:00
2020-02-29 16:59:59 +01:00
// Uploads a linear memory range as a BGRA8 texture. Returns nullptr if the
// device cannot sample linear-tiled BGRA8 images. The returned image is owned
// by the temporary storage pool and recycled on frame end.
vk::image* upload_image_simple(vk::command_buffer& cmd, u32 address, u32 width, u32 height, u32 pitch)
{
	if (!m_formats_support.bgra8_linear)
	{
		return nullptr;
	}

	// Host-visible, linear-tiled image; written directly through a mapping
	auto image = std::make_unique<vk::viewable_image>(*m_device, m_memory_types.host_visible_coherent,
		VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
		VK_IMAGE_TYPE_2D,
		VK_FORMAT_B8G8R8A8_UNORM,
		width, height, 1, 1, 1, VK_SAMPLE_COUNT_1_BIT, VK_IMAGE_LAYOUT_PREINITIALIZED,
		VK_IMAGE_TILING_LINEAR, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_SAMPLED_BIT, 0);

	// The driver chooses the row pitch for linear images; query it
	VkImageSubresource subresource{};
	subresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
	VkSubresourceLayout layout{};
	vkGetImageSubresourceLayout(*m_device, image->value, &subresource, &layout);

	void* mapped = image->memory->map(0, layout.rowPitch * height);

	auto src_row = vm::_ptr<const char>(address);
	auto dst_row = static_cast<char*>(mapped);

	// Copy row by row, byte-swapping each texel (guest data is big-endian)
	// TODO: SSE optimization
	for (u32 y = 0; y < height; ++y)
	{
		const auto src_texels = reinterpret_cast<const be_t<u32>*>(src_row);
		const auto dst_texels = reinterpret_cast<u32*>(dst_row);

		for (u32 x = 0; x < width; ++x)
		{
			dst_texels[x] = src_texels[x];
		}

		src_row += pitch;
		dst_row += layout.rowPitch;
	}

	image->memory->unmap();

	vk::change_image_layout(cmd, image.get(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);

	// Hand ownership to the temporary pool; track an approximate footprint
	auto result = image.get();
	const u32 resource_memory = width * height * 4; // Rough approximate
	m_temporary_storage.emplace_back(image);
	m_temporary_storage.back().block_size = resource_memory;
	m_temporary_memory_size += resource_memory;

	return result;
}
2018-06-04 18:57:16 +02:00
bool blit ( rsx : : blit_src_info & src , rsx : : blit_dst_info & dst , bool interpolate , rsx : : vk_render_targets & m_rtts , vk : : command_buffer & cmd )
2017-07-23 15:34:17 +02:00
{
2018-12-29 14:28:12 +01:00
blitter helper ;
2019-03-16 10:14:11 +01:00
auto reply = upload_scaled_image ( src , dst , interpolate , cmd , m_rtts , helper ) ;
2018-02-10 17:21:16 +01:00
2018-06-04 18:57:16 +02:00
if ( reply . succeeded )
{
if ( reply . real_dst_size )
{
2019-03-16 10:14:11 +01:00
flush_if_cache_miss_likely ( cmd , reply . to_address_range ( ) ) ;
2018-06-04 18:57:16 +02:00
}
2018-02-03 14:42:02 +01:00
2018-06-04 18:57:16 +02:00
return true ;
}
2018-02-03 14:42:02 +01:00
2018-06-04 18:57:16 +02:00
return false ;
2017-08-13 23:27:19 +02:00
}
2017-09-22 22:43:01 +02:00
// Total unreleased texture count: base-class zombies plus temporary scratch images.
const u32 get_unreleased_textures_count() const override
{
	return baseclass::get_unreleased_textures_count() + ::size32(m_temporary_storage);
}
// Returns the number of bytes currently held by temporary/discardable storage.
// Marked const: this is a pure accessor and does not modify cache state
// (backward-compatible; not an override of any base virtual).
const u32 get_temporary_memory_in_use() const
{
	return m_temporary_memory_size;
}
2016-02-21 16:50:49 +01:00
} ;
}