#pragma once

#include <thread>
#include <queue>
#include <deque>
#include <variant>
#include <stack>
#include <unordered_map>

#include "GCM.h"
#include "rsx_cache.h"
#include "RSXFIFO.h"
#include "RSXOffload.h"
#include "RSXZCULL.h"
#include "rsx_utils.h"
#include "Common/bitfield.hpp"
#include "Common/profiling_timer.hpp"
#include "Common/texture_cache_types.h"
#include "Program/RSXVertexProgram.h"
#include "Program/RSXFragmentProgram.h"

#include "Utilities/Thread.h"
#include "Utilities/geometry.h"
#include "Capture/rsx_trace.h"
#include "Capture/rsx_replay.h"

#include "Emu/Cell/lv2/sys_rsx.h"
#include "Emu/IdManager.h"
#include "Emu/system_config.h"

extern atomic_t<bool> g_user_asked_for_frame_capture;
extern atomic_t<bool> g_disable_frame_limit;
extern rsx::frame_trace_data frame_debug;
extern rsx::frame_capture_data frame_capture;

namespace rsx
{
    namespace overlays
    {
        class display_manager;
    }

    struct rsx_iomap_table
    {
        std::array<atomic_t<u32>, 4096> ea;
        std::array<atomic_t<u32>, 4096> io;
        std::array<shared_mutex, 4096> rs{};

        rsx_iomap_table() noexcept
            : ea(fill_array(-1))
            , io(fill_array(-1))
        {
        }

        // Try to get the real address given a mapped address
        // Returns -1 on failure
        u32 get_addr(u32 offs) const noexcept
        {
            return this->ea[offs >> 20] | (offs & 0xFFFFF);
        }

        template<bool IsFullLock>
        bool lock(u32 addr, u32 len, cpu_thread* self = nullptr) noexcept
        {
            if (len <= 1) return false;

            const u32 end = addr + len - 1;

            for (u32 block = (addr >> 20); block <= (end >> 20); ++block)
            {
                auto& mutex_ = rs[block];

                if constexpr (IsFullLock)
                {
                    if (self) [[likely]]
                    {
                        while (!mutex_.try_lock())
                        {
                            self->cpu_wait({});
                        }
                    }
                    else
                    {
                        mutex_.lock();
                    }
                }
                else
                {
                    if (!self) [[likely]]
                    {
                        mutex_.lock_shared();
                    }
                    else
                    {
                        while (!mutex_.try_lock_shared())
                        {
                            self->cpu_wait({});
                        }
                    }
                }
            }

            return true;
        }

        template<bool IsFullLock>
        void unlock(u32 addr, u32 len) noexcept
        {
            ensure(len >= 1);

            const u32 end = addr + len - 1;

            for (u32 block = (addr >> 20); block <= (end >> 20); ++block)
            {
                if constexpr (IsFullLock)
                {
                    rs[block].unlock();
                }
                else
                {
                    rs[block].unlock_shared();
                }
            }
        }
    };
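
    // Illustrative usage sketch (not part of the original header): translating a mapped IO
    // offset to an effective address and holding a shared lock over the touched 1 MiB blocks
    // while the backing memory is accessed. Names other than the rsx_iomap_table members
    // (io_offset, length) are hypothetical.
    //
    //   const u32 ea = iomap_table.get_addr(io_offset); // all-ones (-1) means the page is unmapped
    //   if (ea + 1 && iomap_table.lock<false>(ea, length))
    //   {
    //       // ... read or write guest memory at ea ...
    //       iomap_table.unlock<false>(ea, length);
    //   }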

    enum framebuffer_creation_context : u8
    {
        context_draw = 0,
        context_clear_color = 1,
        context_clear_depth = 2,
        context_clear_all = context_clear_color | context_clear_depth
    };

    enum pipeline_state : u32
    {
        fragment_program_ucode_dirty = 0x1, // Fragment program ucode changed
        vertex_program_ucode_dirty = 0x2, // Vertex program ucode changed
        fragment_program_state_dirty = 0x4, // Fragment program state changed
        vertex_program_state_dirty = 0x8, // Vertex program state changed
        fragment_state_dirty = 0x10, // Fragment state changed (alpha test, etc)
        vertex_state_dirty = 0x20, // Vertex state changed (scale_offset, clip planes, etc)
        transform_constants_dirty = 0x40, // Transform constants changed
        fragment_constants_dirty = 0x80, // Fragment constants changed
        framebuffer_reads_dirty = 0x100, // Framebuffer contents changed
        fragment_texture_state_dirty = 0x200, // Fragment texture parameters changed
        vertex_texture_state_dirty = 0x400, // Vertex texture parameters changed
        scissor_config_state_dirty = 0x800, // Scissor region changed
        zclip_config_state_dirty = 0x1000, // Viewport Z clip changed

        scissor_setup_invalid = 0x2000, // Scissor configuration is broken
        scissor_setup_clipped = 0x4000, // Scissor region is cropped by viewport constraint

        polygon_stipple_pattern_dirty = 0x8000, // Rasterizer stippling pattern changed
        line_stipple_pattern_dirty = 0x10000, // Line stippling pattern changed

        push_buffer_arrays_dirty = 0x20000, // Push buffers have data written to them (immediate mode vertex buffers)

        fragment_program_dirty = fragment_program_ucode_dirty | fragment_program_state_dirty,
        vertex_program_dirty = vertex_program_ucode_dirty | vertex_program_state_dirty,
        invalidate_pipeline_bits = fragment_program_dirty | vertex_program_dirty,
        invalidate_zclip_bits = vertex_state_dirty | zclip_config_state_dirty,
        memory_barrier_bits = framebuffer_reads_dirty,

        all_dirty = ~0u
    };
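
    // Illustrative sketch (not part of the original header): how a backend typically consumes these
    // bits. State-modifying methods OR the relevant flag into rsx::thread::m_graphics_state, and the
    // draw path tests the aggregate masks before rebuilding expensive objects. The snippet below is a
    // hypothetical fragment of such a draw path, not code from this project.
    //
    //   m_graphics_state |= rsx::pipeline_state::transform_constants_dirty;
    //   ...
    //   if (m_graphics_state & rsx::pipeline_state::invalidate_pipeline_bits)
    //   {
    //       // Re-link or re-fetch the shader pipeline before issuing the draw
    //   }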

    enum eng_interrupt_reason : u32
    {
        backend_interrupt = 0x0001, // Backend-related interrupt
        memory_config_interrupt = 0x0002, // Memory configuration changed
        display_interrupt = 0x0004, // Display handling
        pipe_flush_interrupt = 0x0008, // Flush pipelines

        all_interrupt_bits = memory_config_interrupt | backend_interrupt | display_interrupt | pipe_flush_interrupt
    };

    enum FIFO_state : u8
    {
        running = 0,
        empty = 1, // PUT == GET
        spinning = 2, // Puller continuously jumps to self addr (synchronization technique)
        nop = 3, // Puller is processing a NOP command
        lock_wait = 4 // Puller is processing a lock acquire
    };

    enum FIFO_hint : u8
    {
        hint_conditional_render_eval = 1,
        hint_zcull_sync = 2
    };

    enum result_flags : u8
    {
        result_none = 0,
        result_error = 1,
        result_zcull_intr = 2
    };

    enum ROP_control : u32
    {
        alpha_test_enable = (1u << 0),
        framebuffer_srgb_enable = (1u << 1),
        csaa_enable = (1u << 4),
        msaa_mask_enable = (1u << 5),
        msaa_config_mask = (3u << 6),
        polygon_stipple_enable = (1u << 9),
        alpha_func_mask = (7u << 16)
    };

    u32 get_vertex_type_size_on_host(vertex_base_type type, u32 size);

    u32 get_address(u32 offset, u32 location, u32 size_to_check = 0,
        u32 line = __builtin_LINE(),
        u32 col = __builtin_COLUMN(),
        const char* file = __builtin_FILE(),
        const char* func = __builtin_FUNCTION());
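
    // Illustrative sketch (not part of the original header): resolving an RSX offset/location pair
    // into a guest effective address. The CELL_GCM_LOCATION_* constants are assumed to come from
    // GCM.h; the surrounding variable names are hypothetical.
    //
    //   const u32 dst_address = rsx::get_address(dst_offset, CELL_GCM_LOCATION_LOCAL);
    //   const u32 src_address = rsx::get_address(src_offset, CELL_GCM_LOCATION_MAIN, src_pitch * height);
    //
    // The trailing __builtin_LINE/__builtin_FILE defaults capture the call site automatically, so a
    // failed lookup can be reported against the caller without passing any extra arguments.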

    struct tiled_region
    {
        u32 address;
        u32 base;
        GcmTileInfo *tile;
        u8 *ptr;

        void write(const void *src, u32 width, u32 height, u32 pitch);
        void read(void *dst, u32 width, u32 height, u32 pitch);
    };

    struct vertex_array_buffer
    {
        rsx::vertex_base_type type;
        u8 attribute_size;
        u8 stride;
        std::span<const std::byte> data;
        u8 index;
        bool is_be;
    };

    struct vertex_array_register
    {
        rsx::vertex_base_type type;
        u8 attribute_size;
        std::array<u32, 4> data;
        u8 index;
    };

    struct empty_vertex_array
    {
        u8 index;
    };

    struct draw_array_command
    {
        u32 __dummy;
    };

    struct draw_indexed_array_command
    {
        std::span<const std::byte> raw_index_buffer;
    };

    struct draw_inlined_array
    {
        u32 __dummy;
        u32 __dummy2;
    };

    struct interleaved_attribute_t
    {
        u8 index;
        bool modulo;
        u16 frequency;
    };

    struct interleaved_range_info
    {
        bool interleaved = false;
        bool single_vertex = false;
        u32 base_offset = 0;
        u32 real_offset_address = 0;
        u8 memory_location = 0;
        u8 attribute_stride = 0;

        rsx::simple_array<interleaved_attribute_t> locations;

        // Check if we need to upload a full unoptimized range, i.e [0-max_index]
        std::pair<u32, u32> calculate_required_range(u32 first, u32 count) const;
    };

    enum attribute_buffer_placement : u8
    {
        none = 0,
        persistent = 1,
        transient = 2
    };

    struct vertex_input_layout
    {
        std::vector<interleaved_range_info> interleaved_blocks{}; // Interleaved blocks to be uploaded as-is
        std::vector<std::pair<u8, u32>> volatile_blocks{}; // Volatile data blocks (immediate draw vertex data for example)
        rsx::simple_array<u8> referenced_registers{}; // Volatile register data

        std::array<attribute_buffer_placement, 16> attribute_placement = fill_array(attribute_buffer_placement::none);

        vertex_input_layout() = default;

        void clear()
        {
            interleaved_blocks.clear();
            volatile_blocks.clear();
            referenced_registers.clear();
        }

        bool validate() const
        {
            // Criteria: At least one array stream has to be defined to feed vertex positions
            // This stream cannot be a const register, since identical vertices would only yield a zero-area (degenerate) primitive

            if (!interleaved_blocks.empty() && interleaved_blocks.front().attribute_stride != 0)
                return true;

            if (!volatile_blocks.empty())
                return true;

            for (u8 index = 0; index < limits::vertex_count; ++index)
            {
                switch (attribute_placement[index])
                {
                case attribute_buffer_placement::transient:
                {
                    // Ignore register reference
                    if (std::find(referenced_registers.begin(), referenced_registers.end(), index) != referenced_registers.end())
                        continue;

                    // The source is an inline array or an immediate-draw push buffer
                    return true;
                }
                case attribute_buffer_placement::persistent:
                {
                    return true;
                }
                case attribute_buffer_placement::none:
                {
                    continue;
                }
                default:
                {
                    fmt::throw_exception("Unreachable");
                }
                }
            }

            return false;
        }

        u32 calculate_interleaved_memory_requirements(u32 first_vertex, u32 vertex_count) const
        {
            u32 mem = 0;
            for (auto &block : interleaved_blocks)
            {
                const auto range = block.calculate_required_range(first_vertex, vertex_count);
                mem += range.second * block.attribute_stride;
            }

            return mem;
        }
    };
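
    // Illustrative sketch (not part of the original header): sizing the persistent upload buffer for a
    // draw call once the layout has been analyzed. `layout`, `first_vertex` and `vertex_count` are
    // hypothetical locals; analyse_inputs_interleaved() is the rsx::thread member declared further down.
    //
    //   vertex_input_layout layout;
    //   analyse_inputs_interleaved(layout);
    //   if (layout.validate())
    //   {
    //       const u32 persistent_bytes = layout.calculate_interleaved_memory_requirements(first_vertex, vertex_count);
    //       // allocate persistent_bytes and upload the interleaved streams
    //   }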

    struct framebuffer_layout
    {
        u16 width;
        u16 height;
        std::array<u32, 4> color_addresses;
        std::array<u32, 4> color_pitch;
        std::array<u32, 4> actual_color_pitch;
        std::array<bool, 4> color_write_enabled;
        u32 zeta_address;
        u32 zeta_pitch;
        u32 actual_zeta_pitch;
        bool zeta_write_enabled;
        rsx::surface_target target;
        rsx::surface_color_format color_format;
        rsx::surface_depth_format2 depth_format;
        rsx::surface_antialiasing aa_mode;
        rsx::surface_raster_type raster_type;
        u32 aa_factors[2];
        bool ignore_change;
    };

    struct frame_statistics_t
    {
        u32 draw_calls;
        u32 submit_count;

        s64 setup_time;
        s64 vertex_upload_time;
        s64 textures_upload_time;
        s64 draw_exec_time;
        s64 flip_time;
    };

    struct display_flip_info_t
    {
        std::deque<u32> buffer_queue;
        u32 buffer;
        bool skip_frame;
        bool emu_flip;
        bool in_progress;
        frame_statistics_t stats;

        inline void push(u32 _buffer)
        {
            buffer_queue.push_back(_buffer);
        }

        inline bool pop(u32 _buffer)
        {
            if (buffer_queue.empty())
            {
                return false;
            }

            do
            {
                const auto index = buffer_queue.front();
                buffer_queue.pop_front();

                if (index == _buffer)
                {
                    buffer = _buffer;
                    return true;
                }
            }
            while (!buffer_queue.empty());

            // Need to observe this happening in the wild
            rsx_log.error("Display queue was discarded while not empty!");
            return false;
        }
    };
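
    // Illustrative sketch (not part of the original header): the queue pairs a flip request with its
    // later execution. `flip_info` and `buffer_index` are hypothetical.
    //
    //   flip_info.push(buffer_index);      // producer: a flip was requested for this buffer
    //   ...
    //   if (flip_info.pop(buffer_index))   // consumer: the flip is about to be executed
    //   {
    //       // flip_info.buffer now identifies the buffer to present
    //   }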

    struct backend_configuration
    {
        bool supports_multidraw; // Draw call batching
        bool supports_hw_a2c; // Alpha to coverage
        bool supports_hw_renormalization; // Should be true on NV hardware which matches PS3 texture renormalization behaviour
        bool supports_hw_msaa; // MSAA support
        bool supports_hw_a2one; // Alpha to one
        bool supports_hw_conditional_render; // Conditional render
        bool supports_passthrough_dma; // DMA passthrough
        bool supports_asynchronous_compute; // Async compute
        bool supports_host_gpu_labels; // Advanced host synchronization
    };

    struct sampled_image_descriptor_base;

    class thread : public cpu_thread
    {
        u64 timestamp_ctrl = 0;
        u64 timestamp_subvalue = 0;
        u64 m_cycles_counter = 0;

        display_flip_info_t m_queued_flip{};

        void cpu_task() override;
    protected:
        atomic_t<bool> m_rsx_thread_exiting{ true };

        std::array<push_buffer_vertex_info, 16> vertex_push_buffers;
        std::vector<u32> element_push_buffer;

        s32 m_skip_frame_ctr = 0;
        bool skip_current_frame = false;

        backend_configuration backend_config{};

        // FIFO
    public:
        std::unique_ptr<FIFO::FIFO_control> fifo_ctrl;
        std::vector<std::pair<u32, u32>> dump_callstack_list() const override;

    protected:
        FIFO::flattening_helper m_flattener;
        u32 fifo_ret_addr = RSX_CALL_STACK_EMPTY;
        u32 saved_fifo_ret = RSX_CALL_STACK_EMPTY;

        // Occlusion query
        bool zcull_surface_active = false;
        std::unique_ptr<reports::ZCULL_control> zcull_ctrl;

        // Framebuffer setup
        rsx::gcm_framebuffer_info m_surface_info[rsx::limits::color_buffers_count];
        rsx::gcm_framebuffer_info m_depth_surface_info;
        framebuffer_layout m_framebuffer_layout{};
        bool framebuffer_status_valid = false;

        // Overlays
        rsx::overlays::display_manager* m_overlay_manager = nullptr;

        // Invalidated memory range
        address_range m_invalidated_memory_range;

        // Profiler
        rsx::profiling_timer m_profiler;
        frame_statistics_t m_frame_stats;

    public:
        RsxDmaControl* ctrl = nullptr;
        u32 dma_address{0};
        rsx_iomap_table iomap_table;
        u32 restore_point = 0;
        u32 dbg_step_pc = 0;
        u32 last_known_code_start = 0;
        atomic_t<u32> external_interrupt_lock{ 0 };
        atomic_t<bool> external_interrupt_ack{ false };
        atomic_t<bool> is_inited{ false };
        bool is_fifo_idle() const;
        void flush_fifo();

        // Returns [count of found commands, PC of their start]
        std::pair<u32, u32> try_get_pc_of_x_cmds_backwards(u32 count, u32 get) const;

        void recover_fifo(u32 line = __builtin_LINE(),
            u32 col = __builtin_COLUMN(),
            const char* file = __builtin_FILE(),
            const char* func = __builtin_FUNCTION());

        static void fifo_wake_delay(u64 div = 1);
        u32 get_fifo_cmd() const;

        std::string dump_regs() const override;
        void cpu_wait(bs_t<cpu_flag> old) override;

        static constexpr u32 id_base = 0x5555'5555; // See get_current_cpu_thread()

        // Performance approximation counters
        struct
        {
            atomic_t<u64> idle_time{ 0 }; // Time spent idling in microseconds
            u64 last_update_timestamp = 0; // Timestamp of last load update
            u64 FIFO_idle_timestamp = 0; // Timestamp of when FIFO queue becomes idle
            FIFO_state state = FIFO_state::running;
            u32 approximate_load = 0;
            u32 sampled_frames = 0;
        }
        performance_counters;
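
        // Illustrative sketch (not part of the original header) of how the load figure could be
        // derived from these counters; the real formula lives in the implementation, so treat this
        // as an assumption:
        //
        //   const u64 elapsed = now - performance_counters.last_update_timestamp;
        //   const u64 idle = performance_counters.idle_time.exchange(0);
        //   performance_counters.approximate_load = elapsed ? static_cast<u32>(100 - (100 * idle / elapsed)) : 0;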

        enum class flip_request : u32
        {
            emu_requested = 1,
            native_ui = 2,

            any = emu_requested | native_ui
        };

        atomic_bitmask_t<flip_request> async_flip_requested{};
        u8 async_flip_buffer{ 0 };

        GcmTileInfo tiles[limits::tiles_count];
        GcmZcullInfo zculls[limits::zculls_count];

        void capture_frame(const std::string &name);
        const backend_configuration& get_backend_config() const { return backend_config; }

    public:
        std::shared_ptr<named_thread<class ppu_thread>> intr_thread;

        // I hate this flag, but until HLE is closer to LLE, it's needed
        bool isHLE{ false };

        u32 flip_status;
        int debug_level;

        atomic_t<bool> requested_vsync{false};
        atomic_t<bool> enable_second_vhandler{false};

        RsxDisplayInfo display_buffers[8];
        u32 display_buffers_count{0};
        u32 current_display_buffer{0};

        shared_mutex sys_rsx_mtx;
        u32 device_addr{0};
        u32 label_addr{0};
        u32 main_mem_size{0};
        u32 local_mem_size{0};
        u32 rsx_event_port{0};
        u32 driver_info{0};

        void send_event(u64, u64, u64) const;

        bool m_rtts_dirty = true;
        std::array<bool, 16> m_textures_dirty;
        std::array<bool, 4> m_vertex_textures_dirty;
        bool m_framebuffer_state_contested = false;
        rsx::framebuffer_creation_context m_current_framebuffer_context = rsx::framebuffer_creation_context::context_draw;

        rsx::atomic_bitmask_t<rsx::eng_interrupt_reason> m_eng_interrupt_mask;
        u32 m_graphics_state = 0;
        u64 ROP_sync_timestamp = 0;

        program_hash_util::fragment_program_utils::fragment_program_metadata current_fp_metadata = {};
        program_hash_util::vertex_program_utils::vertex_program_metadata current_vp_metadata = {};

    protected:
        std::array<u32, 4> get_color_surface_addresses() const;
        u32 get_zeta_surface_address() const;

        void get_framebuffer_layout(rsx::framebuffer_creation_context context, framebuffer_layout &layout);
        bool get_scissor(areau& region, bool clip_viewport);

        /**
         * Analyze vertex inputs and group all interleaved blocks
         */
        void analyse_inputs_interleaved(vertex_input_layout&);

        RSXVertexProgram current_vertex_program = {};
        RSXFragmentProgram current_fragment_program = {};

        vertex_program_texture_state current_vp_texture_state = {};
        fragment_program_texture_state current_fp_texture_state = {};

        // Runs shader prefetch and resolves pipeline status flags
        void analyse_current_rsx_pipeline();

        // Prefetch and analyze the currently active fragment program ucode
        void prefetch_fragment_program();

        // Prefetch and analyze the currently active vertex program ucode
        void prefetch_vertex_program();

        void get_current_vertex_program(const std::array<std::unique_ptr<rsx::sampled_image_descriptor_base>, rsx::limits::vertex_textures_count>& sampler_descriptors);

        /**
         * Gets current fragment program and associated fragment state
         */
        void get_current_fragment_program(const std::array<std::unique_ptr<rsx::sampled_image_descriptor_base>, rsx::limits::fragment_textures_count>& sampler_descriptors);

    public:
        bool invalidate_fragment_program(u32 dst_dma, u32 dst_offset, u32 size);
        void on_framebuffer_options_changed(u32 opt);

    public:
        u64 target_rsx_flip_time = 0;
        u64 int_flip_index = 0;
        u64 last_guest_flip_timestamp = 0;
        u64 last_host_flip_timestamp = 0;

        vm::ptr<void(u32)> flip_handler = vm::null;
        vm::ptr<void(u32)> user_handler = vm::null;
        vm::ptr<void(u32)> vblank_handler = vm::null;
        atomic_t<u64> vblank_count{0};
        bool capture_current_frame = false;

        bool wait_for_flip_sema = false;
        u32 flip_sema_wait_val = 0;

    public:
        atomic_t<bool> sync_point_request = false;
        bool in_begin_end = false;

        struct desync_fifo_cmd_info
        {
            u32 cmd;
            u64 timestamp;
        };

        std::queue<desync_fifo_cmd_info> recovered_fifo_cmds_history;

        atomic_t<s32> async_tasks_pending{ 0 };

        bool zcull_stats_enabled = false;
        bool zcull_rendering_enabled = false;
        bool zcull_pixel_cnt_enabled = false;

        reports::conditional_render_eval cond_render_ctrl;

        virtual u64 get_cycles() = 0;
        virtual ~thread();

        static constexpr auto thread_name = "rsx::thread"sv;

    protected:
        thread();
        virtual void on_task();
        virtual void on_exit();

        /**
         * Execute a backend local task queue
         */
        virtual void do_local_task(FIFO_state state);

        virtual void emit_geometry(u32) {}

        void run_FIFO();

    public:
        thread(const thread&) = delete;
        thread& operator=(const thread&) = delete;

        virtual void clear_surface(u32 /*arg*/) {}
        virtual void begin();
        virtual void end();
        virtual void execute_nop_draw();

        virtual void on_init_thread() = 0;
        virtual void on_frame_end(u32 buffer, bool forced = false);
        virtual void flip(const display_flip_info_t& info) = 0;
        virtual u64 timestamp();
        virtual bool on_access_violation(u32 /*address*/, bool /*is_writing*/) { return false; }
        virtual void on_invalidate_memory_range(const address_range & /*range*/, rsx::invalidation_cause) {}
        virtual void notify_tile_unbound(u32 /*tile*/) {}

        // control
        virtual void renderctl(u32 /*request_code*/, void* /*args*/) {}

        // zcull
        void notify_zcull_info_changed();
        void clear_zcull_stats(u32 type);
        void check_zcull_status(bool framebuffer_swap);
        void get_zcull_stats(u32 type, vm::addr_t sink);
        u32 copy_zcull_stats(u32 memory_range_start, u32 memory_range, u32 destination);

        void enable_conditional_rendering(vm::addr_t ref);
        void disable_conditional_rendering();
        virtual void begin_conditional_rendering(const std::vector<reports::occlusion_query_info*>& sources);
        virtual void end_conditional_rendering();

        // sync
        void sync();
        flags32_t read_barrier(u32 memory_address, u32 memory_range, bool unconditional);
        virtual void sync_hint(FIFO_hint hint, reports::sync_hint_payload_t payload);
        virtual bool release_GCM_label(u32 /*address*/, u32 /*value*/) { return false; }

        std::span<const std::byte> get_raw_index_array(const draw_clause& draw_indexed_clause) const;

        std::variant<draw_array_command, draw_indexed_array_command, draw_inlined_array>
        get_draw_command(const rsx::rsx_state& state) const;
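
        // Illustrative sketch (not part of the original header): dispatching on the returned variant.
        // rsx::method_registers is the global register file declared elsewhere; the lambda bodies are
        // placeholders.
        //
        //   std::visit([&](const auto& cmd)
        //   {
        //       using T = std::decay_t<decltype(cmd)>;
        //       if constexpr (std::is_same_v<T, draw_array_command>) { /* non-indexed draw */ }
        //       else if constexpr (std::is_same_v<T, draw_indexed_array_command>) { /* indexed draw, uses cmd.raw_index_buffer */ }
        //       else { /* inlined array draw */ }
        //   }, get_draw_command(rsx::method_registers));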

        /**
         * Immediate mode rendering requires a temp push buffer to hold attrib values
         * Appends a value to the push buffer (currently only supports 32-wide types)
         */
        void append_to_push_buffer(u32 attribute, u32 size, u32 subreg_index, vertex_base_type type, u32 value);
        u32 get_push_buffer_vertex_count() const;

        void append_array_element(u32 index);
        u32 get_push_buffer_index_count() const;

    protected:

        /**
         * Computes VRAM requirements needed to upload raw vertex streams
         * result.first contains persistent memory requirements
         * result.second contains volatile memory requirements
         */
        std::pair<u32, u32> calculate_memory_requirements(const vertex_input_layout& layout, u32 first_vertex, u32 vertex_count);

        /**
         * Generates vertex input descriptors as an array of 16x4 s32s
         */
        void fill_vertex_layout_state(const vertex_input_layout& layout, u32 first_vertex, u32 vertex_count, s32* buffer, u32 persistent_offset = 0, u32 volatile_offset = 0);

        /**
         * Uploads vertex data described in the layout descriptor
         * Copies from local memory to the write-only output buffers provided in a sequential manner
         */
        void write_vertex_data_to_memory(const vertex_input_layout& layout, u32 first_vertex, u32 vertex_count, void *persistent_data, void *volatile_data);

    private:
        shared_mutex m_mtx_task;

        void handle_emu_flip(u32 buffer);
        void handle_invalidated_memory_range();

    public:
        /**
         * Fill buffer with 4x4 scale offset matrix.
         * Vertex shader's position is to be multiplied by this matrix.
         * If flip_y is set, the matrix is modified to use the D3D convention.
         */
        void fill_scale_offset_data(void *buffer, bool flip_y) const;

        /**
         * Fill buffer with user clip information
         */
        void fill_user_clip_data(void *buffer) const;

        /**
         * Fill buffer with vertex program constants.
         * The relocation table allows a partial fill with only the selected registers.
         */
        void fill_vertex_program_constants_data(void* buffer, const std::vector<u16>& reloc_table);

        /**
         * Fill buffer with fragment rasterization state.
         * Fills current fog values, alpha test parameters and texture scaling parameters
         */
        void fill_fragment_state_buffer(void* buffer, const RSXFragmentProgram& fragment_program);

        /**
         * Notify that a section of memory has been mapped
         * If there is a notify_memory_unmapped request on this range yet to be handled,
         * handles it immediately.
         */
        void on_notify_memory_mapped(u32 address_base, u32 size);

        /**
         * Notify that a section of memory has been unmapped
         * Any data held in the defined range is discarded
         */
        void on_notify_memory_unmapped(u32 address_base, u32 size);

        /**
         * Notify to check internal state during semaphore wait
         */
        virtual void on_semaphore_acquire_wait() {}

        virtual std::pair<std::string, std::string> get_programs() const { return std::make_pair("", ""); }

        virtual bool scaled_image_from_memory(blit_src_info& /*src_info*/, blit_dst_info& /*dst_info*/, bool /*interpolate*/) { return false; }

    public:
        void reset();
        void init(u32 ctrlAddress);

        // Emu App/Game flip; only flips immediately when called from the RSX thread
        void request_emu_flip(u32 buffer);

        void pause();
        void unpause();
        void wait_pause();

        // Get RSX approximate load in %
        u32 get_load();

        // Get stats object
        frame_statistics_t& get_stats() { return m_frame_stats; }

        // Returns true if the current thread is the active RSX thread
        inline bool is_current_thread() const
        {
            return !!cpu_thread::get_current<rsx::thread>();
        }
    };

    inline thread* get_current_renderer()
    {
        return g_fxo->try_get<rsx::thread>();
    }
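
    // Note: get_current_renderer() returns nullptr until the RSX thread object has been created, so
    // callers outside the render backends should check the result. A minimal hypothetical example:
    //
    //   if (auto* render = rsx::get_current_renderer())
    //   {
    //       render->sync_point_request = true;
    //   }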

    template<bool IsFullLock = false>
    class reservation_lock
    {
        u32 addr = 0, length = 0;
        bool locked = false;

        inline void lock_range(u32 addr, u32 length)
        {
            this->addr = addr;
            this->length = length;

            auto renderer = get_current_renderer();
            cpu_thread* lock_owner = renderer->is_current_thread() ? renderer : nullptr;
            this->locked = renderer->iomap_table.lock<IsFullLock>(addr, length, lock_owner);
        }

    public:
        reservation_lock(u32 addr, u32 length)
        {
            if (g_cfg.core.rsx_accurate_res_access &&
                addr < constants::local_mem_base)
            {
                lock_range(addr, length);
            }
        }

        // Multi-range lock. If ranges overlap, the combined range will be acquired.
        // If ranges do not overlap, the first range that is in main memory will be acquired.
        reservation_lock(u32 dst_addr, u32 dst_length, u32 src_addr, u32 src_length)
        {
            if (g_cfg.core.rsx_accurate_res_access)
            {
                const auto range1 = utils::address_range::start_length(dst_addr, dst_length);
                const auto range2 = utils::address_range::start_length(src_addr, src_length);
                utils::address_range target_range;

                if (!range1.overlaps(range2)) [[likely]]
                {
                    target_range = (dst_addr < constants::local_mem_base) ? range1 : range2;
                }
                else
                {
                    // Very unlikely
                    target_range = range1.get_min_max(range2);
                }

                if (target_range.start < constants::local_mem_base)
                {
                    lock_range(target_range.start, target_range.length());
                }
            }
        }

        ~reservation_lock()
        {
            if (locked)
            {
                get_current_renderer()->iomap_table.unlock<IsFullLock>(addr, length);
            }
        }
    };
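
    // Illustrative sketch (not part of the original header): guarding a transfer into main memory with
    // a scoped lock; the lock is skipped automatically for local memory or when accurate reservation
    // access is disabled. `dst_addr` and `dst_length` are hypothetical.
    //
    //   {
    //       rsx::reservation_lock<true> lock(dst_addr, dst_length); // exclusive lock over the range
    //       // ... write to guest memory ...
    //   } // released in the destructor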

    class eng_lock
    {
        rsx::thread* pthr;
    public:
        eng_lock(rsx::thread* target)
            : pthr(target)
        {
            if (pthr->is_current_thread())
            {
                pthr = nullptr;
            }
            else
            {
                pthr->pause();
            }
        }

        ~eng_lock()
        {
            if (pthr) pthr->unpause();
        }
    };
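
    // Illustrative sketch (not part of the original header): pausing the RSX engine for the duration of
    // a scope when called from another thread; the constructor makes it a no-op on the RSX thread itself.
    //
    //   if (auto* render = rsx::get_current_renderer())
    //   {
    //       rsx::eng_lock engine_pause(render);
    //       // ... safely touch shared renderer state ...
    //   } // unpaused when engine_pause goes out of scope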
}