renderer_vulkan: Handle scheduler switches properly

Author: GPUCode
Date: 2022-09-29 19:51:25 +03:00
Parent: f6af97fc16
Commit: 075090569f
5 changed files with 18 additions and 22 deletions

View File

@@ -180,6 +180,13 @@ PipelineCache::~PipelineCache() {
void PipelineCache::BindPipeline(const PipelineInfo& info) {
ApplyDynamic(info);
// When texture downloads occur the runtime will flush the GPU and cause
// a scheduler slot switch behind our back. This might invalidate any
// cached descriptor sets/require pipeline rebinding.
if (timestamp != scheduler.GetHostFenceCounter()) {
MarkDirty();
}
u64 shader_hash = 0;
for (u32 i = 0; i < MAX_SHADER_STAGES; i++) {
shader_hash = Common::HashCombine(shader_hash, shader_hashes[i]);
@@ -303,9 +310,10 @@ void PipelineCache::SetScissor(s32 x, s32 y, u32 width, u32 height) {
command_buffer.setScissor(0, vk::Rect2D{{x, y}, {width, height}});
}
void PipelineCache::MarkDescriptorSetsDirty() {
void PipelineCache::MarkDirty() {
descriptor_dirty.fill(true);
current_pipeline = VK_NULL_HANDLE;
timestamp = scheduler.GetHostFenceCounter();
}
void PipelineCache::ApplyDynamic(const PipelineInfo& info) {
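
The check added to BindPipeline is a plain timestamp comparison against the scheduler's host fence counter: whenever the counter has moved since the cache last stamped itself, all cached bindings are treated as stale. A minimal standalone sketch of that pattern, using hypothetical FakeScheduler/FakePipelineCache stand-ins rather than the real renderer classes:

#include <array>
#include <cstdint>
#include <cstdio>

// Hypothetical stand-in for the task scheduler: the host fence counter
// advances whenever a command buffer is flushed/submitted.
struct FakeScheduler {
    std::uint64_t next_fence_counter = 1;
    std::uint64_t GetHostFenceCounter() const {
        return next_fence_counter - 1;
    }
    void Flush() {
        // e.g. triggered by a texture download mid-frame
        next_fence_counter++;
    }
};

// Hypothetical stand-in for the pipeline cache.
struct FakePipelineCache {
    explicit FakePipelineCache(FakeScheduler& sched) : scheduler{sched} {}

    void BindPipeline() {
        // A flush behind our back advanced the fence counter, so cached
        // descriptor sets and the bound pipeline may no longer be valid.
        if (timestamp != scheduler.GetHostFenceCounter()) {
            MarkDirty();
        }
        // ... rebind only the state flagged dirty ...
    }

    void MarkDirty() {
        descriptor_dirty.fill(true);
        timestamp = scheduler.GetHostFenceCounter();
        std::printf("cache invalidated at fence counter %llu\n",
                    static_cast<unsigned long long>(timestamp));
    }

    FakeScheduler& scheduler;
    std::array<bool, 6> descriptor_dirty{};
    std::uint64_t timestamp = 0;
};

int main() {
    FakeScheduler scheduler;
    FakePipelineCache cache{scheduler};
    cache.BindPipeline(); // counters match, nothing to invalidate
    scheduler.Flush();    // a texture download forced a slot switch
    cache.BindPipeline(); // mismatch detected, descriptors re-marked dirty
}

Comparing a stored counter is cheaper than having every flush path call back into the pipeline cache, which is also why the explicit MarkDescriptorSetsDirty() call in FlushBuffers() could be dropped further down in this commit.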

View File

@@ -195,8 +195,8 @@ public:
/// Sets the scissor rectangle to the provided values
void SetScissor(s32 x, s32 y, u32 width, u32 height);
/// Marks all descriptor sets as dirty
void MarkDescriptorSetsDirty();
/// Marks all cached pipeline cache state as dirty
void MarkDirty();
private:
/// Binds a resource to the provided binding
@@ -248,6 +248,7 @@ private:
std::array<DescriptorSetData, MAX_DESCRIPTOR_SETS> update_data{};
std::array<bool, MAX_DESCRIPTOR_SETS> descriptor_dirty{};
std::array<vk::DescriptorSet, MAX_DESCRIPTOR_SETS> descriptor_sets;
u64 timestamp = 0;
// Bound shader modules
enum ProgramType : u32 {

View File

@@ -784,8 +784,6 @@ bool RasterizerVulkan::Draw(bool accelerate, bool is_indexed) {
// Enable scissor test to prevent drawing outside of the framebuffer region
pipeline_cache.SetScissor(draw_rect.left, draw_rect.bottom, draw_rect.GetWidth(), draw_rect.GetHeight());
//return true;
auto valid_surface = color_surface ? color_surface : depth_surface;
const FramebufferInfo framebuffer_info = {
.color = color_surface ? color_surface->alloc.image_view : VK_NULL_HANDLE,
@@ -834,6 +832,7 @@ bool RasterizerVulkan::Draw(bool accelerate, bool is_indexed) {
if (accelerate) {
succeeded = AccelerateDrawBatchInternal(is_indexed);
} else {
pipeline_info.rasterization.topology.Assign(Pica::PipelineRegs::TriangleTopology::List);
pipeline_cache.UseTrivialVertexShader();
pipeline_cache.UseTrivialGeometryShader();
pipeline_cache.BindPipeline(pipeline_info);
@@ -1625,7 +1624,6 @@ void RasterizerVulkan::FlushBuffers() {
index_buffer.Flush();
texture_buffer.Flush();
texture_lf_buffer.Flush();
pipeline_cache.MarkDescriptorSetsDirty();
}
void RasterizerVulkan::SetShader() {

View File

@@ -119,25 +119,13 @@ void TaskScheduler::Synchronize(u32 slot) {
LOG_ERROR(Render_Vulkan, "Waiting for fence counter {} failed!", command.fence_counter);
UNREACHABLE();
}
completed_fence_counter = command.fence_counter;
}
completed_fence_counter = command.fence_counter;
device.resetFences(command.fence);
device.resetDescriptorPool(command.descriptor_pool);
}
void TaskScheduler::WaitFence(u32 counter) {
for (u32 i = 0; i < SCHEDULER_COMMAND_COUNT; i++) {
if (commands[i].fence_counter == counter) {
return Synchronize(i);
}
}
LOG_CRITICAL(Render_Vulkan,"Invalid fence counter {}!", counter);
UNREACHABLE();
}
void TaskScheduler::Submit(SubmitMode mode) {
const auto& command = commands[current_command];
command.render_command_buffer.end();
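
For context, each command slot is stamped with a monotonically increasing fence counter, and Synchronize() only needs to wait when that slot's counter has not yet been observed as completed. A simplified sketch of that bookkeeping, assuming Submit() stamps slots with next_fence_counter++ (consistent with GetHostFenceCounter() returning next_fence_counter - 1); MiniScheduler and the plain gpu_done flag are invented stand-ins for the real scheduler and its vk::Fence:

#include <array>
#include <cassert>
#include <cstdint>

constexpr std::uint32_t SCHEDULER_COMMAND_COUNT = 4;

// A plain bool stands in for the slot's vk::Fence being signalled.
struct CommandSlot {
    std::uint64_t fence_counter = 0;
    bool gpu_done = false;
};

// Hypothetical MiniScheduler, invented for illustration only.
struct MiniScheduler {
    std::uint64_t GetHostFenceCounter() const {
        return next_fence_counter - 1;
    }

    void Submit() {
        commands[current].fence_counter = next_fence_counter++;
        commands[current].gpu_done = false;
        // Switch to the next slot, making sure its previous use has finished.
        current = (current + 1) % SCHEDULER_COMMAND_COUNT;
        Synchronize(current);
    }

    void Synchronize(std::uint32_t slot) {
        CommandSlot& command = commands[slot];
        if (command.fence_counter > completed_fence_counter) {
            // The real code waits on the slot's fence here, then resets the
            // fence and the slot's descriptor pool; resetting the pool is
            // exactly what makes previously cached descriptor sets invalid.
            command.gpu_done = true;
            completed_fence_counter = command.fence_counter;
        }
    }

    std::array<CommandSlot, SCHEDULER_COMMAND_COUNT> commands{};
    std::uint32_t current = 0;
    std::uint64_t next_fence_counter = 1;
    std::uint64_t completed_fence_counter = 0;
};

int main() {
    MiniScheduler scheduler;
    const std::uint64_t before = scheduler.GetHostFenceCounter();
    scheduler.Submit(); // e.g. the flush caused by a texture download
    assert(scheduler.GetHostFenceCounter() == before + 1);
}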

View File

@@ -32,9 +32,6 @@ public:
/// Blocks the host until the current command completes execution
void Synchronize(u32 slot);
/// Waits for the fence counter to be reached by the GPU
void WaitFence(u32 counter);
/// Submits the current command to the graphics queue
void Submit(SubmitMode mode);
@@ -59,6 +56,10 @@ public:
return current_command;
}
u64 GetHostFenceCounter() const {
return next_fence_counter - 1;
}
vk::Semaphore GetImageAcquiredSemaphore() const {
return commands[current_command].image_acquired;
}
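
Assuming submission is what advances next_fence_counter, GetHostFenceCounter() reports the counter of the most recently stamped submission, so its value only changes when a flush or slot switch has happened. A tiny illustrative check of the invariant the pipeline cache relies on (names made up):

#include <cassert>
#include <cstdint>

int main() {
    // Mirrors GetHostFenceCounter() == next_fence_counter - 1.
    std::uint64_t next_fence_counter = 1;
    const auto host_counter = [&] { return next_fence_counter - 1; };

    const std::uint64_t timestamp = host_counter(); // MarkDirty-style stamp
    assert(timestamp == host_counter());            // no flush yet: cache still valid

    ++next_fence_counter;                           // a submit/flush stamped a new slot
    assert(timestamp != host_counter());            // next BindPipeline marks dirty
}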