renderer_vulkan: Remove upload_cmdbuf
* No longer needed with the new stream buffer
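
For context, the scheduler in this diff defers GPU work as callbacks that are later replayed onto a command buffer; after this change those callbacks take a single vk::CommandBuffer instead of a (render_cmdbuf, upload_cmdbuf) pair. The sketch below only illustrates that one-parameter Record pattern: CommandBuffer and Scheduler here are stand-in toy types, not the project's real classes or the Vulkan API.

    // Minimal, illustrative sketch of the single-command-buffer Record pattern.
    // All types here are hypothetical stand-ins for the real scheduler/Vulkan objects.
    #include <functional>
    #include <iostream>
    #include <vector>

    struct CommandBuffer {
        void bindPipeline(const char* name) { std::cout << "bind " << name << '\n'; }
        void draw(int vertices) { std::cout << "draw " << vertices << '\n'; }
    };

    class Scheduler {
    public:
        // After this commit: recorded callbacks receive one command buffer
        // instead of a separate render/upload pair.
        template <typename T>
        void Record(T&& command) {
            recorded.emplace_back(std::forward<T>(command));
        }

        // Replays everything that was recorded onto the current command buffer.
        void Flush(CommandBuffer& cmdbuf) {
            for (const auto& command : recorded) {
                command(cmdbuf);
            }
            recorded.clear();
        }

    private:
        std::vector<std::function<void(CommandBuffer&)>> recorded;
    };

    int main() {
        Scheduler scheduler;
        scheduler.Record([](CommandBuffer& cmdbuf) { cmdbuf.bindPipeline("present"); });
        scheduler.Record([](CommandBuffer& cmdbuf) { cmdbuf.draw(4); });

        CommandBuffer cmdbuf;
        scheduler.Flush(cmdbuf); // one buffer now carries both upload and render work
    }
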
@@ -206,12 +206,11 @@ void RendererVulkan::BeginRendering() {
     vk::DescriptorSet set = desc_manager.AllocateSet(present_descriptor_layout);
     device.updateDescriptorSetWithTemplate(set, present_update_template, present_textures[0]);

-    scheduler.Record([this, set, pipeline_index = current_pipeline](vk::CommandBuffer render_cmdbuf,
-                                                                    vk::CommandBuffer) {
-        render_cmdbuf.bindPipeline(vk::PipelineBindPoint::eGraphics,
+    scheduler.Record([this, set, pipeline_index = current_pipeline](vk::CommandBuffer cmdbuf) {
+        cmdbuf.bindPipeline(vk::PipelineBindPoint::eGraphics,
                                    present_pipelines[pipeline_index]);

-        render_cmdbuf.bindDescriptorSets(vk::PipelineBindPoint::eGraphics, present_pipeline_layout,
+        cmdbuf.bindDescriptorSets(vk::PipelineBindPoint::eGraphics, present_pipeline_layout,
                                          0, set, {});
     });

@@ -529,8 +528,7 @@ void RendererVulkan::LoadColorToActiveVkTexture(u8 color_r, u8 color_g, u8 color
     };

     renderpass_cache.ExitRenderpass();
-    scheduler.Record([image = texture.alloc.image, clear_color](vk::CommandBuffer render_cmdbuf,
-                                                                vk::CommandBuffer) {
+    scheduler.Record([image = texture.alloc.image, clear_color](vk::CommandBuffer cmdbuf) {
         const vk::ImageSubresourceRange range = {
             .aspectMask = vk::ImageAspectFlagBits::eColor,
             .baseMipLevel = 0,
@@ -561,14 +559,14 @@ void RendererVulkan::LoadColorToActiveVkTexture(u8 color_r, u8 color_g, u8 color
             .subresourceRange = range,
         };

-        render_cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eFragmentShader,
+        cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eFragmentShader,
                                       vk::PipelineStageFlagBits::eTransfer,
                                       vk::DependencyFlagBits::eByRegion, {}, {}, pre_barrier);

-        render_cmdbuf.clearColorImage(image, vk::ImageLayout::eTransferDstOptimal, clear_color,
+        cmdbuf.clearColorImage(image, vk::ImageLayout::eTransferDstOptimal, clear_color,
                                       range);

-        render_cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eTransfer,
+        cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eTransfer,
                                       vk::PipelineStageFlagBits::eFragmentShader,
                                       vk::DependencyFlagBits::eByRegion, {}, {}, post_barrier);
     });
@@ -625,15 +623,14 @@ void RendererVulkan::DrawSingleScreenRotated(u32 screen_id, float x, float y, fl
     draw_info.o_resolution = Common::Vec4f{h, w, 1.0f / h, 1.0f / w};
     draw_info.screen_id_l = screen_id;

-    scheduler.Record([this, offset = offset, info = draw_info](vk::CommandBuffer render_cmdbuf,
-                                                               vk::CommandBuffer) {
-        render_cmdbuf.pushConstants(present_pipeline_layout,
+    scheduler.Record([this, offset = offset, info = draw_info](vk::CommandBuffer cmdbuf) {
+        cmdbuf.pushConstants(present_pipeline_layout,
                                     vk::ShaderStageFlagBits::eFragment |
                                         vk::ShaderStageFlagBits::eVertex,
                                     0, sizeof(info), &info);

-        render_cmdbuf.bindVertexBuffers(0, vertex_buffer.Handle(), {0});
-        render_cmdbuf.draw(4, 1, offset / sizeof(ScreenRectVertex), 0);
+        cmdbuf.bindVertexBuffers(0, vertex_buffer.Handle(), {0});
+        cmdbuf.draw(4, 1, offset / sizeof(ScreenRectVertex), 0);
     });
 }

@@ -664,15 +661,14 @@ void RendererVulkan::DrawSingleScreen(u32 screen_id, float x, float y, float w,
     draw_info.o_resolution = Common::Vec4f{h, w, 1.0f / h, 1.0f / w};
     draw_info.screen_id_l = screen_id;

-    scheduler.Record([this, offset = offset, info = draw_info](vk::CommandBuffer render_cmdbuf,
-                                                               vk::CommandBuffer) {
-        render_cmdbuf.pushConstants(present_pipeline_layout,
+    scheduler.Record([this, offset = offset, info = draw_info](vk::CommandBuffer cmdbuf) {
+        cmdbuf.pushConstants(present_pipeline_layout,
                                     vk::ShaderStageFlagBits::eFragment |
                                         vk::ShaderStageFlagBits::eVertex,
                                     0, sizeof(info), &info);

-        render_cmdbuf.bindVertexBuffers(0, vertex_buffer.Handle(), {0});
-        render_cmdbuf.draw(4, 1, offset / sizeof(ScreenRectVertex), 0);
+        cmdbuf.bindVertexBuffers(0, vertex_buffer.Handle(), {0});
+        cmdbuf.draw(4, 1, offset / sizeof(ScreenRectVertex), 0);
     });
 }

@@ -704,15 +700,14 @@ void RendererVulkan::DrawSingleScreenStereoRotated(u32 screen_id_l, u32 screen_i
     draw_info.screen_id_l = screen_id_l;
     draw_info.screen_id_r = screen_id_r;

-    scheduler.Record([this, offset = offset, info = draw_info](vk::CommandBuffer render_cmdbuf,
-                                                               vk::CommandBuffer) {
-        render_cmdbuf.pushConstants(present_pipeline_layout,
+    scheduler.Record([this, offset = offset, info = draw_info](vk::CommandBuffer cmdbuf) {
+        cmdbuf.pushConstants(present_pipeline_layout,
                                     vk::ShaderStageFlagBits::eFragment |
                                         vk::ShaderStageFlagBits::eVertex,
                                     0, sizeof(info), &info);

-        render_cmdbuf.bindVertexBuffers(0, vertex_buffer.Handle(), {0});
-        render_cmdbuf.draw(4, 1, offset / sizeof(ScreenRectVertex), 0);
+        cmdbuf.bindVertexBuffers(0, vertex_buffer.Handle(), {0});
+        cmdbuf.draw(4, 1, offset / sizeof(ScreenRectVertex), 0);
     });
 }

@@ -746,15 +741,14 @@ void RendererVulkan::DrawSingleScreenStereo(u32 screen_id_l, u32 screen_id_r, fl
     draw_info.screen_id_l = screen_id_l;
     draw_info.screen_id_r = screen_id_r;

-    scheduler.Record([this, offset = offset, info = draw_info](vk::CommandBuffer render_cmdbuf,
-                                                               vk::CommandBuffer) {
-        render_cmdbuf.pushConstants(present_pipeline_layout,
+    scheduler.Record([this, offset = offset, info = draw_info](vk::CommandBuffer cmdbuf) {
+        cmdbuf.pushConstants(present_pipeline_layout,
                                     vk::ShaderStageFlagBits::eFragment |
                                         vk::ShaderStageFlagBits::eVertex,
                                     0, sizeof(info), &info);

-        render_cmdbuf.bindVertexBuffers(0, vertex_buffer.Handle(), {0});
-        render_cmdbuf.draw(4, 1, offset / sizeof(ScreenRectVertex), 0);
+        cmdbuf.bindVertexBuffers(0, vertex_buffer.Handle(), {0});
+        cmdbuf.draw(4, 1, offset / sizeof(ScreenRectVertex), 0);
     });
 }

@@ -925,7 +919,7 @@ void RendererVulkan::SwapBuffers() {
         swapchain.AcquireNextImage();
     } while (swapchain.NeedsRecreation());

-    scheduler.Record([layout](vk::CommandBuffer render_cmdbuf, vk::CommandBuffer) {
+    scheduler.Record([layout](vk::CommandBuffer cmdbuf) {
         const vk::Viewport viewport = {
             .x = 0.0f,
             .y = 0.0f,
@@ -940,8 +934,8 @@ void RendererVulkan::SwapBuffers() {
             .extent = {layout.width, layout.height},
         };

-        render_cmdbuf.setViewport(0, viewport);
-        render_cmdbuf.setScissor(0, scissor);
+        cmdbuf.setViewport(0, viewport);
+        cmdbuf.setScissor(0, scissor);
     });

     DrawScreens(layout, false);
@@ -167,8 +167,7 @@ void BlitHelper::BlitD24S8ToR32(Surface& source, Surface& dest,
     device.updateDescriptorSetWithTemplate(set, update_template, textures[0]);

     scheduler.Record([this, set, blit, src_image = source.alloc.image,
-                      dst_image = dest.alloc.image](vk::CommandBuffer render_cmdbuf,
-                                                    vk::CommandBuffer) {
+                      dst_image = dest.alloc.image](vk::CommandBuffer cmdbuf) {
         const std::array pre_barriers = {
             vk::ImageMemoryBarrier{
                 .srcAccessMask = vk::AccessFlagBits::eShaderWrite |
@@ -240,21 +239,21 @@ void BlitHelper::BlitD24S8ToR32(Surface& source, Surface& dest,
                     .layerCount = VK_REMAINING_ARRAY_LAYERS,
                 },
             }};
-        render_cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eAllCommands,
+        cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eAllCommands,
                                       vk::PipelineStageFlagBits::eComputeShader,
                                       vk::DependencyFlagBits::eByRegion, {}, {}, pre_barriers);

-        render_cmdbuf.bindDescriptorSets(vk::PipelineBindPoint::eCompute, compute_pipeline_layout,
+        cmdbuf.bindDescriptorSets(vk::PipelineBindPoint::eCompute, compute_pipeline_layout,
                                          0, set, {});
-        render_cmdbuf.bindPipeline(vk::PipelineBindPoint::eCompute, compute_pipeline);
+        cmdbuf.bindPipeline(vk::PipelineBindPoint::eCompute, compute_pipeline);

         const auto src_offset = Common::MakeVec(blit.src_rect.left, blit.src_rect.bottom);
-        render_cmdbuf.pushConstants(compute_pipeline_layout, vk::ShaderStageFlagBits::eCompute, 0,
+        cmdbuf.pushConstants(compute_pipeline_layout, vk::ShaderStageFlagBits::eCompute, 0,
                                     sizeof(Common::Vec2i), src_offset.AsArray());

-        render_cmdbuf.dispatch(blit.src_rect.GetWidth() / 8, blit.src_rect.GetHeight() / 8, 1);
+        cmdbuf.dispatch(blit.src_rect.GetWidth() / 8, blit.src_rect.GetHeight() / 8, 1);

-        render_cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eComputeShader,
+        cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eComputeShader,
                                       vk::PipelineStageFlagBits::eAllCommands,
                                       vk::DependencyFlagBits::eByRegion, {}, {}, post_barriers);
     });
@@ -132,8 +132,8 @@ void DescriptorManager::BindDescriptorSets() {
         bound_sets[i] = descriptor_sets[i];
     }

-    scheduler.Record([this, bound_sets](vk::CommandBuffer render_cmdbuf, vk::CommandBuffer) {
-        render_cmdbuf.bindDescriptorSets(vk::PipelineBindPoint::eGraphics, pipeline_layout, 0,
+    scheduler.Record([this, bound_sets](vk::CommandBuffer cmdbuf) {
+        cmdbuf.bindDescriptorSets(vk::PipelineBindPoint::eGraphics, pipeline_layout, 0,
                                          bound_sets, {});
     });

@@ -165,8 +165,7 @@ void D24S8toRGBA8::Reinterpret(Surface& source, VideoCore::Rect2D src_rect, Surf

     runtime.GetRenderpassCache().ExitRenderpass();
     scheduler.Record([this, set, src_rect, src_image = source.alloc.image,
-                      dst_image = dest.alloc.image](vk::CommandBuffer render_cmdbuf,
-                                                    vk::CommandBuffer) {
+                      dst_image = dest.alloc.image](vk::CommandBuffer cmdbuf) {
         const vk::ImageMemoryBarrier pre_barrier = {
             .srcAccessMask = vk::AccessFlagBits::eShaderWrite |
                              vk::AccessFlagBits::eDepthStencilAttachmentWrite |
@@ -221,21 +220,21 @@ void D24S8toRGBA8::Reinterpret(Surface& source, VideoCore::Rect2D src_rect, Surf
             },
         }};

-        render_cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eAllCommands,
+        cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eAllCommands,
                                       vk::PipelineStageFlagBits::eComputeShader,
                                       vk::DependencyFlagBits::eByRegion, {}, {}, pre_barrier);

-        render_cmdbuf.bindDescriptorSets(vk::PipelineBindPoint::eCompute, compute_pipeline_layout,
+        cmdbuf.bindDescriptorSets(vk::PipelineBindPoint::eCompute, compute_pipeline_layout,
                                          0, set, {});
-        render_cmdbuf.bindPipeline(vk::PipelineBindPoint::eCompute, compute_pipeline);
+        cmdbuf.bindPipeline(vk::PipelineBindPoint::eCompute, compute_pipeline);

         const auto src_offset = Common::MakeVec(src_rect.left, src_rect.bottom);
-        render_cmdbuf.pushConstants(compute_pipeline_layout, vk::ShaderStageFlagBits::eCompute, 0,
+        cmdbuf.pushConstants(compute_pipeline_layout, vk::ShaderStageFlagBits::eCompute, 0,
                                     sizeof(Common::Vec2i), src_offset.AsArray());

-        render_cmdbuf.dispatch(src_rect.GetWidth() / 8, src_rect.GetHeight() / 8, 1);
+        cmdbuf.dispatch(src_rect.GetWidth() / 8, src_rect.GetHeight() / 8, 1);

-        render_cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eComputeShader,
+        cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eComputeShader,
                                       vk::PipelineStageFlagBits::eAllCommands,
                                       vk::DependencyFlagBits::eByRegion, {}, {}, post_barriers);
     });
@@ -197,7 +197,7 @@ void PipelineCache::SaveDiskCache() {
 void PipelineCache::BindPipeline(const PipelineInfo& info) {
     ApplyDynamic(info);

-    scheduler.Record([this, info](vk::CommandBuffer render_cmdbuf, vk::CommandBuffer) {
+    scheduler.Record([this, info](vk::CommandBuffer cmdbuf) {
         std::size_t shader_hash = 0;
         for (u32 i = 0; i < MAX_SHADER_STAGES; i++) {
             shader_hash = Common::HashCombine(shader_hash, shader_hashes[i]);
@@ -215,7 +215,7 @@ void PipelineCache::BindPipeline(const PipelineInfo& info) {
             it->second = BuildPipeline(info);
         }

-        render_cmdbuf.bindPipeline(vk::PipelineBindPoint::eGraphics, it->second);
+        cmdbuf.bindPipeline(vk::PipelineBindPoint::eGraphics, it->second);
         current_pipeline = it->second;
     });

@@ -248,7 +248,7 @@ bool PipelineCache::UseProgrammableVertexShader(const Pica::Regs& regs,
     }

     scheduler.Record(
-        [this, handle = handle, hash = config.Hash()](vk::CommandBuffer, vk::CommandBuffer) {
+        [this, handle = handle, hash = config.Hash()](vk::CommandBuffer) {
             current_shaders[ProgramType::VS] = handle;
             shader_hashes[ProgramType::VS] = hash;
         });
@@ -257,7 +257,7 @@ bool PipelineCache::UseProgrammableVertexShader(const Pica::Regs& regs,
 }

 void PipelineCache::UseTrivialVertexShader() {
-    scheduler.Record([this](vk::CommandBuffer, vk::CommandBuffer) {
+    scheduler.Record([this](vk::CommandBuffer) {
         current_shaders[ProgramType::VS] = trivial_vertex_shader;
         shader_hashes[ProgramType::VS] = 0;
     });
@@ -273,14 +273,14 @@ void PipelineCache::UseFixedGeometryShader(const Pica::Regs& regs) {
         fixed_geometry_shaders.Get(gs_config, vk::ShaderStageFlagBits::eGeometry,
                                    instance.GetDevice(), ShaderOptimization::Debug);

-    scheduler.Record([this, handle, hash = gs_config.Hash()](vk::CommandBuffer, vk::CommandBuffer) {
+    scheduler.Record([this, handle, hash = gs_config.Hash()](vk::CommandBuffer) {
         current_shaders[ProgramType::GS] = handle;
         shader_hashes[ProgramType::GS] = hash;
     });
 }

 void PipelineCache::UseTrivialGeometryShader() {
-    scheduler.Record([this](vk::CommandBuffer, vk::CommandBuffer) {
+    scheduler.Record([this](vk::CommandBuffer) {
         current_shaders[ProgramType::GS] = VK_NULL_HANDLE;
         shader_hashes[ProgramType::GS] = 0;
     });
@@ -291,7 +291,7 @@ MICROPROFILE_DEFINE(Vulkan_FragmentGeneration, "Vulkan", "Fragment Shader Compil
 void PipelineCache::UseFragmentShader(const Pica::Regs& regs) {
     const PicaFSConfig config{regs, instance};

-    scheduler.Record([this, config](vk::CommandBuffer, vk::CommandBuffer) {
+    scheduler.Record([this, config](vk::CommandBuffer) {
         MICROPROFILE_SCOPE(Vulkan_FragmentGeneration);

         vk::ShaderModule handle{};
@@ -354,15 +354,15 @@ void PipelineCache::BindSampler(u32 binding, vk::Sampler sampler) {

 void PipelineCache::SetViewport(float x, float y, float width, float height) {
     const vk::Viewport viewport{x, y, width, height, 0.f, 1.f};
-    scheduler.Record([viewport](vk::CommandBuffer render_cmdbuf, vk::CommandBuffer) {
-        render_cmdbuf.setViewport(0, viewport);
+    scheduler.Record([viewport](vk::CommandBuffer cmdbuf) {
+        cmdbuf.setViewport(0, viewport);
     });
 }

 void PipelineCache::SetScissor(s32 x, s32 y, u32 width, u32 height) {
     const vk::Rect2D scissor{{x, y}, {width, height}};
-    scheduler.Record([scissor](vk::CommandBuffer render_cmdbuf, vk::CommandBuffer) {
-        render_cmdbuf.setScissor(0, scissor);
+    scheduler.Record([scissor](vk::CommandBuffer cmdbuf) {
+        cmdbuf.setScissor(0, scissor);
     });
 }

@@ -370,59 +370,58 @@ void PipelineCache::ApplyDynamic(const PipelineInfo& info) {
     const bool is_dirty = scheduler.IsStateDirty(StateFlags::Pipeline);

     PipelineInfo current = current_info;
-    scheduler.Record([this, info, is_dirty, current](vk::CommandBuffer render_cmdbuf,
-                                                     vk::CommandBuffer) {
+    scheduler.Record([this, info, is_dirty, current](vk::CommandBuffer cmdbuf) {
         if (info.dynamic.stencil_compare_mask != current.dynamic.stencil_compare_mask || is_dirty) {
-            render_cmdbuf.setStencilCompareMask(vk::StencilFaceFlagBits::eFrontAndBack,
+            cmdbuf.setStencilCompareMask(vk::StencilFaceFlagBits::eFrontAndBack,
                                                 info.dynamic.stencil_compare_mask);
         }

         if (info.dynamic.stencil_write_mask != current.dynamic.stencil_write_mask || is_dirty) {
-            render_cmdbuf.setStencilWriteMask(vk::StencilFaceFlagBits::eFrontAndBack,
+            cmdbuf.setStencilWriteMask(vk::StencilFaceFlagBits::eFrontAndBack,
                                               info.dynamic.stencil_write_mask);
         }

         if (info.dynamic.stencil_reference != current.dynamic.stencil_reference || is_dirty) {
-            render_cmdbuf.setStencilReference(vk::StencilFaceFlagBits::eFrontAndBack,
+            cmdbuf.setStencilReference(vk::StencilFaceFlagBits::eFrontAndBack,
                                               info.dynamic.stencil_reference);
         }

         if (info.dynamic.blend_color != current.dynamic.blend_color || is_dirty) {
             const Common::Vec4f color = PicaToVK::ColorRGBA8(info.dynamic.blend_color);
-            render_cmdbuf.setBlendConstants(color.AsArray());
+            cmdbuf.setBlendConstants(color.AsArray());
         }

         if (instance.IsExtendedDynamicStateSupported()) {
             if (info.rasterization.cull_mode != current.rasterization.cull_mode || is_dirty) {
-                render_cmdbuf.setCullModeEXT(PicaToVK::CullMode(info.rasterization.cull_mode));
-                render_cmdbuf.setFrontFaceEXT(PicaToVK::FrontFace(info.rasterization.cull_mode));
+                cmdbuf.setCullModeEXT(PicaToVK::CullMode(info.rasterization.cull_mode));
+                cmdbuf.setFrontFaceEXT(PicaToVK::FrontFace(info.rasterization.cull_mode));
             }

             if (info.depth_stencil.depth_compare_op != current.depth_stencil.depth_compare_op ||
                 is_dirty) {
-                render_cmdbuf.setDepthCompareOpEXT(
+                cmdbuf.setDepthCompareOpEXT(
                     PicaToVK::CompareFunc(info.depth_stencil.depth_compare_op));
             }

             if (info.depth_stencil.depth_test_enable != current.depth_stencil.depth_test_enable ||
                 is_dirty) {
-                render_cmdbuf.setDepthTestEnableEXT(info.depth_stencil.depth_test_enable);
+                cmdbuf.setDepthTestEnableEXT(info.depth_stencil.depth_test_enable);
             }

             if (info.depth_stencil.depth_write_enable != current.depth_stencil.depth_write_enable ||
                 is_dirty) {
-                render_cmdbuf.setDepthWriteEnableEXT(info.depth_stencil.depth_write_enable);
+                cmdbuf.setDepthWriteEnableEXT(info.depth_stencil.depth_write_enable);
             }

             if (info.rasterization.topology != current.rasterization.topology || is_dirty) {
-                render_cmdbuf.setPrimitiveTopologyEXT(
+                cmdbuf.setPrimitiveTopologyEXT(
                     PicaToVK::PrimitiveTopology(info.rasterization.topology));
             }

             if (info.depth_stencil.stencil_test_enable !=
                     current.depth_stencil.stencil_test_enable ||
                 is_dirty) {
-                render_cmdbuf.setStencilTestEnableEXT(info.depth_stencil.stencil_test_enable);
+                cmdbuf.setStencilTestEnableEXT(info.depth_stencil.stencil_test_enable);
             }

             if (info.depth_stencil.stencil_fail_op != current.depth_stencil.stencil_fail_op ||
@@ -431,7 +430,7 @@ void PipelineCache::ApplyDynamic(const PipelineInfo& info) {
                     current.depth_stencil.stencil_depth_fail_op ||
                 info.depth_stencil.stencil_compare_op != current.depth_stencil.stencil_compare_op ||
                 is_dirty) {
-                render_cmdbuf.setStencilOpEXT(
+                cmdbuf.setStencilOpEXT(
                     vk::StencilFaceFlagBits::eFrontAndBack,
                     PicaToVK::StencilOp(info.depth_stencil.stencil_fail_op),
                     PicaToVK::StencilOp(info.depth_stencil.stencil_pass_op),
@@ -285,9 +285,9 @@ void RasterizerVulkan::SetupVertexArray(u32 vs_input_size, u32 vs_input_index_mi
     SetupFixedAttribs();

     // Bind the generated bindings
-    scheduler.Record([this, binding_count = layout.binding_count, vertex_offsets = binding_offsets](
-                         vk::CommandBuffer render_cmdbuf, vk::CommandBuffer) {
-        render_cmdbuf.bindVertexBuffers(0, binding_count, vertex_buffers.data(),
+    scheduler.Record([this, binding_count = layout.binding_count, vertex_offsets = binding_offsets]
+                     (vk::CommandBuffer cmdbuf) {
+        cmdbuf.bindVertexBuffers(0, binding_count, vertex_buffers.data(),
                                         vertex_offsets.data());
     });
 }
@@ -423,11 +423,11 @@ bool RasterizerVulkan::AccelerateDrawBatchInternal(bool is_indexed) {
         .is_indexed = is_indexed,
     };

-    scheduler.Record([params](vk::CommandBuffer render_cmdbuf, vk::CommandBuffer) {
+    scheduler.Record([params](vk::CommandBuffer cmdbuf) {
         if (params.is_indexed) {
-            render_cmdbuf.drawIndexed(params.vertex_count, 1, 0, params.vertex_offset, 0);
+            cmdbuf.drawIndexed(params.vertex_count, 1, 0, params.vertex_offset, 0);
         } else {
-            render_cmdbuf.draw(params.vertex_count, 1, 0, 0);
+            cmdbuf.draw(params.vertex_count, 1, 0, 0);
         }
     });

@@ -459,8 +459,8 @@ void RasterizerVulkan::SetupIndexArray() {
     stream_buffer.Commit(index_buffer_size);

     scheduler.Record([this, index_offset = index_offset,
-                      index_type = index_type](vk::CommandBuffer render_cmdbuf, vk::CommandBuffer) {
-        render_cmdbuf.bindIndexBuffer(stream_buffer.Handle(), index_offset, index_type);
+                      index_type = index_type](vk::CommandBuffer cmdbuf) {
+        cmdbuf.bindIndexBuffer(stream_buffer.Handle(), index_offset, index_type);
     });
 }

@@ -780,9 +780,9 @@ bool RasterizerVulkan::Draw(bool accelerate, bool is_indexed) {
         stream_buffer.Commit(vertex_size);

         scheduler.Record([this, vertices, base_vertex,
-                          offset = offset](vk::CommandBuffer render_cmdbuf, vk::CommandBuffer) {
-            render_cmdbuf.bindVertexBuffers(0, stream_buffer.Handle(), offset);
-            render_cmdbuf.draw(vertices, 1, base_vertex, 0);
+                          offset = offset](vk::CommandBuffer cmdbuf) {
+            cmdbuf.bindVertexBuffers(0, stream_buffer.Handle(), offset);
+            cmdbuf.draw(vertices, 1, base_vertex, 0);
         });
     }
 }
@@ -95,9 +95,9 @@ void RenderpassCache::EnterRenderpass(const RenderpassState& state) {
     }

     scheduler.Record([should_end = bool(current_state.renderpass),
-                      state](vk::CommandBuffer render_cmdbuf, vk::CommandBuffer) {
+                      state](vk::CommandBuffer cmdbuf) {
         if (should_end) {
-            render_cmdbuf.endRenderPass();
+            cmdbuf.endRenderPass();
         }

         const vk::RenderPassBeginInfo renderpass_begin_info = {
@@ -108,7 +108,7 @@ void RenderpassCache::EnterRenderpass(const RenderpassState& state) {
             .pClearValues = &state.clear,
         };

-        render_cmdbuf.beginRenderPass(renderpass_begin_info, vk::SubpassContents::eInline);
+        cmdbuf.beginRenderPass(renderpass_begin_info, vk::SubpassContents::eInline);
     });

     if (is_dirty) {
@@ -123,9 +123,7 @@ void RenderpassCache::ExitRenderpass() {
         return;
     }

-    scheduler.Record(
-        [](vk::CommandBuffer render_cmdbuf, vk::CommandBuffer) { render_cmdbuf.endRenderPass(); });
-
+    scheduler.Record([](vk::CommandBuffer cmdbuf) { cmdbuf.endRenderPass(); });
     current_state = {};
 }

@@ -5,18 +5,17 @@
 #include <utility>
 #include "common/microprofile.h"
 #include "common/settings.h"
 #include "video_core/renderer_vulkan/renderer_vulkan.h"
 #include "video_core/renderer_vulkan/vk_instance.h"
 #include "video_core/renderer_vulkan/vk_renderpass_cache.h"
 #include "video_core/renderer_vulkan/vk_scheduler.h"

 namespace Vulkan {

-void Scheduler::CommandChunk::ExecuteAll(vk::CommandBuffer render_cmdbuf,
-                                         vk::CommandBuffer upload_cmdbuf) {
+void Scheduler::CommandChunk::ExecuteAll(vk::CommandBuffer cmdbuf) {
     auto command = first;
     while (command != nullptr) {
         auto next = command->GetNext();
-        command->Execute(render_cmdbuf, upload_cmdbuf);
+        command->Execute(cmdbuf);
         command->~Command();
         command = next;
     }
@@ -109,7 +108,7 @@ void Scheduler::WorkerThread() {
             work_queue.pop();

             has_submit = work->HasSubmit();
-            work->ExecuteAll(render_cmdbuf, upload_cmdbuf);
+            work->ExecuteAll(current_cmdbuf);
         }
         if (has_submit) {
             AllocateWorkerCommandBuffers();
@@ -121,13 +120,11 @@ void Scheduler::WorkerThread() {

 void Scheduler::AllocateWorkerCommandBuffers() {
     const vk::CommandBufferBeginInfo begin_info = {
-        .flags = vk::CommandBufferUsageFlagBits::eOneTimeSubmit};
+        .flags = vk::CommandBufferUsageFlagBits::eOneTimeSubmit,
+    };

-    upload_cmdbuf = command_pool.Commit();
-    upload_cmdbuf.begin(begin_info);
-
-    render_cmdbuf = command_pool.Commit();
-    render_cmdbuf.begin(begin_info);
+    current_cmdbuf = command_pool.Commit();
+    current_cmdbuf.begin(begin_info);
 }

 MICROPROFILE_DEFINE(Vulkan_Submit, "Vulkan", "Submit Exectution", MP_RGB(255, 192, 255));
@@ -138,10 +135,9 @@ void Scheduler::SubmitExecution(vk::Semaphore signal_semaphore, vk::Semaphore wa

     renderpass_cache.ExitRenderpass();
     Record([signal_semaphore, wait_semaphore, handle, signal_value,
-            this](vk::CommandBuffer render_cmdbuf, vk::CommandBuffer upload_cmdbuf) {
+            this](vk::CommandBuffer cmdbuf) {
         MICROPROFILE_SCOPE(Vulkan_Submit);
-        upload_cmdbuf.end();
-        render_cmdbuf.end();
+        cmdbuf.end();

         const u32 num_signal_semaphores = signal_semaphore ? 2U : 1U;
         const std::array signal_values{signal_value, u64(0)};
@@ -163,14 +159,13 @@ void Scheduler::SubmitExecution(vk::Semaphore signal_semaphore, vk::Semaphore wa
             .pSignalSemaphoreValues = signal_values.data(),
         };

-        const std::array cmdbuffers = {upload_cmdbuf, render_cmdbuf};
         const vk::SubmitInfo submit_info = {
             .pNext = &timeline_si,
             .waitSemaphoreCount = num_wait_semaphores,
             .pWaitSemaphores = wait_semaphores.data(),
             .pWaitDstStageMask = wait_stage_masks.data(),
-            .commandBufferCount = static_cast<u32>(cmdbuffers.size()),
-            .pCommandBuffers = cmdbuffers.data(),
+            .commandBufferCount = 1u,
+            .pCommandBuffers = &cmdbuf,
             .signalSemaphoreCount = num_signal_semaphores,
             .pSignalSemaphores = signal_semaphores.data(),
         };
@@ -53,7 +53,7 @@ public:
     template <typename T>
     void Record(T&& command) {
         if (!use_worker_thread) {
-            command(render_cmdbuf, upload_cmdbuf);
+            command(current_cmdbuf);
             return;
         }

@@ -103,8 +103,7 @@ private:
    public:
        virtual ~Command() = default;

-        virtual void Execute(vk::CommandBuffer render_cmdbuf,
-                             vk::CommandBuffer upload_cmdbuf) const = 0;
+        virtual void Execute(vk::CommandBuffer cmdbuf) const = 0;

        Command* GetNext() const {
            return next;
@@ -127,9 +126,8 @@ private:
        TypedCommand(TypedCommand&&) = delete;
        TypedCommand& operator=(TypedCommand&&) = delete;

-        void Execute(vk::CommandBuffer render_cmdbuf,
-                     vk::CommandBuffer upload_cmdbuf) const override {
-            command(render_cmdbuf, upload_cmdbuf);
+        void Execute(vk::CommandBuffer cmdbuf) const override {
+            command(cmdbuf);
        }

    private:
@@ -138,7 +136,7 @@ private:

    class CommandChunk final {
    public:
-        void ExecuteAll(vk::CommandBuffer render_cmdbuf, vk::CommandBuffer upload_cmdbuf);
+        void ExecuteAll(vk::CommandBuffer cmdbuf);

        template <typename T>
        bool Record(T& command) {
@@ -201,8 +199,7 @@ private:
    std::unique_ptr<CommandChunk> chunk;
    std::queue<std::unique_ptr<CommandChunk>> work_queue;
    std::vector<std::unique_ptr<CommandChunk>> chunk_reserve;
-    vk::CommandBuffer render_cmdbuf;
-    vk::CommandBuffer upload_cmdbuf;
+    vk::CommandBuffer current_cmdbuf;
    StateFlags state{};
    std::mutex reserve_mutex;
    std::mutex work_mutex;
@@ -111,7 +111,7 @@ void Swapchain::AcquireNextImage() {

 MICROPROFILE_DEFINE(Vulkan_Present, "Vulkan", "Swapchain Present", MP_RGB(66, 185, 245));
 void Swapchain::Present() {
-    scheduler.Record([this, index = image_index](vk::CommandBuffer, vk::CommandBuffer) {
+    scheduler.Record([this, index = image_index](vk::CommandBuffer) {
         const vk::PresentInfoKHR present_info = {
             .waitSemaphoreCount = 1,
             .pWaitSemaphores = &present_ready[index],
@@ -327,8 +327,7 @@ ImageAlloc TextureRuntime::Allocate(u32 width, u32 height, VideoCore::PixelForma
         alloc.storage_view = device.createImageView(storage_view_info);
     }

-    scheduler.Record([image = alloc.image, aspect = alloc.aspect](vk::CommandBuffer,
-                                                                  vk::CommandBuffer upload_cmdbuf) {
+    scheduler.Record([image = alloc.image, aspect = alloc.aspect](vk::CommandBuffer cmdbuf) {
         const vk::ImageMemoryBarrier init_barrier = {
             .srcAccessMask = vk::AccessFlagBits::eNone,
             .dstAccessMask = vk::AccessFlagBits::eNone,
@@ -346,7 +345,7 @@ ImageAlloc TextureRuntime::Allocate(u32 width, u32 height, VideoCore::PixelForma
             },
         };

-        upload_cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eTopOfPipe,
+        cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eTopOfPipe,
                                       vk::PipelineStageFlagBits::eTopOfPipe,
                                       vk::DependencyFlagBits::eByRegion, {}, {}, init_barrier);
     });
@@ -406,8 +405,7 @@ bool TextureRuntime::ClearTexture(Surface& surface, const VideoCore::TextureClea
     };

     if (clear.texture_rect == surface.GetScaledRect()) {
-        scheduler.Record([params, clear, value](vk::CommandBuffer render_cmdbuf,
-                                                vk::CommandBuffer) {
+        scheduler.Record([params, clear, value](vk::CommandBuffer cmdbuf) {
             const vk::ImageSubresourceRange range = {
                 .aspectMask = params.aspect,
                 .baseMipLevel = clear.texture_level,
@@ -450,23 +448,23 @@ bool TextureRuntime::ClearTexture(Surface& surface, const VideoCore::TextureClea
                },
            };

-            render_cmdbuf.pipelineBarrier(params.pipeline_flags,
+            cmdbuf.pipelineBarrier(params.pipeline_flags,
                                          vk::PipelineStageFlagBits::eTransfer,
                                          vk::DependencyFlagBits::eByRegion, {}, {}, pre_barrier);

            const bool is_color =
                static_cast<bool>(params.aspect & vk::ImageAspectFlagBits::eColor);
            if (is_color) {
-                render_cmdbuf.clearColorImage(params.src_image,
+                cmdbuf.clearColorImage(params.src_image,
                                              vk::ImageLayout::eTransferDstOptimal,
                                              MakeClearColorValue(value), range);
            } else {
-                render_cmdbuf.clearDepthStencilImage(params.src_image,
+                cmdbuf.clearDepthStencilImage(params.src_image,
                                                     vk::ImageLayout::eTransferDstOptimal,
                                                     MakeClearDepthStencilValue(value), range);
            }

-            render_cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eTransfer,
+            cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eTransfer,
                                          params.pipeline_flags, vk::DependencyFlagBits::eByRegion,
                                          {}, {}, post_barrier);
        });
@@ -525,8 +523,7 @@ void TextureRuntime::ClearTextureWithRenderpass(Surface& surface,
         .src_image = surface.alloc.image,
     };

-    scheduler.Record([params, level = clear.texture_level](vk::CommandBuffer render_cmdbuf,
-                                                           vk::CommandBuffer) {
+    scheduler.Record([params, level = clear.texture_level](vk::CommandBuffer cmdbuf) {
         const vk::ImageMemoryBarrier pre_barrier = {
             .srcAccessMask = params.src_access,
             .dstAccessMask = vk::AccessFlagBits::eTransferWrite,
@@ -544,15 +541,14 @@ void TextureRuntime::ClearTextureWithRenderpass(Surface& surface,
             },
         };

-        render_cmdbuf.pipelineBarrier(params.pipeline_flags, vk::PipelineStageFlagBits::eTransfer,
+        cmdbuf.pipelineBarrier(params.pipeline_flags, vk::PipelineStageFlagBits::eTransfer,
                                       vk::DependencyFlagBits::eByRegion, {}, {}, pre_barrier);
     });

     renderpass_cache.EnterRenderpass(clear_info);
     renderpass_cache.ExitRenderpass();

-    scheduler.Record([params, level = clear.texture_level](vk::CommandBuffer render_cmdbuf,
-                                                           vk::CommandBuffer) {
+    scheduler.Record([params, level = clear.texture_level](vk::CommandBuffer cmdbuf) {
         const vk::ImageMemoryBarrier post_barrier = {
             .srcAccessMask = vk::AccessFlagBits::eTransferWrite,
             .dstAccessMask = params.src_access,
@@ -570,7 +566,7 @@ void TextureRuntime::ClearTextureWithRenderpass(Surface& surface,
             },
         };

-        render_cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eTransfer, params.pipeline_flags,
+        cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eTransfer, params.pipeline_flags,
                                       vk::DependencyFlagBits::eByRegion, {}, {}, post_barrier);
     });
 }
@@ -589,7 +585,7 @@ bool TextureRuntime::CopyTextures(Surface& source, Surface& dest,
         .dst_image = dest.alloc.image,
     };

-    scheduler.Record([params, copy](vk::CommandBuffer render_cmdbuf, vk::CommandBuffer) {
+    scheduler.Record([params, copy](vk::CommandBuffer cmdbuf) {
         const vk::ImageCopy image_copy = {
             .srcSubresource{
                 .aspectMask = params.aspect,
@@ -679,13 +675,13 @@ bool TextureRuntime::CopyTextures(Surface& source, Surface& dest,
            },
        };

-        render_cmdbuf.pipelineBarrier(params.pipeline_flags, vk::PipelineStageFlagBits::eTransfer,
+        cmdbuf.pipelineBarrier(params.pipeline_flags, vk::PipelineStageFlagBits::eTransfer,
                                      vk::DependencyFlagBits::eByRegion, {}, {}, pre_barriers);

-        render_cmdbuf.copyImage(params.src_image, vk::ImageLayout::eTransferSrcOptimal,
+        cmdbuf.copyImage(params.src_image, vk::ImageLayout::eTransferSrcOptimal,
                                params.dst_image, vk::ImageLayout::eTransferDstOptimal, image_copy);

-        render_cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eTransfer, params.pipeline_flags,
+        cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eTransfer, params.pipeline_flags,
                                      vk::DependencyFlagBits::eByRegion, {}, {}, post_barriers);
     });

@@ -706,7 +702,7 @@ bool TextureRuntime::BlitTextures(Surface& source, Surface& dest,
         .dst_image = dest.alloc.image,
     };

-    scheduler.Record([params, blit](vk::CommandBuffer render_cmdbuf, vk::CommandBuffer) {
+    scheduler.Record([params, blit](vk::CommandBuffer cmdbuf) {
         const std::array source_offsets = {
             vk::Offset3D{static_cast<s32>(blit.src_rect.left),
                          static_cast<s32>(blit.src_rect.bottom), 0},
@@ -807,14 +803,14 @@ bool TextureRuntime::BlitTextures(Surface& source, Surface& dest,
            },
        };

-        render_cmdbuf.pipelineBarrier(params.pipeline_flags, vk::PipelineStageFlagBits::eTransfer,
+        cmdbuf.pipelineBarrier(params.pipeline_flags, vk::PipelineStageFlagBits::eTransfer,
                                      vk::DependencyFlagBits::eByRegion, {}, {}, read_barriers);

-        render_cmdbuf.blitImage(params.src_image, vk::ImageLayout::eTransferSrcOptimal,
+        cmdbuf.blitImage(params.src_image, vk::ImageLayout::eTransferSrcOptimal,
                                params.dst_image, vk::ImageLayout::eTransferDstOptimal, blit_area,
                                params.filter);

-        render_cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eTransfer, params.pipeline_flags,
+        cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eTransfer, params.pipeline_flags,
                                      vk::DependencyFlagBits::eByRegion, {}, {}, write_barriers);
     });

@@ -932,7 +928,7 @@ void Surface::Upload(const VideoCore::BufferTextureCopy& upload, const StagingDa
     };

     scheduler.Record([format = alloc.format, params, staging,
-                      upload](vk::CommandBuffer render_cmdbuf, vk::CommandBuffer) {
+                      upload](vk::CommandBuffer cmdbuf) {
         u32 num_copies = 1;
         std::array<vk::BufferImageCopy, 2> buffer_image_copies;

@@ -994,15 +990,15 @@ void Surface::Upload(const VideoCore::BufferTextureCopy& upload, const StagingDa
            },
        };

-        render_cmdbuf.pipelineBarrier(params.pipeline_flags,
+        cmdbuf.pipelineBarrier(params.pipeline_flags,
                                      vk::PipelineStageFlagBits::eTransfer,
                                      vk::DependencyFlagBits::eByRegion, {}, {}, read_barrier);

-        render_cmdbuf.copyBufferToImage(staging.buffer, params.src_image,
+        cmdbuf.copyBufferToImage(staging.buffer, params.src_image,
                                        vk::ImageLayout::eTransferDstOptimal, num_copies,
                                        buffer_image_copies.data());

-        render_cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eTransfer,
+        cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eTransfer,
                                      params.pipeline_flags, vk::DependencyFlagBits::eByRegion,
                                      {}, {}, write_barrier);
     });
@@ -1037,8 +1033,7 @@ void Surface::Download(const VideoCore::BufferTextureCopy& download, const Stagi
         .src_image = alloc.image,
     };

-    scheduler.Record([params, staging, download](vk::CommandBuffer render_cmdbuf,
-                                                 vk::CommandBuffer) {
+    scheduler.Record([params, staging, download](vk::CommandBuffer cmdbuf) {
         const VideoCore::Rect2D rect = download.texture_rect;
         const vk::BufferImageCopy buffer_image_copy = {
             .bufferOffset = staging.buffer_offset + download.buffer_offset,
@@ -1091,14 +1086,14 @@ void Surface::Download(const VideoCore::BufferTextureCopy& download, const Stagi
            .dstAccessMask = vk::AccessFlagBits::eMemoryRead | vk::AccessFlagBits::eMemoryWrite,
        };

-        render_cmdbuf.pipelineBarrier(params.pipeline_flags,
+        cmdbuf.pipelineBarrier(params.pipeline_flags,
                                      vk::PipelineStageFlagBits::eTransfer,
                                      vk::DependencyFlagBits::eByRegion, {}, {}, read_barrier);

-        render_cmdbuf.copyImageToBuffer(params.src_image, vk::ImageLayout::eTransferSrcOptimal,
+        cmdbuf.copyImageToBuffer(params.src_image, vk::ImageLayout::eTransferSrcOptimal,
                                        staging.buffer, buffer_image_copy);

-        render_cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eTransfer,
+        cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eTransfer,
                                      params.pipeline_flags, vk::DependencyFlagBits::eByRegion,
                                      memory_write_barrier, {}, image_write_barrier);
     });