code: Run clang format
@@ -19,13 +19,13 @@
 static bool IsPortraitMode() {
 return JNI_FALSE != IDCache::GetEnvForThread()->CallStaticBooleanMethod(
 IDCache::GetNativeLibraryClass(), IDCache::GetIsPortraitMode());
 }

 static void UpdateLandscapeScreenLayout() {
 Settings::values.layout_option =
 static_cast<Settings::LayoutOption>(IDCache::GetEnvForThread()->CallStaticIntMethod(
 IDCache::GetNativeLibraryClass(), IDCache::GetLandscapeScreenLayout()));
 }

 void EmuWindow_Android::OnSurfaceChanged(ANativeWindow* surface) {
@@ -63,7 +63,7 @@ void EmuWindow_Android::OnFramebufferSizeChanged() {
 }
 }

-EmuWindow_Android::EmuWindow_Android(ANativeWindow *surface) : host_window{surface} {
+EmuWindow_Android::EmuWindow_Android(ANativeWindow* surface) : host_window{surface} {
 LOG_DEBUG(Frontend, "Initializing EmuWindow_Android");

 if (!surface) {
@@ -71,7 +71,7 @@ EmuWindow_Android::EmuWindow_Android(ANativeWindow *surface) : host_window{surfa
 return;
 }

 window_width = ANativeWindow_getWidth(surface);
 window_height = ANativeWindow_getHeight(surface);

 Network::Init();
@@ -9,11 +9,11 @@

 class EmuWindow_Android : public Frontend::EmuWindow {
 public:
-EmuWindow_Android(ANativeWindow *surface);
+EmuWindow_Android(ANativeWindow* surface);
 ~EmuWindow_Android();

 /// Called by the onSurfaceChanges() method to change the surface
-void OnSurfaceChanged(ANativeWindow *surface);
+void OnSurfaceChanged(ANativeWindow* surface);

 /// Handles touch event that occur.(Touched or released)
 bool OnTouchEvent(int x, int y, bool pressed);
@@ -41,9 +41,11 @@ static constexpr std::array<EGLint, 4> egl_context_attribs{EGL_CONTEXT_CLIENT_VE
 class SharedContext_Android : public Frontend::GraphicsContext {
 public:
 SharedContext_Android(EGLDisplay egl_display, EGLConfig egl_config,
-EGLContext egl_share_context) : egl_display{egl_display},
-egl_surface{eglCreatePbufferSurface(egl_display, egl_config, egl_empty_attribs.data())},
-egl_context{eglCreateContext(egl_display, egl_config, egl_share_context, egl_context_attribs.data())} {
+EGLContext egl_share_context)
+: egl_display{egl_display}, egl_surface{eglCreatePbufferSurface(egl_display, egl_config,
+egl_empty_attribs.data())},
+egl_context{eglCreateContext(egl_display, egl_config, egl_share_context,
+egl_context_attribs.data())} {
 ASSERT_MSG(egl_surface, "eglCreatePbufferSurface() failed!");
 ASSERT_MSG(egl_context, "eglCreateContext() failed!");
 }
@@ -72,7 +74,8 @@ private:
 EGLContext egl_context{};
 };

-EmuWindow_Android_OpenGL::EmuWindow_Android_OpenGL(ANativeWindow* surface) : EmuWindow_Android{surface} {
+EmuWindow_Android_OpenGL::EmuWindow_Android_OpenGL(ANativeWindow* surface)
+: EmuWindow_Android{surface} {
 if (egl_display = eglGetDisplay(EGL_DEFAULT_DISPLAY); egl_display == EGL_NO_DISPLAY) {
 LOG_CRITICAL(Frontend, "eglGetDisplay() failed");
 return;
@@ -19,7 +19,8 @@

 class SharedContext_Android : public Frontend::GraphicsContext {};

-EmuWindow_Android_Vulkan::EmuWindow_Android_Vulkan(ANativeWindow* surface) : EmuWindow_Android{surface} {
+EmuWindow_Android_Vulkan::EmuWindow_Android_Vulkan(ANativeWindow* surface)
+: EmuWindow_Android{surface} {
 CreateWindowSurface();

 if (core_context = CreateSharedContext(); !core_context) {
@@ -32,8 +32,8 @@
 #include "jni/camera/ndk_camera.h"
 #include "jni/camera/still_image_camera.h"
 #include "jni/config.h"
-#include "jni/emu_window/emu_window_vk.h"
 #include "jni/emu_window/emu_window_gl.h"
+#include "jni/emu_window/emu_window_vk.h"
 #include "jni/game_info.h"
 #include "jni/game_settings.h"
 #include "jni/id_cache.h"
@@ -30,7 +30,8 @@ std::string NativeErrorToString(int e) {
 return ret;
 #else
 char err_str[255];
-#if (defined(__GLIBC__) && (_GNU_SOURCE || (_POSIX_C_SOURCE < 200112L && _XOPEN_SOURCE < 600))) || defined(ANDROID)
+#if (defined(__GLIBC__) && (_GNU_SOURCE || (_POSIX_C_SOURCE < 200112L && _XOPEN_SOURCE < 600))) || \
+defined(ANDROID)
 // Thread safe (GNU-specific)
 const char* str = strerror_r(e, err_str, sizeof(err_str));
 return std::string(str);
@@ -105,7 +105,7 @@ private:
 } // namespace polyfill

 #if ANDROID
-template < class T, class... Args >
+template <class T, class... Args>
 concept constructible_from = is_nothrow_destructible_v<T> && is_constructible_v<T, Args...>;
 #endif

@@ -125,7 +125,7 @@ void Driver::DeduceVendor() {
 } else if (gpu_vendor.find("ARM") != gpu_vendor.npos) {
 vendor = Vendor::ARM;
 } else if (gpu_vendor.find("GDI Generic") != gpu_vendor.npos) {
 vendor = Vendor::Generic;
 }
 }

@@ -262,7 +262,8 @@ static std::string SampleTexture(const PicaFSConfig& config, unsigned texture_un
 // Only unit 0 respects the texturing type
 switch (state.texture0_type) {
 case TexturingRegs::TextureConfig::Texture2D:
-return "textureLod(tex0, texcoord0, getLod(texcoord0 * vec2(textureSize(tex0, 0))) + tex_lod_bias[0])";
+return "textureLod(tex0, texcoord0, getLod(texcoord0 * vec2(textureSize(tex0, 0))) + "
+"tex_lod_bias[0])";
 case TexturingRegs::TextureConfig::Projection2D:
 // TODO (wwylele): find the exact LOD formula for projection texture
 return "textureProj(tex0, vec3(texcoord0, texcoord0_w))";
@@ -280,12 +281,15 @@ static std::string SampleTexture(const PicaFSConfig& config, unsigned texture_un
 return "texture(tex0, texcoord0)";
 }
 case 1:
-return "textureLod(tex1, texcoord1, getLod(texcoord1 * vec2(textureSize(tex1, 0))) + tex_lod_bias[1])";
+return "textureLod(tex1, texcoord1, getLod(texcoord1 * vec2(textureSize(tex1, 0))) + "
+"tex_lod_bias[1])";
 case 2:
 if (state.texture2_use_coord1)
-return "textureLod(tex2, texcoord1, getLod(texcoord1 * vec2(textureSize(tex2, 0))) + tex_lod_bias[2])";
+return "textureLod(tex2, texcoord1, getLod(texcoord1 * vec2(textureSize(tex2, 0))) + "
+"tex_lod_bias[2])";
 else
-return "textureLod(tex2, texcoord2, getLod(texcoord2 * vec2(textureSize(tex2, 0))) + tex_lod_bias[2])";
+return "textureLod(tex2, texcoord2, getLod(texcoord2 * vec2(textureSize(tex2, 0))) + "
+"tex_lod_bias[2])";
 case 3:
 if (state.proctex.enable) {
 return "ProcTex()";
@@ -222,11 +222,10 @@ bool TextureRuntime::ClearTexture(Surface& surface, const VideoCore::TextureClea

 bool TextureRuntime::CopyTextures(Surface& source, Surface& dest,
 const VideoCore::TextureCopy& copy) {
-glCopyImageSubData(source.texture.handle, GL_TEXTURE_2D,
-copy.src_level, copy.src_offset.x, copy.src_offset.y, 0,
-dest.texture.handle, GL_TEXTURE_2D,
-copy.dst_level, copy.dst_offset.x, copy.dst_offset.y, 0,
-copy.extent.width, copy.extent.height, 1);
+glCopyImageSubData(source.texture.handle, GL_TEXTURE_2D, copy.src_level, copy.src_offset.x,
+copy.src_offset.y, 0, dest.texture.handle, GL_TEXTURE_2D, copy.dst_level,
+copy.dst_offset.x, copy.dst_offset.y, 0, copy.extent.width,
+copy.extent.height, 1);
 return true;
 }

@@ -208,11 +208,10 @@ void RendererVulkan::BeginRendering() {
 device.updateDescriptorSetWithTemplate(set, present_update_template, present_textures[0]);

 scheduler.Record([this, set, pipeline_index = current_pipeline](vk::CommandBuffer cmdbuf) {
-cmdbuf.bindPipeline(vk::PipelineBindPoint::eGraphics,
-present_pipelines[pipeline_index]);
+cmdbuf.bindPipeline(vk::PipelineBindPoint::eGraphics, present_pipelines[pipeline_index]);

-cmdbuf.bindDescriptorSets(vk::PipelineBindPoint::eGraphics, present_pipeline_layout,
-0, set, {});
+cmdbuf.bindDescriptorSets(vk::PipelineBindPoint::eGraphics, present_pipeline_layout, 0, set,
+{});
 });

 const RenderpassState renderpass_info = {
@@ -561,15 +560,14 @@ void RendererVulkan::LoadColorToActiveVkTexture(u8 color_r, u8 color_g, u8 color
 };

 cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eFragmentShader,
 vk::PipelineStageFlagBits::eTransfer,
 vk::DependencyFlagBits::eByRegion, {}, {}, pre_barrier);

-cmdbuf.clearColorImage(image, vk::ImageLayout::eTransferDstOptimal, clear_color,
-range);
+cmdbuf.clearColorImage(image, vk::ImageLayout::eTransferDstOptimal, clear_color, range);

 cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eTransfer,
 vk::PipelineStageFlagBits::eFragmentShader,
 vk::DependencyFlagBits::eByRegion, {}, {}, post_barrier);
 });
 }

@@ -626,9 +624,8 @@ void RendererVulkan::DrawSingleScreenRotated(u32 screen_id, float x, float y, fl

 scheduler.Record([this, offset = offset, info = draw_info](vk::CommandBuffer cmdbuf) {
 cmdbuf.pushConstants(present_pipeline_layout,
-vk::ShaderStageFlagBits::eFragment |
-vk::ShaderStageFlagBits::eVertex,
-0, sizeof(info), &info);
+vk::ShaderStageFlagBits::eFragment | vk::ShaderStageFlagBits::eVertex,
+0, sizeof(info), &info);

 cmdbuf.bindVertexBuffers(0, vertex_buffer.Handle(), {0});
 cmdbuf.draw(4, 1, offset / sizeof(ScreenRectVertex), 0);
@@ -664,9 +661,8 @@ void RendererVulkan::DrawSingleScreen(u32 screen_id, float x, float y, float w,

 scheduler.Record([this, offset = offset, info = draw_info](vk::CommandBuffer cmdbuf) {
 cmdbuf.pushConstants(present_pipeline_layout,
-vk::ShaderStageFlagBits::eFragment |
-vk::ShaderStageFlagBits::eVertex,
-0, sizeof(info), &info);
+vk::ShaderStageFlagBits::eFragment | vk::ShaderStageFlagBits::eVertex,
+0, sizeof(info), &info);

 cmdbuf.bindVertexBuffers(0, vertex_buffer.Handle(), {0});
 cmdbuf.draw(4, 1, offset / sizeof(ScreenRectVertex), 0);
@@ -703,9 +699,8 @@ void RendererVulkan::DrawSingleScreenStereoRotated(u32 screen_id_l, u32 screen_i

 scheduler.Record([this, offset = offset, info = draw_info](vk::CommandBuffer cmdbuf) {
 cmdbuf.pushConstants(present_pipeline_layout,
-vk::ShaderStageFlagBits::eFragment |
-vk::ShaderStageFlagBits::eVertex,
-0, sizeof(info), &info);
+vk::ShaderStageFlagBits::eFragment | vk::ShaderStageFlagBits::eVertex,
+0, sizeof(info), &info);

 cmdbuf.bindVertexBuffers(0, vertex_buffer.Handle(), {0});
 cmdbuf.draw(4, 1, offset / sizeof(ScreenRectVertex), 0);
@@ -744,9 +739,8 @@ void RendererVulkan::DrawSingleScreenStereo(u32 screen_id_l, u32 screen_id_r, fl

 scheduler.Record([this, offset = offset, info = draw_info](vk::CommandBuffer cmdbuf) {
 cmdbuf.pushConstants(present_pipeline_layout,
-vk::ShaderStageFlagBits::eFragment |
-vk::ShaderStageFlagBits::eVertex,
-0, sizeof(info), &info);
+vk::ShaderStageFlagBits::eFragment | vk::ShaderStageFlagBits::eVertex,
+0, sizeof(info), &info);

 cmdbuf.bindVertexBuffers(0, vertex_buffer.Handle(), {0});
 cmdbuf.draw(4, 1, offset / sizeof(ScreenRectVertex), 0);
@@ -14,8 +14,7 @@
 namespace Vulkan {

 BlitHelper::BlitHelper(const Instance& instance, Scheduler& scheduler,
-DescriptorManager& desc_manager,
-RenderpassCache& renderpass_cache)
+DescriptorManager& desc_manager, RenderpassCache& renderpass_cache)
 : scheduler{scheduler}, desc_manager{desc_manager},
 renderpass_cache{renderpass_cache}, device{instance.GetDevice()} {
 constexpr std::string_view cs_source = R"(
@@ -242,24 +241,24 @@ void BlitHelper::BlitD24S8ToR32(Surface& source, Surface& dest,
 },
 }};
 cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eEarlyFragmentTests |
 vk::PipelineStageFlagBits::eLateFragmentTests,
 vk::PipelineStageFlagBits::eComputeShader,
 vk::DependencyFlagBits::eByRegion, {}, {}, pre_barriers);

-cmdbuf.bindDescriptorSets(vk::PipelineBindPoint::eCompute, compute_pipeline_layout,
-0, set, {});
+cmdbuf.bindDescriptorSets(vk::PipelineBindPoint::eCompute, compute_pipeline_layout, 0, set,
+{});
 cmdbuf.bindPipeline(vk::PipelineBindPoint::eCompute, compute_pipeline);

 const auto src_offset = Common::MakeVec(blit.src_rect.left, blit.src_rect.bottom);
 cmdbuf.pushConstants(compute_pipeline_layout, vk::ShaderStageFlagBits::eCompute, 0,
 sizeof(Common::Vec2i), src_offset.AsArray());

 cmdbuf.dispatch(blit.src_rect.GetWidth() / 8, blit.src_rect.GetHeight() / 8, 1);

 cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eComputeShader,
 vk::PipelineStageFlagBits::eEarlyFragmentTests |
 vk::PipelineStageFlagBits::eLateFragmentTests |
 vk::PipelineStageFlagBits::eTransfer,
 vk::DependencyFlagBits::eByRegion, {}, {}, post_barriers);
 });
 }
@@ -20,8 +20,7 @@ class Surface;

 class BlitHelper {
 public:
-BlitHelper(const Instance& instance, Scheduler& scheduler,
-DescriptorManager& desc_manager,
+BlitHelper(const Instance& instance, Scheduler& scheduler, DescriptorManager& desc_manager,
 RenderpassCache& renderpass_cache);
 ~BlitHelper();

@@ -137,8 +137,8 @@ void DescriptorManager::BindDescriptorSets() {
 }

 scheduler.Record([this, bound_sets](vk::CommandBuffer cmdbuf) {
-cmdbuf.bindDescriptorSets(vk::PipelineBindPoint::eGraphics, pipeline_layout, 0,
-bound_sets, {});
+cmdbuf.bindDescriptorSets(vk::PipelineBindPoint::eGraphics, pipeline_layout, 0, bound_sets,
+{});
 });

 descriptor_set_dirty.fill(false);
@@ -176,7 +176,8 @@ void D24S8toRGBA8::Reinterpret(Surface& source, VideoCore::Rect2D src_rect, Surf
 .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
 .image = src_image,
 .subresourceRange{
-.aspectMask = vk::ImageAspectFlagBits::eDepth | vk::ImageAspectFlagBits::eStencil,
+.aspectMask =
+vk::ImageAspectFlagBits::eDepth | vk::ImageAspectFlagBits::eStencil,
 .baseMipLevel = 0,
 .levelCount = VK_REMAINING_MIP_LEVELS,
 .baseArrayLayer = 0,
@@ -198,8 +199,7 @@ void D24S8toRGBA8::Reinterpret(Surface& source, VideoCore::Rect2D src_rect, Surf
 .baseArrayLayer = 0,
 .layerCount = VK_REMAINING_ARRAY_LAYERS,
 },
-}
-};
+}};
 const std::array post_barriers = {
 vk::ImageMemoryBarrier{
 .srcAccessMask = vk::AccessFlagBits::eShaderRead,
@@ -237,25 +237,25 @@ void D24S8toRGBA8::Reinterpret(Surface& source, VideoCore::Rect2D src_rect, Surf
 };

 cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eColorAttachmentOutput |
 vk::PipelineStageFlagBits::eEarlyFragmentTests |
 vk::PipelineStageFlagBits::eLateFragmentTests,
 vk::PipelineStageFlagBits::eComputeShader,
 vk::DependencyFlagBits::eByRegion, {}, {}, pre_barriers);

-cmdbuf.bindDescriptorSets(vk::PipelineBindPoint::eCompute, compute_pipeline_layout,
-0, set, {});
+cmdbuf.bindDescriptorSets(vk::PipelineBindPoint::eCompute, compute_pipeline_layout, 0, set,
+{});
 cmdbuf.bindPipeline(vk::PipelineBindPoint::eCompute, compute_pipeline);

 const auto src_offset = Common::MakeVec(src_rect.left, src_rect.bottom);
 cmdbuf.pushConstants(compute_pipeline_layout, vk::ShaderStageFlagBits::eCompute, 0,
 sizeof(Common::Vec2i), src_offset.AsArray());

 cmdbuf.dispatch(src_rect.GetWidth() / 8, src_rect.GetHeight() / 8, 1);

 cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eComputeShader,
 vk::PipelineStageFlagBits::eFragmentShader |
 vk::PipelineStageFlagBits::eEarlyFragmentTests |
 vk::PipelineStageFlagBits::eLateFragmentTests,
 vk::DependencyFlagBits::eByRegion, {}, {}, post_barriers);
 });
 }
@@ -107,7 +107,8 @@ Instance::Instance(bool validation, bool dump_command_buffers)
 VULKAN_HPP_DEFAULT_DISPATCHER.init(vkGetInstanceProcAddr);

 // Enable the instance extensions the platform requires
-const std::vector extensions = GetInstanceExtensions(Frontend::WindowSystemType::Headless, false);
+const std::vector extensions =
+GetInstanceExtensions(Frontend::WindowSystemType::Headless, false);

 const vk::ApplicationInfo application_info = {
 .pApplicationName = "Citra",
@@ -218,9 +219,10 @@ Instance::Instance(Frontend::EmuWindow& window, u32 physical_device_index)

 // If validation is enabled attempt to also enable debug messenger
 if (enable_validation) {
-const auto it = std::find_if(extensions.begin(), extensions.end(), [](const char* extension) {
-return std::strcmp(extension, VK_EXT_DEBUG_UTILS_EXTENSION_NAME) == 0;
-});
+const auto it =
+std::find_if(extensions.begin(), extensions.end(), [](const char* extension) {
+return std::strcmp(extension, VK_EXT_DEBUG_UTILS_EXTENSION_NAME) == 0;
+});

 const bool debug_messenger_supported = it != extensions.end();
 if (debug_messenger_supported) {
@@ -247,11 +247,10 @@ bool PipelineCache::UseProgrammableVertexShader(const Pica::Regs& regs,
 return false;
 }

-scheduler.Record(
-[this, handle = handle, hash = config.Hash()](vk::CommandBuffer) {
-current_shaders[ProgramType::VS] = handle;
-shader_hashes[ProgramType::VS] = hash;
-});
+scheduler.Record([this, handle = handle, hash = config.Hash()](vk::CommandBuffer) {
+current_shaders[ProgramType::VS] = handle;
+shader_hashes[ProgramType::VS] = hash;
+});

 return true;
 }
@@ -354,16 +353,12 @@ void PipelineCache::BindSampler(u32 binding, vk::Sampler sampler) {

 void PipelineCache::SetViewport(float x, float y, float width, float height) {
 const vk::Viewport viewport{x, y, width, height, 0.f, 1.f};
-scheduler.Record([viewport](vk::CommandBuffer cmdbuf) {
-cmdbuf.setViewport(0, viewport);
-});
+scheduler.Record([viewport](vk::CommandBuffer cmdbuf) { cmdbuf.setViewport(0, viewport); });
 }

 void PipelineCache::SetScissor(s32 x, s32 y, u32 width, u32 height) {
 const vk::Rect2D scissor{{x, y}, {width, height}};
-scheduler.Record([scissor](vk::CommandBuffer cmdbuf) {
-cmdbuf.setScissor(0, scissor);
-});
+scheduler.Record([scissor](vk::CommandBuffer cmdbuf) { cmdbuf.setScissor(0, scissor); });
 }

 void PipelineCache::ApplyDynamic(const PipelineInfo& info) {
@@ -373,17 +368,17 @@ void PipelineCache::ApplyDynamic(const PipelineInfo& info) {
 scheduler.Record([this, info, is_dirty, current](vk::CommandBuffer cmdbuf) {
 if (info.dynamic.stencil_compare_mask != current.dynamic.stencil_compare_mask || is_dirty) {
 cmdbuf.setStencilCompareMask(vk::StencilFaceFlagBits::eFrontAndBack,
 info.dynamic.stencil_compare_mask);
 }

 if (info.dynamic.stencil_write_mask != current.dynamic.stencil_write_mask || is_dirty) {
 cmdbuf.setStencilWriteMask(vk::StencilFaceFlagBits::eFrontAndBack,
 info.dynamic.stencil_write_mask);
 }

 if (info.dynamic.stencil_reference != current.dynamic.stencil_reference || is_dirty) {
 cmdbuf.setStencilReference(vk::StencilFaceFlagBits::eFrontAndBack,
 info.dynamic.stencil_reference);
 }

 if (info.dynamic.blend_color != current.dynamic.blend_color || is_dirty) {
@@ -151,8 +151,8 @@ std::vector<const char*> GetInstanceExtensions(Frontend::WindowSystemType window
 });

 if (it == properties.end()) {
-LOG_WARNING(Render_Vulkan,
-"Required instance extension {} is not available", extension);
+LOG_WARNING(Render_Vulkan, "Required instance extension {} is not available",
+extension);
 return true;
 }
 return false;
@@ -269,10 +269,9 @@ void RasterizerVulkan::SetupVertexArray(u32 vs_input_size, u32 vs_input_index_mi
 SetupFixedAttribs();

 // Bind the generated bindings
-scheduler.Record([this, binding_count = layout.binding_count, vertex_offsets = binding_offsets]
-(vk::CommandBuffer cmdbuf) {
-cmdbuf.bindVertexBuffers(0, binding_count, vertex_buffers.data(),
-vertex_offsets.data());
+scheduler.Record([this, binding_count = layout.binding_count,
+vertex_offsets = binding_offsets](vk::CommandBuffer cmdbuf) {
+cmdbuf.bindVertexBuffers(0, binding_count, vertex_buffers.data(), vertex_offsets.data());
 });
 }

@@ -442,10 +441,10 @@ void RasterizerVulkan::SetupIndexArray() {

 stream_buffer.Commit(index_buffer_size);

-scheduler.Record([this, index_offset = index_offset,
-index_type = index_type](vk::CommandBuffer cmdbuf) {
+scheduler.Record(
+[this, index_offset = index_offset, index_type = index_type](vk::CommandBuffer cmdbuf) {
 cmdbuf.bindIndexBuffer(stream_buffer.Handle(), index_offset, index_type);
 });
 }

 void RasterizerVulkan::DrawTriangles() {
@@ -555,16 +554,14 @@ bool RasterizerVulkan::Draw(bool accelerate, bool is_indexed) {
 // nothing. Always sample from the base level until mipmaps for texture cubes are
 // implemented
 const bool skip_mipmap = config.type == Pica::TexturingRegs::TextureConfig::TextureCube;
-info = SamplerInfo{
-.mag_filter = config.mag_filter,
-.min_filter = config.min_filter,
-.mip_filter = config.mip_filter,
-.wrap_s = config.wrap_s,
-.wrap_t = config.wrap_t,
-.border_color = config.border_color.raw,
-.lod_min = skip_mipmap ? 0.f : static_cast<float>(config.lod.min_level),
-.lod_max = skip_mipmap ? 0.f : static_cast<float>(config.lod.max_level)
-};
+info = SamplerInfo{.mag_filter = config.mag_filter,
+.min_filter = config.min_filter,
+.mip_filter = config.mip_filter,
+.wrap_s = config.wrap_s,
+.wrap_t = config.wrap_t,
+.border_color = config.border_color.raw,
+.lod_min = skip_mipmap ? 0.f : static_cast<float>(config.lod.min_level),
+.lod_max = skip_mipmap ? 0.f : static_cast<float>(config.lod.max_level)};

 // Search the cache and bind the appropriate sampler
 if (auto it = samplers.find(info); it != samplers.end()) {
@@ -762,11 +759,11 @@ bool RasterizerVulkan::Draw(bool accelerate, bool is_indexed) {
 std::memcpy(array_ptr, vertex_batch.data() + base_vertex, vertex_size);
 stream_buffer.Commit(vertex_size);

-scheduler.Record([this, vertices, base_vertex,
-offset = offset](vk::CommandBuffer cmdbuf) {
+scheduler.Record(
+[this, vertices, base_vertex, offset = offset](vk::CommandBuffer cmdbuf) {
 cmdbuf.bindVertexBuffers(0, stream_buffer.Handle(), offset);
 cmdbuf.draw(vertices, 1, base_vertex, 0);
 });
 }
 }

@@ -94,22 +94,22 @@ void RenderpassCache::EnterRenderpass(const RenderpassState& state) {
 return;
 }

-scheduler.Record([should_end = bool(current_state.renderpass),
-state](vk::CommandBuffer cmdbuf) {
+scheduler.Record(
+[should_end = bool(current_state.renderpass), state](vk::CommandBuffer cmdbuf) {
 if (should_end) {
 cmdbuf.endRenderPass();
 }

 const vk::RenderPassBeginInfo renderpass_begin_info = {
 .renderPass = state.renderpass,
 .framebuffer = state.framebuffer,
 .renderArea = state.render_area,
 .clearValueCount = 1,
 .pClearValues = &state.clear,
 };

 cmdbuf.beginRenderPass(renderpass_begin_info, vk::SubpassContents::eInline);
 });

 if (is_dirty) {
 scheduler.MarkStateNonDirty(StateFlags::Renderpass);
@@ -27,8 +27,8 @@ void Scheduler::CommandChunk::ExecuteAll(vk::CommandBuffer cmdbuf) {

 Scheduler::Scheduler(const Instance& instance, RenderpassCache& renderpass_cache)
 : instance{instance}, renderpass_cache{renderpass_cache}, master_semaphore{instance},
-command_pool{instance, master_semaphore},
-use_worker_thread{Settings::values.async_command_recording} {
+command_pool{instance, master_semaphore}, use_worker_thread{
+Settings::values.async_command_recording} {
 AllocateWorkerCommandBuffers();
 if (use_worker_thread) {
 AcquireNewChunk();
@@ -119,50 +119,50 @@ void Scheduler::SubmitExecution(vk::Semaphore signal_semaphore, vk::Semaphore wa
 state = StateFlags::AllDirty;

 renderpass_cache.ExitRenderpass();
-Record([signal_semaphore, wait_semaphore, handle, signal_value,
-this](vk::CommandBuffer cmdbuf) {
+Record(
+[signal_semaphore, wait_semaphore, handle, signal_value, this](vk::CommandBuffer cmdbuf) {
 MICROPROFILE_SCOPE(Vulkan_Submit);
 cmdbuf.end();

 const u32 num_signal_semaphores = signal_semaphore ? 2U : 1U;
 const std::array signal_values{signal_value, u64(0)};
 const std::array signal_semaphores{handle, signal_semaphore};

 const u32 num_wait_semaphores = wait_semaphore ? 2U : 1U;
 const std::array wait_values{signal_value - 1, u64(1)};
 const std::array wait_semaphores{handle, wait_semaphore};

 static constexpr std::array<vk::PipelineStageFlags, 2> wait_stage_masks = {
 vk::PipelineStageFlagBits::eAllCommands,
 vk::PipelineStageFlagBits::eColorAttachmentOutput,
 };

 const vk::TimelineSemaphoreSubmitInfoKHR timeline_si = {
 .waitSemaphoreValueCount = num_wait_semaphores,
 .pWaitSemaphoreValues = wait_values.data(),
 .signalSemaphoreValueCount = num_signal_semaphores,
 .pSignalSemaphoreValues = signal_values.data(),
 };

 const vk::SubmitInfo submit_info = {
 .pNext = &timeline_si,
 .waitSemaphoreCount = num_wait_semaphores,
 .pWaitSemaphores = wait_semaphores.data(),
 .pWaitDstStageMask = wait_stage_masks.data(),
 .commandBufferCount = 1u,
 .pCommandBuffers = &cmdbuf,
 .signalSemaphoreCount = num_signal_semaphores,
 .pSignalSemaphores = signal_semaphores.data(),
 };

 try {
 vk::Queue queue = instance.GetGraphicsQueue();
 queue.submit(submit_info);
 } catch (vk::DeviceLostError& err) {
 LOG_CRITICAL(Render_Vulkan, "Device lost during submit: {}", err.what());
 UNREACHABLE();
 }
 });

 if (!use_worker_thread) {
 AllocateWorkerCommandBuffers();
@@ -87,7 +87,7 @@ void FragmentModule::Generate() {
 // Take the color output as-is
 break;
 case FramebufferRegs::LogicOp::CopyInverted:
-//out += "color = ~color;\n";
+// out += "color = ~color;\n";
 break;
 case FramebufferRegs::LogicOp::NoOp:
 // We need to discard the color, but not necessarily the depth. This is not possible
@@ -975,7 +975,8 @@ void FragmentModule::DefineProcTexSampler() {
 const Id proctex_alpha_map_offset{GetShaderDataMember(i32_id, ConstS32(13))};
 const Id final_alpha{AppendProcTexCombineAndMap(config.state.proctex.alpha_combiner, u, v,
 proctex_alpha_map_offset)};
-const Id final_color_xyz{OpVectorShuffle(vec_ids.Get(3), final_color, final_color, 0, 1, 2)};
+const Id final_color_xyz{
+OpVectorShuffle(vec_ids.Get(3), final_color, final_color, 0, 1, 2)};
 final_color = OpCompositeConstruct(vec_ids.Get(4), final_color_xyz, final_alpha);
 }

@@ -1373,7 +1374,7 @@ void FragmentModule::DefineUniformStructs() {

 constexpr std::array light_src_offsets{0u, 16u, 32u, 48u, 64u, 80u, 92u, 96u};
 constexpr std::array shader_data_offsets{
 0u, 4u, 8u, 12u, 16u, 20u, 24u, 28u, 32u, 36u, 40u, 44u, 48u, 52u, 56u,
 60u, 64u, 68u, 72u, 80u, 176u, 192u, 200u, 208u, 224u, 240u, 1136u, 1232u, 1248u, 1264u};

 Decorate(lighting_lut_array_id, spv::Decoration::ArrayStride, 16u);
@@ -16,6 +16,7 @@ class RenderpassCache;

 class Swapchain {
 static constexpr u32 PREFERRED_IMAGE_COUNT = 3;

 public:
 Swapchain(const Instance& instance, Scheduler& scheduler, RenderpassCache& renderpass_cache);
 ~Swapchain();
@@ -358,8 +358,8 @@ ImageAlloc TextureRuntime::Allocate(u32 width, u32 height, VideoCore::PixelForma
 };

 cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eTopOfPipe,
 vk::PipelineStageFlagBits::eTopOfPipe,
 vk::DependencyFlagBits::eByRegion, {}, {}, init_barrier);
 });

 return alloc;
@@ -460,25 +460,22 @@ bool TextureRuntime::ClearTexture(Surface& surface, const VideoCore::TextureClea
 },
 };

-cmdbuf.pipelineBarrier(params.pipeline_flags,
-vk::PipelineStageFlagBits::eTransfer,
-vk::DependencyFlagBits::eByRegion, {}, {}, pre_barrier);
+cmdbuf.pipelineBarrier(params.pipeline_flags, vk::PipelineStageFlagBits::eTransfer,
+vk::DependencyFlagBits::eByRegion, {}, {}, pre_barrier);

 const bool is_color =
 static_cast<bool>(params.aspect & vk::ImageAspectFlagBits::eColor);
 if (is_color) {
-cmdbuf.clearColorImage(params.src_image,
-vk::ImageLayout::eTransferDstOptimal,
-MakeClearColorValue(value), range);
+cmdbuf.clearColorImage(params.src_image, vk::ImageLayout::eTransferDstOptimal,
+MakeClearColorValue(value), range);
 } else {
 cmdbuf.clearDepthStencilImage(params.src_image,
 vk::ImageLayout::eTransferDstOptimal,
 MakeClearDepthStencilValue(value), range);
 }

-cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eTransfer,
-params.pipeline_flags, vk::DependencyFlagBits::eByRegion,
-{}, {}, post_barrier);
+cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eTransfer, params.pipeline_flags,
+vk::DependencyFlagBits::eByRegion, {}, {}, post_barrier);
 });
 return true;
 }
@@ -494,14 +491,14 @@ void TextureRuntime::ClearTextureWithRenderpass(Surface& surface,
 surface.type != VideoCore::SurfaceType::DepthStencil;

 const vk::AccessFlags access_flag =
-is_color ? vk::AccessFlagBits::eColorAttachmentRead |
-vk::AccessFlagBits::eColorAttachmentWrite
+is_color
+? vk::AccessFlagBits::eColorAttachmentRead | vk::AccessFlagBits::eColorAttachmentWrite
 : vk::AccessFlagBits::eDepthStencilAttachmentRead |
 vk::AccessFlagBits::eDepthStencilAttachmentWrite;

 const vk::PipelineStageFlags pipeline_flags =
 is_color ? vk::PipelineStageFlagBits::eColorAttachmentOutput
 : vk::PipelineStageFlagBits::eEarlyFragmentTests;

 const vk::RenderPass clear_renderpass =
 is_color ? renderpass_cache.GetRenderpass(surface.pixel_format,
@@ -698,13 +695,13 @@ bool TextureRuntime::CopyTextures(Surface& source, Surface& dest,
 };

 cmdbuf.pipelineBarrier(params.pipeline_flags, vk::PipelineStageFlagBits::eTransfer,
 vk::DependencyFlagBits::eByRegion, {}, {}, pre_barriers);

-cmdbuf.copyImage(params.src_image, vk::ImageLayout::eTransferSrcOptimal,
-params.dst_image, vk::ImageLayout::eTransferDstOptimal, image_copy);
+cmdbuf.copyImage(params.src_image, vk::ImageLayout::eTransferSrcOptimal, params.dst_image,
+vk::ImageLayout::eTransferDstOptimal, image_copy);

 cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eTransfer, params.pipeline_flags,
 vk::DependencyFlagBits::eByRegion, {}, {}, post_barriers);
 });

 return true;
@@ -826,21 +823,19 @@ bool TextureRuntime::BlitTextures(Surface& source, Surface& dest,
 };

 cmdbuf.pipelineBarrier(params.pipeline_flags, vk::PipelineStageFlagBits::eTransfer,
 vk::DependencyFlagBits::eByRegion, {}, {}, read_barriers);

-cmdbuf.blitImage(params.src_image, vk::ImageLayout::eTransferSrcOptimal,
-params.dst_image, vk::ImageLayout::eTransferDstOptimal, blit_area,
-params.filter);
+cmdbuf.blitImage(params.src_image, vk::ImageLayout::eTransferSrcOptimal, params.dst_image,
+vk::ImageLayout::eTransferDstOptimal, blit_area, params.filter);

 cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eTransfer, params.pipeline_flags,
 vk::DependencyFlagBits::eByRegion, {}, {}, write_barriers);
 });

 return true;
 }

-void TextureRuntime::GenerateMipmaps(Surface& surface, u32 max_level) {
-}
+void TextureRuntime::GenerateMipmaps(Surface& surface, u32 max_level) {}

 const ReinterpreterList& TextureRuntime::GetPossibleReinterpretations(
 VideoCore::PixelFormat dest_format) const {
@@ -914,81 +909,79 @@ void Surface::Upload(const VideoCore::BufferTextureCopy& upload, const StagingDa
 .src_image = alloc.image,
 };

-scheduler.Record([format = alloc.format, params, staging,
-upload](vk::CommandBuffer cmdbuf) {
+scheduler.Record(
+[format = alloc.format, params, staging, upload](vk::CommandBuffer cmdbuf) {
 u32 num_copies = 1;
 std::array<vk::BufferImageCopy, 2> buffer_image_copies;

 const VideoCore::Rect2D rect = upload.texture_rect;
 buffer_image_copies[0] = vk::BufferImageCopy{
 .bufferOffset = staging.buffer_offset + upload.buffer_offset,
 .bufferRowLength = rect.GetWidth(),
 .bufferImageHeight = rect.GetHeight(),
 .imageSubresource{
 .aspectMask = params.aspect,
 .mipLevel = upload.texture_level,
 .baseArrayLayer = 0,
 .layerCount = 1,
 },
 .imageOffset = {static_cast<s32>(rect.left), static_cast<s32>(rect.bottom), 0},
 .imageExtent = {rect.GetWidth(), rect.GetHeight(), 1},
 };

 if (params.aspect & vk::ImageAspectFlagBits::eStencil) {
 buffer_image_copies[0].imageSubresource.aspectMask =
 vk::ImageAspectFlagBits::eDepth;
 vk::BufferImageCopy& stencil_copy = buffer_image_copies[1];
 stencil_copy = buffer_image_copies[0];
 stencil_copy.bufferOffset += UnpackDepthStencil(staging, format);
 stencil_copy.imageSubresource.aspectMask = vk::ImageAspectFlagBits::eStencil;
 num_copies++;
 }

 const vk::ImageMemoryBarrier read_barrier = {
 .srcAccessMask = params.src_access,
 .dstAccessMask = vk::AccessFlagBits::eTransferWrite,
 .oldLayout = vk::ImageLayout::eGeneral,
 .newLayout = vk::ImageLayout::eTransferDstOptimal,
 .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
 .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
 .image = params.src_image,
 .subresourceRange{
 .aspectMask = params.aspect,
 .baseMipLevel = upload.texture_level,
 .levelCount = 1,
 .baseArrayLayer = 0,
 .layerCount = VK_REMAINING_ARRAY_LAYERS,
 },
 };
 const vk::ImageMemoryBarrier write_barrier = {
 .srcAccessMask = vk::AccessFlagBits::eTransferWrite,
 .dstAccessMask = params.src_access,
 .oldLayout = vk::ImageLayout::eTransferDstOptimal,
 .newLayout = vk::ImageLayout::eGeneral,
 .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
 .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
 .image = params.src_image,
 .subresourceRange{
 .aspectMask = params.aspect,
 .baseMipLevel = upload.texture_level,
 .levelCount = 1,
 .baseArrayLayer = 0,
 .layerCount = VK_REMAINING_ARRAY_LAYERS,
 },
 };

-cmdbuf.pipelineBarrier(params.pipeline_flags,
-vk::PipelineStageFlagBits::eTransfer,
-vk::DependencyFlagBits::eByRegion, {}, {}, read_barrier);
+cmdbuf.pipelineBarrier(params.pipeline_flags, vk::PipelineStageFlagBits::eTransfer,
+vk::DependencyFlagBits::eByRegion, {}, {}, read_barrier);

 cmdbuf.copyBufferToImage(staging.buffer, params.src_image,
 vk::ImageLayout::eTransferDstOptimal, num_copies,
 buffer_image_copies.data());

-cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eTransfer,
-params.pipeline_flags, vk::DependencyFlagBits::eByRegion,
-{}, {}, write_barrier);
-});
+cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eTransfer, params.pipeline_flags,
+vk::DependencyFlagBits::eByRegion, {}, {}, write_barrier);
+});

 runtime.upload_buffer.Commit(staging.size);
 }
@@ -1073,16 +1066,15 @@ void Surface::Download(const VideoCore::BufferTextureCopy& download, const Stagi
 .dstAccessMask = vk::AccessFlagBits::eMemoryRead | vk::AccessFlagBits::eMemoryWrite,
 };

-cmdbuf.pipelineBarrier(params.pipeline_flags,
-vk::PipelineStageFlagBits::eTransfer,
-vk::DependencyFlagBits::eByRegion, {}, {}, read_barrier);
+cmdbuf.pipelineBarrier(params.pipeline_flags, vk::PipelineStageFlagBits::eTransfer,
+vk::DependencyFlagBits::eByRegion, {}, {}, read_barrier);

 cmdbuf.copyImageToBuffer(params.src_image, vk::ImageLayout::eTransferSrcOptimal,
 staging.buffer, buffer_image_copy);

-cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eTransfer,
-params.pipeline_flags, vk::DependencyFlagBits::eByRegion,
-memory_write_barrier, {}, image_write_barrier);
+cmdbuf.pipelineBarrier(vk::PipelineStageFlagBits::eTransfer, params.pipeline_flags,
+vk::DependencyFlagBits::eByRegion, memory_write_barrier, {},
+image_write_barrier);
 });
 runtime.download_buffer.Commit(staging.size);
 }