vulkan: Implement rescaling shader patching
parent dc72d4d4f5
commit d2388dd0d0
@@ -20,6 +20,8 @@
 
 namespace Vulkan {
 
+constexpr size_t MAX_RESCALING_WORDS = 4;
+
 class DescriptorLayoutBuilder {
 public:
     DescriptorLayoutBuilder(const Device& device_) : device{&device_} {}
@@ -68,18 +70,26 @@ public:
     }
 
     vk::PipelineLayout CreatePipelineLayout(VkDescriptorSetLayout descriptor_set_layout) const {
+        const VkPushConstantRange range{
+            .stageFlags = static_cast<VkShaderStageFlags>(
+                is_compute ? VK_SHADER_STAGE_COMPUTE_BIT : VK_SHADER_STAGE_ALL_GRAPHICS),
+            .offset = 0,
+            .size = (is_compute ? 0 : sizeof(f32)) + sizeof(std::array<u32, MAX_RESCALING_WORDS>),
+        };
         return device->GetLogical().CreatePipelineLayout({
             .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
             .pNext = nullptr,
             .flags = 0,
             .setLayoutCount = descriptor_set_layout ? 1U : 0U,
             .pSetLayouts = bindings.empty() ? nullptr : &descriptor_set_layout,
-            .pushConstantRangeCount = 0,
-            .pPushConstantRanges = nullptr,
+            .pushConstantRangeCount = 1,
+            .pPushConstantRanges = &range,
         });
     }
 
     void Add(const Shader::Info& info, VkShaderStageFlags stage) {
+        is_compute |= (stage & VK_SHADER_STAGE_COMPUTE_BIT) != 0;
+
         Add(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, stage, info.constant_buffer_descriptors);
         Add(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, stage, info.storage_buffers_descriptors);
         Add(VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, stage, info.texture_buffer_descriptors);
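Note on the layout above: graphics pipeline layouts reserve a push constant block consisting of a leading f32 resolution scale-down factor followed by MAX_RESCALING_WORDS u32 bitmask words, while compute layouts carry only the bitmask words. A minimal standalone sketch of that size computation (standard types substituted for yuzu's f32/u32 aliases):

#include <array>
#include <cstdint>
#include <cstdio>

constexpr std::size_t MAX_RESCALING_WORDS = 4;

int main() {
    for (const bool is_compute : {false, true}) {
        // Mirrors the .size expression in CreatePipelineLayout above.
        const std::size_t size = (is_compute ? 0 : sizeof(float)) +
                                 sizeof(std::array<std::uint32_t, MAX_RESCALING_WORDS>);
        std::printf("%s push constant range: %zu bytes\n",
                    is_compute ? "compute" : "graphics", size); // 20 for graphics, 16 for compute
    }
}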
@@ -115,6 +125,7 @@ private:
     }
 
     const Device* device{};
+    bool is_compute{};
     boost::container::small_vector<VkDescriptorSetLayoutBinding, 32> bindings;
     boost::container::small_vector<VkDescriptorUpdateTemplateEntryKHR, 32> entries;
     u32 binding{};
@@ -122,21 +133,46 @@ private:
     size_t offset{};
 };
 
+class RescalingPushConstant {
+public:
+    explicit RescalingPushConstant(u32 num_textures) noexcept {}
+
+    void PushTexture(bool is_rescaled) noexcept {
+        *texture_ptr |= is_rescaled ? texture_bit : 0;
+        texture_bit <<= 1;
+        if (texture_bit == 0) {
+            texture_bit = 1u;
+            ++texture_ptr;
+        }
+    }
+
+    const std::array<u32, MAX_RESCALING_WORDS>& Data() const noexcept {
+        return words;
+    }
+
+private:
+    std::array<u32, MAX_RESCALING_WORDS> words{};
+    u32* texture_ptr{words.data()};
+    u32 texture_bit{1u};
+};
+
 inline void PushImageDescriptors(const Shader::Info& info, const VkSampler*& samplers,
                                  const ImageId*& image_view_ids, TextureCache& texture_cache,
-                                 VKUpdateDescriptorQueue& update_descriptor_queue) {
-    for (const auto& desc : info.texture_buffer_descriptors) {
-        image_view_ids += desc.count;
-    }
-    for (const auto& desc : info.image_buffer_descriptors) {
-        image_view_ids += desc.count;
-    }
+                                 VKUpdateDescriptorQueue& update_descriptor_queue,
+                                 RescalingPushConstant& rescaling) {
+    static constexpr VideoCommon::ImageViewId NULL_IMAGE_VIEW_ID{0};
+    image_view_ids += Shader::NumDescriptors(info.texture_buffer_descriptors);
+    image_view_ids += Shader::NumDescriptors(info.image_buffer_descriptors);
     for (const auto& desc : info.texture_descriptors) {
         for (u32 index = 0; index < desc.count; ++index) {
+            const VideoCommon::ImageViewId image_view_id{*(image_view_ids++)};
             const VkSampler sampler{*(samplers++)};
-            ImageView& image_view{texture_cache.GetImageView(*(image_view_ids++))};
+            ImageView& image_view{texture_cache.GetImageView(image_view_id)};
+            const Image& image{texture_cache.GetImage(image_view.image_id)};
             const VkImageView vk_image_view{image_view.Handle(desc.type)};
             update_descriptor_queue.AddSampledImage(vk_image_view, sampler);
+            rescaling.PushTexture(image_view_id != NULL_IMAGE_VIEW_ID &&
+                                  True(image.flags & VideoCommon::ImageFlagBits::Rescaled));
         }
     }
     for (const auto& desc : info.image_descriptors) {
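The RescalingPushConstant helper added above packs one bit per sampled texture, least significant bit first, 32 textures per word, so the four words cover up to 128 textures. A short usage sketch with hypothetical values (not code from the repository):

RescalingPushConstant rescaling(3);   // constructor argument is currently unused
rescaling.PushTexture(false);         // texture 0: not rescaled
rescaling.PushTexture(true);          // texture 1: rescaled -> sets bit 1 of word 0
rescaling.PushTexture(false);         // texture 2: not rescaled
// rescaling.Data()[0] == 0b010; patched shaders can test bit (i % 32) of
// word (i / 32) to decide whether texture i needs its coordinates unscaled.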
@@ -180,9 +180,11 @@ void ComputePipeline::Configure(Tegra::Engines::KeplerCompute& kepler_compute,
     buffer_cache.UpdateComputeBuffers();
     buffer_cache.BindHostComputeBuffers();
 
+    RescalingPushConstant rescaling(num_textures);
     const VkSampler* samplers_it{samplers.data()};
     const ImageId* views_it{image_view_ids.data()};
-    PushImageDescriptors(info, samplers_it, views_it, texture_cache, update_descriptor_queue);
+    PushImageDescriptors(info, samplers_it, views_it, texture_cache, update_descriptor_queue,
+                         rescaling);
 
     if (!is_built.load(std::memory_order::relaxed)) {
         // Wait for the pipeline to be built
@@ -192,17 +194,21 @@ void ComputePipeline::Configure(Tegra::Engines::KeplerCompute& kepler_compute,
         });
     }
     const void* const descriptor_data{update_descriptor_queue.UpdateData()};
-    scheduler.Record([this, descriptor_data](vk::CommandBuffer cmdbuf) {
-        cmdbuf.BindPipeline(VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
-        if (!descriptor_set_layout) {
-            return;
-        }
-        const VkDescriptorSet descriptor_set{descriptor_allocator.Commit()};
-        const vk::Device& dev{device.GetLogical()};
-        dev.UpdateDescriptorSet(descriptor_set, *descriptor_update_template, descriptor_data);
-        cmdbuf.BindDescriptorSets(VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline_layout, 0,
-                                  descriptor_set, nullptr);
-    });
+    scheduler.Record(
+        [this, descriptor_data, rescaling_data = rescaling.Data()](vk::CommandBuffer cmdbuf) {
+            cmdbuf.BindPipeline(VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
+            if (!descriptor_set_layout) {
+                return;
+            }
+            if (num_textures > 0) {
+                cmdbuf.PushConstants(*pipeline_layout, VK_SHADER_STAGE_COMPUTE_BIT, rescaling_data);
+            }
+            const VkDescriptorSet descriptor_set{descriptor_allocator.Commit()};
+            const vk::Device& dev{device.GetLogical()};
+            dev.UpdateDescriptorSet(descriptor_set, *descriptor_update_template, descriptor_data);
+            cmdbuf.BindDescriptorSets(VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline_layout, 0,
+                                      descriptor_set, nullptr);
+        });
 }
 
 } // namespace Vulkan
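For compute there is no scale factor, so when the dispatch uses any textures the whole bitmask array is pushed at offset 0 with the compute stage flag. In raw Vulkan terms the wrapper call above corresponds roughly to the following (a sketch; cmdbuf, pipeline_layout, and rescaling_data stand in for the wrapped handles):

vkCmdPushConstants(cmdbuf, pipeline_layout, VK_SHADER_STAGE_COMPUTE_BIT,
                   /*offset=*/0, /*size=*/static_cast<uint32_t>(sizeof(rescaling_data)),
                   rescaling_data.data());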
@@ -59,6 +59,7 @@ private:
     vk::PipelineLayout pipeline_layout;
     vk::DescriptorUpdateTemplateKHR descriptor_update_template;
     vk::Pipeline pipeline;
+    u32 num_textures{};
 
     std::condition_variable build_condvar;
     std::mutex build_mutex;
@@ -235,6 +235,7 @@ GraphicsPipeline::GraphicsPipeline(
         stage_infos[stage] = *info;
         enabled_uniform_buffer_masks[stage] = info->constant_buffer_mask;
         std::ranges::copy(info->constant_buffer_used_sizes, uniform_buffer_sizes[stage].begin());
+        num_textures += Shader::NumDescriptors(info->texture_descriptors);
     }
     auto func{[this, shader_notify, &render_pass_cache, &descriptor_pool, pipeline_statistics] {
         DescriptorLayoutBuilder builder{MakeBuilder(device, stage_infos)};
@@ -428,12 +429,13 @@ void GraphicsPipeline::ConfigureImpl(bool is_indexed) {
 
     update_descriptor_queue.Acquire();
 
+    RescalingPushConstant rescaling(num_textures);
     const VkSampler* samplers_it{samplers.data()};
     const ImageId* views_it{image_view_ids.data()};
     const auto prepare_stage{[&](size_t stage) LAMBDA_FORCEINLINE {
         buffer_cache.BindHostStageBuffers(stage);
         PushImageDescriptors(stage_infos[stage], samplers_it, views_it, texture_cache,
-                             update_descriptor_queue);
+                             update_descriptor_queue, rescaling);
     }};
     if constexpr (Spec::enabled_stages[0]) {
         prepare_stage(0);
@@ -450,10 +452,10 @@ void GraphicsPipeline::ConfigureImpl(bool is_indexed) {
     if constexpr (Spec::enabled_stages[4]) {
         prepare_stage(4);
     }
-    ConfigureDraw();
+    ConfigureDraw(rescaling);
 }
 
-void GraphicsPipeline::ConfigureDraw() {
+void GraphicsPipeline::ConfigureDraw(const RescalingPushConstant& rescaling) {
     texture_cache.UpdateRenderTargets(false);
     scheduler.RequestRenderpass(texture_cache.GetFramebuffer());
@@ -464,12 +466,23 @@ void GraphicsPipeline::ConfigureDraw() {
             build_condvar.wait(lock, [this] { return is_built.load(std::memory_order::relaxed); });
         });
     }
+    const bool is_rescaling{texture_cache.IsRescaling()};
+    const bool update_rescaling{scheduler.UpdateRescaling(is_rescaling)};
     const bool bind_pipeline{scheduler.UpdateGraphicsPipeline(this)};
     const void* const descriptor_data{update_descriptor_queue.UpdateData()};
-    scheduler.Record([this, descriptor_data, bind_pipeline](vk::CommandBuffer cmdbuf) {
+    scheduler.Record([this, descriptor_data, bind_pipeline, rescaling_data = rescaling.Data(),
+                      is_rescaling, update_rescaling](vk::CommandBuffer cmdbuf) {
         if (bind_pipeline) {
             cmdbuf.BindPipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
         }
+        if (update_rescaling) {
+            const f32 config_down_factor{Settings::values.resolution_info.down_factor};
+            const float scale_down_factor{is_rescaling ? config_down_factor : 1.0f};
+            cmdbuf.PushConstants(*pipeline_layout, VK_SHADER_STAGE_ALL_GRAPHICS, 0,
+                                 sizeof(scale_down_factor), &scale_down_factor);
+        }
+        cmdbuf.PushConstants(*pipeline_layout, VK_SHADER_STAGE_ALL_GRAPHICS, sizeof(f32),
+                             sizeof(rescaling_data), rescaling_data.data());
         if (!descriptor_set_layout) {
            return;
         }
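On the graphics path the scale-down factor occupies offset 0 and is only re-recorded when UpdateRescaling reports a change, while the bitmask words are pushed at offset sizeof(f32) on every draw. A hypothetical C++ mirror of the resulting push constant block (names are illustrative, not taken from the shader recompiler):

#include <array>
#include <cstdint>

struct GraphicsRescalingPushConstants {
    float down_factor;                             // offset 0: resolution scale-down factor
    std::array<std::uint32_t, 4> rescaling_words;  // offset 4: one bit per bound texture
};
static_assert(sizeof(GraphicsRescalingPushConstants) ==
              sizeof(float) + sizeof(std::array<std::uint32_t, 4>));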
@@ -62,6 +62,7 @@ namespace Vulkan {
 class Device;
 class PipelineStatistics;
 class RenderPassCache;
+class RescalingPushConstant;
 class VKScheduler;
 class VKUpdateDescriptorQueue;
 
@@ -113,7 +114,7 @@ private:
     template <typename Spec>
     void ConfigureImpl(bool is_indexed);
 
-    void ConfigureDraw();
+    void ConfigureDraw(const RescalingPushConstant& rescaling);
 
     void MakePipeline(VkRenderPass render_pass);
 
@@ -138,6 +139,7 @@ private:
     std::array<Shader::Info, NUM_STAGES> stage_infos;
     std::array<u32, 5> enabled_uniform_buffer_masks{};
     VideoCommon::UniformBufferSizes uniform_buffer_sizes{};
+    u32 num_textures{};
 
     vk::DescriptorSetLayout descriptor_set_layout;
     DescriptorAllocator descriptor_allocator;
@@ -139,6 +139,9 @@ Shader::RuntimeInfo MakeRuntimeInfo(std::span<const Shader::IR::Program> program
     } else {
         info.previous_stage_stores.mask.set();
     }
+    for (const auto& stage : programs) {
+        info.num_textures += Shader::NumDescriptors(stage.info.texture_descriptors);
+    }
     const Shader::Stage stage{program.stage};
     const bool has_geometry{key.unique_hashes[4] != 0 && !programs[4].is_geometry_passthrough};
     const bool gl_ndc{key.state.ndc_minus_one_to_one != 0};
@@ -128,6 +128,15 @@ bool VKScheduler::UpdateGraphicsPipeline(GraphicsPipeline* pipeline) {
     return true;
 }
 
+bool VKScheduler::UpdateRescaling(bool is_rescaling) {
+    if (state.rescaling_defined && is_rescaling == state.is_rescaling) {
+        return false;
+    }
+    state.rescaling_defined = true;
+    state.is_rescaling = is_rescaling;
+    return true;
+}
+
 void VKScheduler::WorkerThread(std::stop_token stop_token) {
     Common::SetCurrentThreadName("yuzu:VulkanWorker");
     do {
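UpdateRescaling added above memoizes the rescaling state per command buffer: it returns true on the first call after InvalidateState (see the next hunk) or whenever the flag flips, which is what gates the scale-factor push in ConfigureDraw. A small behaviour sketch (hypothetical call sequence, not a test from the repository):

scheduler.InvalidateState();                      // rescaling_defined becomes false
bool first = scheduler.UpdateRescaling(true);     // true:  state was undefined
bool same = scheduler.UpdateRescaling(true);      // false: unchanged
bool flipped = scheduler.UpdateRescaling(false);  // true:  value changed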
@@ -227,6 +236,7 @@ void VKScheduler::AllocateNewContext() {
 
 void VKScheduler::InvalidateState() {
     state.graphics_pipeline = nullptr;
+    state.rescaling_defined = false;
     state_tracker.InvalidateCommandBufferState();
 }
 
@@ -56,6 +56,9 @@ public:
     /// Update the pipeline to the current execution context.
     bool UpdateGraphicsPipeline(GraphicsPipeline* pipeline);
 
+    /// Update the rescaling state. Returns true if the state has to be updated.
+    bool UpdateRescaling(bool is_rescaling);
+
     /// Invalidates current command buffer state except for render passes
     void InvalidateState();
 
@@ -185,6 +188,8 @@ private:
         VkFramebuffer framebuffer = nullptr;
         VkExtent2D render_area = {0, 0};
         GraphicsPipeline* graphics_pipeline = nullptr;
+        bool is_rescaling = false;
+        bool rescaling_defined = false;
     };
 
     void WorkerThread(std::stop_token stop_token);