shader: Properly blacklist and scale image loads
parent c7a1cbad44
commit fc9bb3c3fe
@@ -84,10 +84,8 @@ void PatchImageQueryDimensions(IR::Block& block, IR::Inst& inst) {
     }
 }
 
-void PatchImageFetch(IR::Block& block, IR::Inst& inst) {
-    IR::IREmitter ir{block, IR::Block::InstructionList::s_iterator_to(inst)};
+void ScaleIntegerCoord(IR::IREmitter& ir, IR::Inst& inst, const IR::U1& is_scaled) {
     const auto info{inst.Flags<IR::TextureInstInfo>()};
-    const IR::U1 is_scaled{ir.IsTextureScaled(ir.Imm32(info.descriptor_index))};
     const IR::Value coord{inst.Arg(1)};
     switch (info.type) {
     case TextureType::Color1D:
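The hunk above turns the old PatchImageFetch body into a reusable ScaleIntegerCoord helper that receives the is_scaled predicate from its caller instead of computing it itself; the switch over TextureType (elided by the diff) is where the integer coordinate actually gets rescaled. As a rough, self-contained illustration of that idea, and not yuzu's IR emitter API, a host-side analogue might look like the following, where Coord2D, ScaleCoord, and the rational scale factor are purely hypothetical names:

// Host-side sketch only: pick between the original and the scaled integer
// texel coordinate based on a runtime predicate, mirroring how the reworked
// helper takes `is_scaled` as a parameter rather than deriving it itself.
#include <cstdint>

struct Coord2D {
    int32_t x;
    int32_t y;
};

// Hypothetical rescale by a rational factor (num/den), e.g. 3/2 for a 1.5x scale.
constexpr Coord2D ScaleCoord(Coord2D c, int32_t num, int32_t den) {
    return Coord2D{c.x * num / den, c.y * num / den};
}

constexpr Coord2D MaybeScale(Coord2D c, bool is_scaled, int32_t num, int32_t den) {
    return is_scaled ? ScaleCoord(c, num, den) : c;
}

static_assert(MaybeScale({640, 360}, true, 3, 2).x == 960);
static_assert(MaybeScale({640, 360}, false, 3, 2).y == 360);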
@@ -121,6 +119,21 @@ void PatchImageFetch(IR::Block& block, IR::Inst& inst) {
     }
 }
 
+void PatchImageFetch(IR::Block& block, IR::Inst& inst) {
+    IR::IREmitter ir{block, IR::Block::InstructionList::s_iterator_to(inst)};
+    const auto info{inst.Flags<IR::TextureInstInfo>()};
+    const IR::U1 is_scaled{ir.IsTextureScaled(ir.Imm32(info.descriptor_index))};
+    ScaleIntegerCoord(ir, inst, is_scaled);
+}
+
+void PatchImageRead(IR::Block& block, IR::Inst& inst) {
+    IR::IREmitter ir{block, IR::Block::InstructionList::s_iterator_to(inst)};
+    const auto info{inst.Flags<IR::TextureInstInfo>()};
+    // TODO: Scale conditionally
+    const IR::U1 is_scaled{IR::Value{true}};
+    ScaleIntegerCoord(ir, inst, is_scaled);
+}
+
 void Visit(const IR::Program& program, IR::Block& block, IR::Inst& inst) {
     const bool is_fragment_shader{program.stage == Stage::Fragment};
     switch (inst.GetOpcode()) {
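Note the asymmetry between the two new wrappers: PatchImageFetch still asks the emitter whether the texture is scaled, while PatchImageRead (per its TODO) hard-codes the predicate to true for now, so image loads are always rescaled. A constant predicate is a cheap stopgap because a select whose condition is known at compile time folds away; the toy example below is ordinary C++, not the shader IR, and is only meant to illustrate that folding:

// Toy illustration: with a constant condition the "select" disappears at
// compile time, so an always-true is_scaled degenerates to unconditional scaling.
constexpr int Select(bool cond, int when_true, int when_false) {
    return cond ? when_true : when_false;
}

constexpr int native = 640;
constexpr int scaled = native * 3 / 2;                 // assume a 1.5x scale factor
static_assert(Select(true, scaled, native) == 960);    // folds to the scaled value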
@@ -144,6 +157,9 @@ void Visit(const IR::Program& program, IR::Block& block, IR::Inst& inst) {
     case IR::Opcode::ImageFetch:
         PatchImageFetch(block, inst);
         break;
+    case IR::Opcode::ImageRead:
+        PatchImageRead(block, inst);
+        break;
     default:
         break;
     }
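For context, Visit is the per-instruction dispatcher of this scaling pass; the pass entry point (not part of this diff) presumably just walks every instruction of every block and calls it. A sketch of that driver follows, with the exact container and member names being assumptions rather than quotes from the file:

// Assumed shape of the pass entry point; the block container and iteration
// helpers are guesses based on the Visit() signature above, not verified code.
void RescalingPass(IR::Program& program) {
    for (IR::Block* const block : program.blocks) {
        for (IR::Inst& inst : block->Instructions()) {
            Visit(program, *block, inst);
        }
    }
}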
@@ -139,7 +139,7 @@ void ComputePipeline::Configure() {
         }
     }
     for (const auto& desc : info.image_descriptors) {
-        add_image(desc, true);
+        add_image(desc, desc.is_written);
     }
     texture_cache.FillComputeImageViews(std::span(views.data(), views.size()));
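This is the pipeline-side half of the change: the compute pipeline used to blacklist every image descriptor unconditionally (add_image(desc, true)); now only descriptors the shader writes are blacklisted, presumably because read-only image loads are covered by the coordinate scaling added above. Pulled out of the call site, the rule amounts to a tiny predicate like the following (the stub struct is an illustrative stand-in, not the engine's descriptor type):

// Illustrative only: the decision the updated call sites make inline.
struct ImageDescriptorStub {
    bool is_written;  // storage image the shader may write to
};

// Written images still opt out of rescaling; read-only images and buffer
// views are bound with blacklist == false.
constexpr bool ShouldBlacklist(const ImageDescriptorStub& desc) {
    return desc.is_written;
}

static_assert(ShouldBlacklist({true}) && !ShouldBlacklist({false}));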
@@ -362,7 +362,7 @@ void GraphicsPipeline::ConfigureImpl(bool is_indexed) {
         }
         if constexpr (Spec::has_images) {
             for (const auto& desc : info.image_descriptors) {
-                add_image(desc, true);
+                add_image(desc, desc.is_written);
             }
         }
     }};
@@ -159,7 +159,7 @@ void ComputePipeline::Configure(Tegra::Engines::KeplerCompute& kepler_compute,
         }
     }
     for (const auto& desc : info.image_descriptors) {
-        add_image(desc, true);
+        add_image(desc, desc.is_written);
     }
     texture_cache.FillComputeImageViews(std::span(views.data(), views.size()));
@@ -322,20 +322,24 @@ void GraphicsPipeline::ConfigureImpl(bool is_indexed) {
             }
             return TexturePair(gpu_memory.Read<u32>(addr), via_header_index);
         }};
-        const auto add_image{[&](const auto& desc) {
+        const auto add_image{[&](const auto& desc, bool blacklist) LAMBDA_FORCEINLINE {
             for (u32 index = 0; index < desc.count; ++index) {
                 const auto handle{read_handle(desc, index)};
-                views[view_index++] = {handle.first};
+                views[view_index++] = {
+                    .index = handle.first,
+                    .blacklist = blacklist,
+                    .id = {},
+                };
             }
         }};
         if constexpr (Spec::has_texture_buffers) {
             for (const auto& desc : info.texture_buffer_descriptors) {
-                add_image(desc);
+                add_image(desc, false);
             }
         }
         if constexpr (Spec::has_image_buffers) {
             for (const auto& desc : info.image_buffer_descriptors) {
-                add_image(desc);
+                add_image(desc, false);
            }
        }
        for (const auto& desc : info.texture_descriptors) {
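Here the add_image lambda gains an explicit blacklist parameter and fills each view slot with designated initializers. Judging from the field names, the element type is a small in/out record along the lines sketched below; the real ImageViewInOut and ImageViewId types live in the texture cache and may differ, so treat this only as an approximation:

// Sketch of the per-view record implied by the initializers above; the actual
// definitions belong to the texture cache and are only approximated here.
#include <cstdint>

using ImageViewId = uint32_t;  // stand-in for the cache's slot-id type

struct ImageViewInOut {
    uint32_t index{};   // raw handle index read from the constant buffer
    bool blacklist{};   // true: keep this view at native (non-rescaled) size
    ImageViewId id{};   // written back by the texture cache when views are filled
};

Texture buffers and image buffers are registered with blacklist = false, consistent with buffers never being resolution-scaled, while image descriptors again pass desc.is_written in the hunk that follows.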
@@ -349,7 +353,7 @@ void GraphicsPipeline::ConfigureImpl(bool is_indexed) {
         }
         if constexpr (Spec::has_images) {
             for (const auto& desc : info.image_descriptors) {
-                add_image(desc);
+                add_image(desc, desc.is_written);
             }
         }
     }};