// Copyright 2021 yuzu Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.

#include <algorithm>
#include <iterator>
#include <optional>

#include <boost/container/flat_set.hpp>
#include <boost/container/small_vector.hpp>

#include "shader_recompiler/environment.h"
#include "shader_recompiler/exception.h"
#include "shader_recompiler/frontend/ir/basic_block.h"
#include "shader_recompiler/frontend/ir/ir_emitter.h"
#include "shader_recompiler/ir_opt/passes.h"
#include "shader_recompiler/shader_info.h"

namespace Shader::Optimization {
namespace {
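/// Location of a texture handle within the shader's constant buffers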
struct ConstBufferAddr {
    u32 index;
    u32 offset;
};
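
/// Texture instruction together with its block and the address of its handle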
struct TextureInst {
    ConstBufferAddr cbuf;
    IR::Inst* inst;
    IR::Block* block;
};
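
// Inline storage in these containers avoids heap allocations in the common case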
using TextureInstVector = boost::container::small_vector<TextureInst, 24>;

using VisitedBlocks = boost::container::flat_set<IR::Block*, std::less<IR::Block*>,
                                                 boost::container::small_vector<IR::Block*, 2>>;
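
/// Maps bound and bindless texture opcodes to their indexed equivalent,
/// returning Void for instructions that are not texture operations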
IR::Opcode IndexedInstruction(const IR::Inst& inst) {
    switch (inst.Opcode()) {
    case IR::Opcode::BindlessImageSampleImplicitLod:
    case IR::Opcode::BoundImageSampleImplicitLod:
        return IR::Opcode::ImageSampleImplicitLod;
    case IR::Opcode::BoundImageSampleExplicitLod:
    case IR::Opcode::BindlessImageSampleExplicitLod:
        return IR::Opcode::ImageSampleExplicitLod;
    case IR::Opcode::BoundImageSampleDrefImplicitLod:
    case IR::Opcode::BindlessImageSampleDrefImplicitLod:
        return IR::Opcode::ImageSampleDrefImplicitLod;
    case IR::Opcode::BoundImageSampleDrefExplicitLod:
    case IR::Opcode::BindlessImageSampleDrefExplicitLod:
        return IR::Opcode::ImageSampleDrefExplicitLod;
    case IR::Opcode::BindlessImageGather:
    case IR::Opcode::BoundImageGather:
        return IR::Opcode::ImageGather;
    case IR::Opcode::BindlessImageGatherDref:
    case IR::Opcode::BoundImageGatherDref:
        return IR::Opcode::ImageGatherDref;
    case IR::Opcode::BindlessImageFetch:
    case IR::Opcode::BoundImageFetch:
        return IR::Opcode::ImageFetch;
    case IR::Opcode::BoundImageQueryDimensions:
    case IR::Opcode::BindlessImageQueryDimensions:
        return IR::Opcode::ImageQueryDimensions;
    case IR::Opcode::BoundImageQueryLod:
    case IR::Opcode::BindlessImageQueryLod:
        return IR::Opcode::ImageQueryLod;
    case IR::Opcode::BoundImageGradient:
    case IR::Opcode::BindlessImageGradient:
        return IR::Opcode::ImageGradient;
    default:
        return IR::Opcode::Void;
    }
}
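
/// Returns true for bindless texture opcodes, false for bound ones, and
/// throws for any instruction that is not a texture operation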
bool IsBindless(const IR::Inst& inst) {
    switch (inst.Opcode()) {
    case IR::Opcode::BindlessImageSampleImplicitLod:
    case IR::Opcode::BindlessImageSampleExplicitLod:
    case IR::Opcode::BindlessImageSampleDrefImplicitLod:
    case IR::Opcode::BindlessImageSampleDrefExplicitLod:
    case IR::Opcode::BindlessImageGather:
    case IR::Opcode::BindlessImageGatherDref:
    case IR::Opcode::BindlessImageFetch:
    case IR::Opcode::BindlessImageQueryDimensions:
    case IR::Opcode::BindlessImageQueryLod:
    case IR::Opcode::BindlessImageGradient:
        return true;
    case IR::Opcode::BoundImageSampleImplicitLod:
    case IR::Opcode::BoundImageSampleExplicitLod:
    case IR::Opcode::BoundImageSampleDrefImplicitLod:
    case IR::Opcode::BoundImageSampleDrefExplicitLod:
    case IR::Opcode::BoundImageGather:
    case IR::Opcode::BoundImageGatherDref:
    case IR::Opcode::BoundImageFetch:
    case IR::Opcode::BoundImageQueryDimensions:
    case IR::Opcode::BoundImageQueryLod:
    case IR::Opcode::BoundImageGradient:
        return false;
    default:
        throw InvalidArgument("Invalid opcode {}", inst.Opcode());
    }
}
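
/// Any instruction with an indexed equivalent is a texture instruction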
bool IsTextureInstruction(const IR::Inst& inst) {
    return IndexedInstruction(inst) != IR::Opcode::Void;
}
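
/// Follows the value's definition chain, stepping backwards through phi nodes,
/// until it finds the constant buffer read that produced the texture handle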
std::optional<ConstBufferAddr> Track(IR::Block* block, const IR::Value& value,
                                     VisitedBlocks& visited) {
    if (value.IsImmediate()) {
        // Immediates can't be a constant buffer
        return std::nullopt;
    }
    const IR::Inst* const inst{value.InstRecursive()};
    if (inst->Opcode() == IR::Opcode::GetCbufU32) {
        const IR::Value index{inst->Arg(0)};
        const IR::Value offset{inst->Arg(1)};
        if (!index.IsImmediate()) {
            // Reading a bindless texture from variable indices is valid
            // but not supported here at the moment
            return std::nullopt;
        }
        if (!offset.IsImmediate()) {
            // TODO: Support arrays of textures
            return std::nullopt;
        }
        return ConstBufferAddr{
            .index{index.U32()},
            .offset{offset.U32()},
        };
    }
    // Reversed loops are more likely to find the right result
    for (size_t arg = inst->NumArgs(); arg--;) {
        IR::Block* inst_block{block};
        if (inst->Opcode() == IR::Opcode::Phi) {
            // If we are going through a phi node, mark the current block as visited
            visited.insert(block);
            // and skip already visited blocks to avoid looping forever
            IR::Block* const phi_block{inst->PhiBlock(arg)};
            if (visited.contains(phi_block)) {
                // Already visited, skip
                continue;
            }
            inst_block = phi_block;
        }
        const std::optional result{Track(inst_block, inst->Arg(arg), visited)};
        if (result) {
            return *result;
        }
    }
    return std::nullopt;
}
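
/// Resolves the constant buffer address of a texture instruction's handle,
/// tracking bindless reads and defaulting bound reads to the environment's
/// texture bound buffer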
TextureInst MakeInst(Environment& env, IR::Block* block, IR::Inst& inst) {
    ConstBufferAddr addr;
    if (IsBindless(inst)) {
        VisitedBlocks visited;
        const std::optional<ConstBufferAddr> track_addr{Track(block, inst.Arg(0), visited)};
        if (!track_addr) {
            throw NotImplementedException("Failed to track bindless texture constant buffer");
        }
        addr = *track_addr;
    } else {
        addr = ConstBufferAddr{
            .index{env.TextureBoundBuffer()},
            .offset{inst.Arg(0).U32()},
        };
    }
    return TextureInst{
        .cbuf{addr},
        .inst{&inst},
        .block{block},
    };
}
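
/// Deduplicating append-only view over the program's texture descriptor table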
class Descriptors {
public:
    explicit Descriptors(TextureDescriptors& descriptors_) : descriptors{descriptors_} {}

    u32 Add(const TextureDescriptor& descriptor) {
        // TODO: Handle arrays
        auto it{std::ranges::find_if(descriptors, [&descriptor](const TextureDescriptor& existing) {
            return descriptor.cbuf_index == existing.cbuf_index &&
                   descriptor.cbuf_offset == existing.cbuf_offset &&
                   descriptor.type == existing.type;
        })};
        if (it != descriptors.end()) {
            return static_cast<u32>(std::distance(descriptors.begin(), it));
        }
        descriptors.push_back(descriptor);
        return static_cast<u32>(descriptors.size()) - 1;
    }

private:
    TextureDescriptors& descriptors;
};
} // Anonymous namespace
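
/// Replaces bindless and bound texture operations with their indexed
/// equivalents and fills the texture descriptors consumed by the backends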
void TexturePass(Environment& env, IR::Program& program) {
    TextureInstVector to_replace;
    for (IR::Block* const block : program.post_order_blocks) {
        for (IR::Inst& inst : block->Instructions()) {
            if (!IsTextureInstruction(inst)) {
                continue;
            }
            to_replace.push_back(MakeInst(env, block, inst));
        }
    }
    // Sort instructions to visit textures by constant buffer index, then by offset.
    // Sorting by offset first and then stable-sorting by index preserves the offset
    // order within each constant buffer index.
    std::ranges::sort(to_replace, [](const auto& lhs, const auto& rhs) {
        return lhs.cbuf.offset < rhs.cbuf.offset;
    });
    std::stable_sort(to_replace.begin(), to_replace.end(), [](const auto& lhs, const auto& rhs) {
        return lhs.cbuf.index < rhs.cbuf.index;
    });
    Descriptors descriptors{program.info.texture_descriptors};
    for (TextureInst& texture_inst : to_replace) {
        // TODO: Handle arrays
        IR::Inst* const inst{texture_inst.inst};
        inst->ReplaceOpcode(IndexedInstruction(*inst));

        const auto& cbuf{texture_inst.cbuf};
        auto flags{inst->Flags<IR::TextureInstInfo>()};
        if (inst->Opcode() == IR::Opcode::ImageQueryDimensions) {
            flags.type.Assign(env.ReadTextureType(cbuf.index, cbuf.offset));
            inst->SetFlags(flags);
        }
        const u32 index{descriptors.Add(TextureDescriptor{
            .type{flags.type},
            .cbuf_index{cbuf.index},
            .cbuf_offset{cbuf.offset},
            .count{1},
        })};
        inst->SetArg(0, IR::Value{index});
    }
}

} // namespace Shader::Optimization