path: root/src/shader_recompiler/ir_opt/texture_pass.cpp
author ReinUsesLisp 2021-03-08 18:31:53 -0300
committer ameerj 2021-07-22 21:51:23 -0400
commit ab463712474de5f99eec137a9c6233e55fe184f0 (patch)
tree 30d79ac64dd03d5cfafd07c0c42c2baadc82de98 /src/shader_recompiler/ir_opt/texture_pass.cpp
parent shader: Implement R2P (diff)
download yuzu-ab463712474de5f99eec137a9c6233e55fe184f0.tar.gz
         yuzu-ab463712474de5f99eec137a9c6233e55fe184f0.tar.xz
         yuzu-ab463712474de5f99eec137a9c6233e55fe184f0.zip
shader: Initial support for textures and TEX
Diffstat (limited to 'src/shader_recompiler/ir_opt/texture_pass.cpp')
-rw-r--r--  src/shader_recompiler/ir_opt/texture_pass.cpp  199
1 file changed, 199 insertions, 0 deletions
diff --git a/src/shader_recompiler/ir_opt/texture_pass.cpp b/src/shader_recompiler/ir_opt/texture_pass.cpp
new file mode 100644
index 000000000..80e4ad6a9
--- /dev/null
+++ b/src/shader_recompiler/ir_opt/texture_pass.cpp
@@ -0,0 +1,199 @@
// Copyright 2021 yuzu Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.

#include <algorithm> // std::ranges::find_if, std::ranges::sort, std::stable_sort
#include <optional>

#include <boost/container/flat_set.hpp>
#include <boost/container/small_vector.hpp>

#include "shader_recompiler/environment.h"
#include "shader_recompiler/exception.h" // NotImplementedException, InvalidArgument
#include "shader_recompiler/frontend/ir/basic_block.h"
#include "shader_recompiler/frontend/ir/ir_emitter.h"
#include "shader_recompiler/ir_opt/passes.h"
#include "shader_recompiler/shader_info.h"

namespace Shader::Optimization {
namespace {
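// Constant buffer slot and immediate byte offset from which a texture handle is read.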
struct ConstBufferAddr {
    u32 index;
    u32 offset;
};

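// A texture instruction found in the IR, together with the block that contains it and the
// constant buffer address its handle was resolved to.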
struct TextureInst {
    ConstBufferAddr cbuf;
    IR::Inst* inst;
    IR::Block* block;
};

using TextureInstVector = boost::container::small_vector<TextureInst, 24>;

using VisitedBlocks = boost::container::flat_set<IR::Block*, std::less<IR::Block*>,
                                                 boost::container::small_vector<IR::Block*, 2>>;

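// Returns the indexed (descriptor-based) opcode that a bound or bindless sample instruction
// lowers to, or Void when the instruction is not a texture instruction handled by this pass.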
IR::Opcode IndexedInstruction(const IR::Inst& inst) {
    switch (inst.Opcode()) {
    case IR::Opcode::BindlessImageSampleImplicitLod:
    case IR::Opcode::BoundImageSampleImplicitLod:
        return IR::Opcode::ImageSampleImplicitLod;
    case IR::Opcode::BoundImageSampleExplicitLod:
    case IR::Opcode::BindlessImageSampleExplicitLod:
        return IR::Opcode::ImageSampleExplicitLod;
    case IR::Opcode::BoundImageSampleDrefImplicitLod:
    case IR::Opcode::BindlessImageSampleDrefImplicitLod:
        return IR::Opcode::ImageSampleDrefImplicitLod;
    case IR::Opcode::BoundImageSampleDrefExplicitLod:
    case IR::Opcode::BindlessImageSampleDrefExplicitLod:
        return IR::Opcode::ImageSampleDrefExplicitLod;
    default:
        return IR::Opcode::Void;
    }
}

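// Distinguishes bindless samples (handle read from an arbitrary constant buffer) from bound
// samples (handle at an immediate offset in the bound texture constant buffer).
// Throws InvalidArgument for non-texture opcodes.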
bool IsBindless(const IR::Inst& inst) {
    switch (inst.Opcode()) {
    case IR::Opcode::BindlessImageSampleImplicitLod:
    case IR::Opcode::BindlessImageSampleExplicitLod:
    case IR::Opcode::BindlessImageSampleDrefImplicitLod:
    case IR::Opcode::BindlessImageSampleDrefExplicitLod:
        return true;
    case IR::Opcode::BoundImageSampleImplicitLod:
    case IR::Opcode::BoundImageSampleExplicitLod:
    case IR::Opcode::BoundImageSampleDrefImplicitLod:
    case IR::Opcode::BoundImageSampleDrefExplicitLod:
        return false;
    default:
        throw InvalidArgument("Invalid opcode {}", inst.Opcode());
    }
}

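// An instruction is a texture instruction if it maps to an indexed sample opcode.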
bool IsTextureInstruction(const IR::Inst& inst) {
    return IndexedInstruction(inst) != IR::Opcode::Void;
}

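// Walks the IR backwards from a bindless handle, following phi nodes (visiting each block at
// most once) until it finds the GetCbuf instruction that produced the handle. Returns the
// constant buffer index and offset when both are immediate, or std::nullopt otherwise.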
std::optional<ConstBufferAddr> Track(IR::Block* block, const IR::Value& value,
                                     VisitedBlocks& visited) {
    if (value.IsImmediate()) {
        // Immediates can't reference a constant buffer
        return std::nullopt;
    }
    const IR::Inst* const inst{value.InstRecursive()};
    if (inst->Opcode() == IR::Opcode::GetCbuf) {
        const IR::Value index{inst->Arg(0)};
        const IR::Value offset{inst->Arg(1)};
        if (!index.IsImmediate()) {
            // Reading a bindless texture from variable indices is valid
            // but not supported here at the moment
            return std::nullopt;
        }
        if (!offset.IsImmediate()) {
            // TODO: Support arrays of textures
            return std::nullopt;
        }
        return ConstBufferAddr{
            .index{index.U32()},
            .offset{offset.U32()},
        };
    }
    // Reversed loops are more likely to find the right result
    for (size_t arg = inst->NumArgs(); arg--;) {
        IR::Block* inst_block{block};
        if (inst->Opcode() == IR::Opcode::Phi) {
            // If we are going through a phi node, mark the current block as visited
            visited.insert(block);
            // and skip already visited blocks to avoid looping forever
            IR::Block* const phi_block{inst->PhiBlock(arg)};
            if (visited.contains(phi_block)) {
                // Already visited, skip
                continue;
            }
            inst_block = phi_block;
        }
        const std::optional cbuf_addr{Track(inst_block, inst->Arg(arg), visited)};
        if (cbuf_addr) {
            return *cbuf_addr;
        }
    }
    return std::nullopt;
}

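// Builds a TextureInst for a sample instruction: bindless handles are tracked back to the
// constant buffer they are read from, while bound handles use the environment's bound texture
// constant buffer and the immediate offset encoded in the instruction's first argument.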
TextureInst MakeInst(Environment& env, IR::Block* block, IR::Inst& inst) {
    ConstBufferAddr addr;
    if (IsBindless(inst)) {
        VisitedBlocks visited;
        const std::optional<ConstBufferAddr> track_addr{Track(block, IR::Value{&inst}, visited)};
        if (!track_addr) {
            throw NotImplementedException("Failed to track bindless texture constant buffer");
        }
        addr = *track_addr;
    } else {
        addr = ConstBufferAddr{
            .index{env.TextureBoundBuffer()},
            .offset{inst.Arg(0).U32()},
        };
    }
    return TextureInst{
        .cbuf{addr},
        .inst{&inst},
        .block{block},
    };
}

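// Accumulates unique texture descriptors in the shader info, returning the index of an existing
// entry when the same constant buffer location and texture type has already been added.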
class Descriptors {
public:
    explicit Descriptors(TextureDescriptors& descriptors_) : descriptors{descriptors_} {}

    u32 Add(const TextureDescriptor& descriptor) {
        // TODO: Handle arrays
        auto it{std::ranges::find_if(descriptors, [&descriptor](const TextureDescriptor& existing) {
            return descriptor.cbuf_index == existing.cbuf_index &&
                   descriptor.cbuf_offset == existing.cbuf_offset &&
                   descriptor.type == existing.type;
        })};
        if (it != descriptors.end()) {
            return static_cast<u32>(std::distance(descriptors.begin(), it));
        }
        descriptors.push_back(descriptor);
        return static_cast<u32>(descriptors.size()) - 1;
    }

private:
    TextureDescriptors& descriptors;
};
} // Anonymous namespace

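// Gathers every texture instruction in the program, deduplicates the texture handles into
// program.info.texture_descriptors, and rewrites each instruction to its indexed opcode with
// the descriptor index as the first argument.
//
// Conceptually (IR shown in illustrative pseudo-syntax), a sample such as
//     %x = BoundImageSampleImplicitLod %handle_offset, %coords, ...
// becomes
//     %x = ImageSampleImplicitLod %descriptor_index, %coords, ...
// where %descriptor_index selects the entry added to the texture descriptor list.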
void TexturePass(Environment& env, IR::Program& program) {
    TextureInstVector to_replace;
    for (IR::Function& function : program.functions) {
        for (IR::Block* const block : function.post_order_blocks) {
            for (IR::Inst& inst : block->Instructions()) {
                if (!IsTextureInstruction(inst)) {
                    continue;
                }
                to_replace.push_back(MakeInst(env, block, inst));
            }
        }
    }
    // Sort instructions to visit textures by constant buffer index, then by offset:
    // sort by offset first, then stable-sort by index so entries sharing an index
    // keep their offset ordering.
    std::ranges::sort(to_replace, [](const auto& lhs, const auto& rhs) {
        return lhs.cbuf.offset < rhs.cbuf.offset;
    });
    std::stable_sort(to_replace.begin(), to_replace.end(), [](const auto& lhs, const auto& rhs) {
        return lhs.cbuf.index < rhs.cbuf.index;
    });
    Descriptors descriptors{program.info.texture_descriptors};
    for (TextureInst& texture_inst : to_replace) {
        // TODO: Handle arrays
        IR::Inst* const inst{texture_inst.inst};
        const u32 index{descriptors.Add(TextureDescriptor{
            .type{inst->Flags<IR::TextureInstInfo>().type},
            .cbuf_index{texture_inst.cbuf.index},
            .cbuf_offset{texture_inst.cbuf.offset},
            .count{1},
        })};
        inst->ReplaceOpcode(IndexedInstruction(*inst));
        inst->SetArg(0, IR::Value{index});
    }
}

} // namespace Shader::Optimization