author     ReinUsesLisp  2021-02-05 19:19:36 -0300
committer  ameerj        2021-07-22 21:51:21 -0400
commit     be94ee88d227d0d3dbeabe9ade98bacd910c7a7e (patch)
tree       68a2043d48b8d1ecb7df23d03c1f92f277c70f9a /src/shader_recompiler/ir_opt
parent     shader: Remove illegal character in SSA pass (diff)
download   yuzu-be94ee88d227d0d3dbeabe9ade98bacd910c7a7e.tar.gz
           yuzu-be94ee88d227d0d3dbeabe9ade98bacd910c7a7e.tar.xz
           yuzu-be94ee88d227d0d3dbeabe9ade98bacd910c7a7e.zip
shader: Make typed IR
Diffstat (limited to 'src/shader_recompiler/ir_opt')
-rw-r--r--  src/shader_recompiler/ir_opt/constant_propagation_pass.cpp             20
-rw-r--r--  src/shader_recompiler/ir_opt/global_memory_to_storage_buffer_pass.cpp  21
2 files changed, 31 insertions, 10 deletions
diff --git a/src/shader_recompiler/ir_opt/constant_propagation_pass.cpp b/src/shader_recompiler/ir_opt/constant_propagation_pass.cpp
index 02f5b653d..7fb3192d8 100644
--- a/src/shader_recompiler/ir_opt/constant_propagation_pass.cpp
+++ b/src/shader_recompiler/ir_opt/constant_propagation_pass.cpp
@@ -5,6 +5,7 @@
 #include <algorithm>
 #include <type_traits>
 
+#include "common/bit_cast.h"
 #include "common/bit_util.h"
 #include "shader_recompiler/exception.h"
 #include "shader_recompiler/frontend/ir/microinstruction.h"
@@ -25,6 +26,8 @@ template <typename T>
         return value.U1();
     } else if constexpr (std::is_same_v<T, u32>) {
         return value.U32();
+    } else if constexpr (std::is_same_v<T, f32>) {
+        return value.F32();
     } else if constexpr (std::is_same_v<T, u64>) {
         return value.U64();
     }
@@ -115,6 +118,19 @@ void FoldLogicalAnd(IR::Inst& inst) {
     }
 }
 
+template <typename Dest, typename Source>
+void FoldBitCast(IR::Inst& inst, IR::Opcode reverse) {
+    const IR::Value value{inst.Arg(0)};
+    if (value.IsImmediate()) {
+        inst.ReplaceUsesWith(IR::Value{Common::BitCast<Dest>(Arg<Source>(value))});
+        return;
+    }
+    IR::Inst* const arg_inst{value.InstRecursive()};
+    if (value.InstRecursive()->Opcode() == reverse) {
+        inst.ReplaceUsesWith(arg_inst->Arg(0));
+    }
+}
+
 void ConstantPropagation(IR::Inst& inst) {
     switch (inst.Opcode()) {
     case IR::Opcode::GetRegister:
@@ -123,6 +139,10 @@ void ConstantPropagation(IR::Inst& inst) {
         return FoldGetPred(inst);
     case IR::Opcode::IAdd32:
         return FoldAdd<u32>(inst);
+    case IR::Opcode::BitCastF32U32:
+        return FoldBitCast<f32, u32>(inst, IR::Opcode::BitCastU32F32);
+    case IR::Opcode::BitCastU32F32:
+        return FoldBitCast<u32, f32>(inst, IR::Opcode::BitCastF32U32);
     case IR::Opcode::IAdd64:
         return FoldAdd<u64>(inst);
     case IR::Opcode::BitFieldUExtract:
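
Note on the new FoldBitCast rule above: it handles two cases. If the operand is an immediate, the cast is evaluated at compile time; if the operand was produced by the opposite cast, the round trip collapses to the original value. The following is a minimal standalone sketch of that idea, not the shader_recompiler IR API: ToyValue, ToyInst, and FoldBitCastF32U32 are invented for illustration, and C++20 std::bit_cast stands in for yuzu's Common::BitCast.

// Sketch only: invented toy types, not the real shader_recompiler IR.
#include <bit>
#include <cassert>
#include <cstdint>
#include <optional>
#include <variant>

enum class Op { BitCastF32U32, BitCastU32F32, Other };

struct ToyInst;  // forward declaration so a value can point at its producer

struct ToyValue {
    // Either an immediate constant or the instruction that produced the value.
    std::variant<std::uint32_t, float, ToyInst*> storage;
};

struct ToyInst {
    Op op;
    ToyValue arg0;
};

// Fold BitCastF32U32(x): if x is an immediate u32, bit-cast it now; if x was
// produced by the reverse cast (BitCastU32F32), forward that cast's operand.
std::optional<ToyValue> FoldBitCastF32U32(const ToyInst& inst) {
    if (const std::uint32_t* imm = std::get_if<std::uint32_t>(&inst.arg0.storage)) {
        return ToyValue{std::bit_cast<float>(*imm)};  // constant-fold the cast
    }
    if (ToyInst* const* producer = std::get_if<ToyInst*>(&inst.arg0.storage)) {
        if ((*producer)->op == Op::BitCastU32F32) {
            return (*producer)->arg0;  // the round trip cancels out
        }
    }
    return std::nullopt;  // nothing to fold
}

int main() {
    // 0x3f800000 is the bit pattern of 1.0f, so the fold yields an immediate 1.0f.
    const ToyInst cast{Op::BitCastF32U32, ToyValue{std::uint32_t{0x3f800000}}};
    const std::optional<ToyValue> folded = FoldBitCastF32U32(cast);
    assert(folded && std::get<float>(folded->storage) == 1.0f);
}

The same symmetry applies in the other direction (BitCastU32F32 feeding BitCastF32U32), which is why the pass registers both opcodes and passes the reverse opcode as a parameter.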
diff --git a/src/shader_recompiler/ir_opt/global_memory_to_storage_buffer_pass.cpp b/src/shader_recompiler/ir_opt/global_memory_to_storage_buffer_pass.cpp
index ee69a5c9d..34393e1d5 100644
--- a/src/shader_recompiler/ir_opt/global_memory_to_storage_buffer_pass.cpp
+++ b/src/shader_recompiler/ir_opt/global_memory_to_storage_buffer_pass.cpp
@@ -108,8 +108,8 @@ bool MeetsBias(const StorageBufferAddr& storage_buffer, const Bias& bias) noexce
            storage_buffer.offset < bias.offset_end;
 }
 
-/// Ignores a global memory operation, reads return zero and writes are ignored
-void IgnoreGlobalMemory(IR::Block& block, IR::Block::iterator inst) {
+/// Discards a global memory operation, reads return zero and writes are ignored
+void DiscardGlobalMemory(IR::Block& block, IR::Block::iterator inst) {
     const IR::Value zero{u32{0}};
     switch (inst->Opcode()) {
     case IR::Opcode::LoadGlobalS8:
@@ -120,12 +120,12 @@ void IgnoreGlobalMemory(IR::Block& block, IR::Block::iterator inst) {
         inst->ReplaceUsesWith(zero);
         break;
     case IR::Opcode::LoadGlobal64:
-        inst->ReplaceUsesWith(
-            IR::Value{&*block.PrependNewInst(inst, IR::Opcode::CompositeConstruct2, {zero, zero})});
+        inst->ReplaceUsesWith(IR::Value{
+            &*block.PrependNewInst(inst, IR::Opcode::CompositeConstructU32x2, {zero, zero})});
         break;
     case IR::Opcode::LoadGlobal128:
         inst->ReplaceUsesWith(IR::Value{&*block.PrependNewInst(
-            inst, IR::Opcode::CompositeConstruct4, {zero, zero, zero, zero})});
+            inst, IR::Opcode::CompositeConstructU32x4, {zero, zero, zero, zero})});
         break;
     case IR::Opcode::WriteGlobalS8:
     case IR::Opcode::WriteGlobalU8:
@@ -137,7 +137,8 @@ void IgnoreGlobalMemory(IR::Block& block, IR::Block::iterator inst) {
         inst->Invalidate();
         break;
     default:
-        throw LogicError("Invalid opcode to ignore its global memory operation {}", inst->Opcode());
+        throw LogicError("Invalid opcode to discard its global memory operation {}",
+                         inst->Opcode());
     }
 }
 
@@ -196,7 +197,7 @@ void CollectStorageBuffers(IR::Block& block, IR::Block::iterator inst,
         storage_buffer = Track(addr, nullptr);
         if (!storage_buffer) {
             // If that also failed, drop the global memory usage
-            IgnoreGlobalMemory(block, inst);
+            DiscardGlobalMemory(block, inst);
         }
     }
     // Collect storage buffer and the instruction
@@ -242,12 +243,12 @@ std::optional<IR::U32> TrackLowAddress(IR::IREmitter& ir, IR::Inst* inst) {
     if (vector.IsImmediate()) {
         return std::nullopt;
     }
-    // This vector is expected to be a CompositeConstruct2
+    // This vector is expected to be a CompositeConstructU32x2
     IR::Inst* const vector_inst{vector.InstRecursive()};
-    if (vector_inst->Opcode() != IR::Opcode::CompositeConstruct2) {
+    if (vector_inst->Opcode() != IR::Opcode::CompositeConstructU32x2) {
         return std::nullopt;
     }
-    // Grab the first argument from the CompositeConstruct2, this is the low address.
+    // Grab the first argument from the CompositeConstructU32x2, this is the low address.
     // Re-apply the offset in case we found one.
     const IR::U32 low_addr{vector_inst->Arg(0)};
     return imm_offset != 0 ? IR::U32{ir.IAdd(low_addr, ir.Imm32(imm_offset))} : low_addr;
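
For context on the TrackLowAddress hunk above: the pass expects a 64-bit global address to have been assembled from two 32-bit halves by the composite opcode (now named CompositeConstructU32x2), and it recovers the low half as the first argument of that composite. Below is a minimal standalone sketch of that pattern match; ToyOp, ToyValue, and ToyInst are invented stand-ins for the real IR types, and the immediate-offset re-application shown in the diff is omitted.

// Sketch only: invented toy types, not the real shader_recompiler IR.
#include <cstdint>
#include <optional>
#include <variant>

enum class ToyOp { CompositeU32x2, Other };

struct ToyInst;  // forward declaration so a value can point at its producer

struct ToyValue {
    // Either an immediate constant or the instruction that produced the value.
    std::variant<std::uint32_t, ToyInst*> storage;
};

struct ToyInst {
    ToyOp op;
    ToyValue args[2];
};

// Mirror of the check above: give up on immediates or on producers other than
// the two-component composite, otherwise return its first component.
std::optional<ToyValue> TrackLowAddress(const ToyValue& address_vector) {
    ToyInst* const* producer = std::get_if<ToyInst*>(&address_vector.storage);
    if (producer == nullptr || (*producer)->op != ToyOp::CompositeU32x2) {
        return std::nullopt;
    }
    return (*producer)->args[0];  // first component is the low 32-bit address
}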