Diffstat (limited to 'src/shader_recompiler/ir_opt/constant_propagation_pass.cpp')
-rw-r--r--  src/shader_recompiler/ir_opt/constant_propagation_pass.cpp | 26
1 file changed, 12 insertions(+), 14 deletions(-)
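
The change below collapses each IsImmediate()/InstRecursive() pair into a single TryInstRecursive() call and null-checks the result. The helper itself is not part of this diff; judging from how it is used here, it plausibly behaves like the following stand-in (a minimal sketch with hypothetical mock types, not the real yuzu IR::Value API):

    struct Inst;  // stand-in for IR::Inst

    struct Value {
        Inst* inst{};  // null models an immediate constant

        bool IsImmediate() const { return inst == nullptr; }
        Inst* InstRecursive() const { return inst; }  // precondition: !IsImmediate()

        // Presumed semantics of the new accessor: return the defining
        // instruction, or null when the value is an immediate, so callers
        // fold the immediate check and the lookup into one step.
        Inst* TryInstRecursive() const { return IsImmediate() ? nullptr : inst; }
    };
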
diff --git a/src/shader_recompiler/ir_opt/constant_propagation_pass.cpp b/src/shader_recompiler/ir_opt/constant_propagation_pass.cpp
index 8dd6d6c2c..08a06da02 100644
--- a/src/shader_recompiler/ir_opt/constant_propagation_pass.cpp
+++ b/src/shader_recompiler/ir_opt/constant_propagation_pass.cpp
@@ -116,33 +116,31 @@ bool FoldXmadMultiply(IR::Block& block, IR::Inst& inst) {
      *
      * This optimization has been proven safe by LLVM and MSVC.
      */
-    const IR::Value lhs_arg{inst.Arg(0)};
-    const IR::Value rhs_arg{inst.Arg(1)};
-    if (lhs_arg.IsImmediate() || rhs_arg.IsImmediate()) {
+    IR::Inst* const lhs_shl{inst.Arg(0).TryInstRecursive()};
+    IR::Inst* const rhs_mul{inst.Arg(1).TryInstRecursive()};
+    if (!lhs_shl || !rhs_mul) {
         return false;
     }
-    IR::Inst* const lhs_shl{lhs_arg.InstRecursive()};
     if (lhs_shl->GetOpcode() != IR::Opcode::ShiftLeftLogical32 ||
         lhs_shl->Arg(1) != IR::Value{16U}) {
         return false;
     }
-    if (lhs_shl->Arg(0).IsImmediate()) {
+    IR::Inst* const lhs_mul{lhs_shl->Arg(0).TryInstRecursive()};
+    if (!lhs_mul) {
         return false;
     }
-    IR::Inst* const lhs_mul{lhs_shl->Arg(0).InstRecursive()};
-    IR::Inst* const rhs_mul{rhs_arg.InstRecursive()};
     if (lhs_mul->GetOpcode() != IR::Opcode::IMul32 || rhs_mul->GetOpcode() != IR::Opcode::IMul32) {
         return false;
     }
-    if (lhs_mul->Arg(1).Resolve() != rhs_mul->Arg(1).Resolve()) {
+    const IR::U32 factor_b{lhs_mul->Arg(1)};
+    if (factor_b.Resolve() != rhs_mul->Arg(1).Resolve()) {
         return false;
     }
-    const IR::U32 factor_b{lhs_mul->Arg(1)};
-    if (lhs_mul->Arg(0).IsImmediate() || rhs_mul->Arg(0).IsImmediate()) {
+    IR::Inst* const lhs_bfe{lhs_mul->Arg(0).TryInstRecursive()};
+    IR::Inst* const rhs_bfe{rhs_mul->Arg(0).TryInstRecursive()};
+    if (!lhs_bfe || !rhs_bfe) {
         return false;
     }
-    IR::Inst* const lhs_bfe{lhs_mul->Arg(0).InstRecursive()};
-    IR::Inst* const rhs_bfe{rhs_mul->Arg(0).InstRecursive()};
     if (lhs_bfe->GetOpcode() != IR::Opcode::BitFieldUExtract) {
         return false;
     }
@@ -155,10 +153,10 @@ bool FoldXmadMultiply(IR::Block& block, IR::Inst& inst) {
     if (rhs_bfe->Arg(1) != IR::Value{0U} || rhs_bfe->Arg(2) != IR::Value{16U}) {
         return false;
     }
-    if (lhs_bfe->Arg(0).Resolve() != rhs_bfe->Arg(0).Resolve()) {
+    const IR::U32 factor_a{lhs_bfe->Arg(0)};
+    if (factor_a.Resolve() != rhs_bfe->Arg(0).Resolve()) {
         return false;
     }
-    const IR::U32 factor_a{lhs_bfe->Arg(0)};
     IR::IREmitter ir{block, IR::Block::InstructionList::s_iterator_to(inst)};
     inst.ReplaceUsesWith(ir.IMul(factor_a, factor_b));
     return true;
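
For reference, the pattern FoldXmadMultiply matches is the XMAD-style expansion of a 32-bit multiply: factor_a is split into 16-bit halves via BitFieldUExtract (the rhs extract of bits [0, 16) is visible above; the lhs extract's offsets are checked in lines elided from this hunk), each half is multiplied by factor_b, and the partial products are recombined with a shift and an add. The fold to a single IMul is sound because both forms agree modulo 2^32. A small self-contained check of that identity (my illustration, not code from the repository):

    #include <cassert>
    #include <cstdint>

    // XMAD-style expansion of a 32-bit multiply:
    //   (((a >> 16) * b) << 16) + ((a & 0xFFFF) * b)
    // Unsigned arithmetic wraps modulo 2^32, so this equals a * b.
    std::uint32_t XmadStyleMultiply(std::uint32_t a, std::uint32_t b) {
        const std::uint32_t hi{(a >> 16) & 0xFFFFu}; // BitFieldUExtract(a, 16, 16)
        const std::uint32_t lo{a & 0xFFFFu};         // BitFieldUExtract(a, 0, 16)
        return ((hi * b) << 16) + lo * b;            // ShiftLeftLogical32 + add
    }

    int main() {
        // Spot-check a few values, including ones whose product overflows 32 bits.
        const std::uint32_t samples[]{0u, 1u, 0xFFFFu, 0x12345678u, 0xFFFFFFFFu};
        for (std::uint32_t a : samples) {
            for (std::uint32_t b : samples) {
                assert(XmadStyleMultiply(a, b) == a * b); // matches the folded IMul
            }
        }
    }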