summary refs log tree commit diff
path: root/src/shader_recompiler/ir_opt/constant_propagation_pass.cpp
diff options
context:
space:
mode:
author: ReinUsesLisp 2021-04-12 19:41:53 -0300
committer: ameerj 2021-07-22 21:51:27 -0400
commit2597cee85b74be40bfecf0dc9cda90263d6cce40 (patch)
treefb128718fc6da4aa94aff121be577dd747115cb4 /src/shader_recompiler/ir_opt/constant_propagation_pass.cpp
parent: shader: Implement geometry shaders (diff)
download: yuzu-2597cee85b74be40bfecf0dc9cda90263d6cce40.tar.gz
yuzu-2597cee85b74be40bfecf0dc9cda90263d6cce40.tar.xz
yuzu-2597cee85b74be40bfecf0dc9cda90263d6cce40.zip
shader: Add constant propagation for *&^| binary operations
Diffstat (limited to 'src/shader_recompiler/ir_opt/constant_propagation_pass.cpp')
-rw-r--r-- src/shader_recompiler/ir_opt/constant_propagation_pass.cpp | 12
1 file changed, 12 insertions(+), 0 deletions(-)
diff --git a/src/shader_recompiler/ir_opt/constant_propagation_pass.cpp b/src/shader_recompiler/ir_opt/constant_propagation_pass.cpp
index 61fbbe04c..ee73b5b60 100644
--- a/src/shader_recompiler/ir_opt/constant_propagation_pass.cpp
+++ b/src/shader_recompiler/ir_opt/constant_propagation_pass.cpp
@@ -422,6 +422,9 @@ void ConstantPropagation(IR::Block& block, IR::Inst& inst) {
422 return FoldAdd<u32>(block, inst); 422 return FoldAdd<u32>(block, inst);
423 case IR::Opcode::ISub32: 423 case IR::Opcode::ISub32:
424 return FoldISub32(inst); 424 return FoldISub32(inst);
425 case IR::Opcode::IMul32:
426 FoldWhenAllImmediates(inst, [](u32 a, u32 b) { return a * b; });
427 return;
425 case IR::Opcode::BitCastF32U32: 428 case IR::Opcode::BitCastF32U32:
426 return FoldBitCast<IR::Opcode::BitCastF32U32, f32, u32>(inst, IR::Opcode::BitCastU32F32); 429 return FoldBitCast<IR::Opcode::BitCastF32U32, f32, u32>(inst, IR::Opcode::BitCastU32F32);
427 case IR::Opcode::BitCastU32F32: 430 case IR::Opcode::BitCastU32F32:
@@ -479,6 +482,15 @@ void ConstantPropagation(IR::Block& block, IR::Inst& inst) {
479 case IR::Opcode::INotEqual: 482 case IR::Opcode::INotEqual:
480 FoldWhenAllImmediates(inst, [](u32 a, u32 b) { return a != b; }); 483 FoldWhenAllImmediates(inst, [](u32 a, u32 b) { return a != b; });
481 return; 484 return;
485 case IR::Opcode::BitwiseAnd32:
486 FoldWhenAllImmediates(inst, [](u32 a, u32 b) { return a & b; });
487 return;
488 case IR::Opcode::BitwiseOr32:
489 FoldWhenAllImmediates(inst, [](u32 a, u32 b) { return a | b; });
490 return;
491 case IR::Opcode::BitwiseXor32:
492 FoldWhenAllImmediates(inst, [](u32 a, u32 b) { return a ^ b; });
493 return;
482 case IR::Opcode::BitFieldUExtract: 494 case IR::Opcode::BitFieldUExtract:
483 FoldWhenAllImmediates(inst, [](u32 base, u32 shift, u32 count) { 495 FoldWhenAllImmediates(inst, [](u32 base, u32 shift, u32 count) {
484 if (static_cast<size_t>(shift) + static_cast<size_t>(count) > Common::BitSize<u32>()) { 496 if (static_cast<size_t>(shift) + static_cast<size_t>(count) > Common::BitSize<u32>()) {