diff options
| author | 2021-05-23 04:14:35 -0300 | |
|---|---|---|
| committer | 2021-07-22 21:51:29 -0400 | |
| commit | d2e811db2edd3829b344e96ad56ae979bccd28d2 (patch) | |
| tree | 14160c4fb0bd1dac9fb604f3a6b4767e3292a0e7 /src/shader_recompiler/backend/spirv/emit_spirv_image.cpp | |
| parent | spirv: Add int8 and int16 capabilities only when supported (diff) | |
| download | yuzu-d2e811db2edd3829b344e96ad56ae979bccd28d2.tar.gz yuzu-d2e811db2edd3829b344e96ad56ae979bccd28d2.tar.xz yuzu-d2e811db2edd3829b344e96ad56ae979bccd28d2.zip | |
spirv: Workaround image unsigned offset bug
Works around a bug in Nvidia's OpenGL SPIR-V compiler when using unsigned
texture offsets, by emitting the offsets as signed integer constants instead.
Diffstat (limited to 'src/shader_recompiler/backend/spirv/emit_spirv_image.cpp')
| -rw-r--r-- | src/shader_recompiler/backend/spirv/emit_spirv_image.cpp | 10 |
1 file changed, 5 insertions, 5 deletions
diff --git a/src/shader_recompiler/backend/spirv/emit_spirv_image.cpp b/src/shader_recompiler/backend/spirv/emit_spirv_image.cpp index 6008980af..a6cb67b97 100644 --- a/src/shader_recompiler/backend/spirv/emit_spirv_image.cpp +++ b/src/shader_recompiler/backend/spirv/emit_spirv_image.cpp | |||
| @@ -104,7 +104,7 @@ private: | |||
| 104 | return; | 104 | return; |
| 105 | } | 105 | } |
| 106 | if (offset.IsImmediate()) { | 106 | if (offset.IsImmediate()) { |
| 107 | Add(spv::ImageOperandsMask::ConstOffset, ctx.Const(offset.U32())); | 107 | Add(spv::ImageOperandsMask::ConstOffset, ctx.SConst(offset.U32())); |
| 108 | return; | 108 | return; |
| 109 | } | 109 | } |
| 110 | IR::Inst* const inst{offset.InstRecursive()}; | 110 | IR::Inst* const inst{offset.InstRecursive()}; |
| @@ -112,16 +112,16 @@ private: | |||
| 112 | switch (inst->GetOpcode()) { | 112 | switch (inst->GetOpcode()) { |
| 113 | case IR::Opcode::CompositeConstructU32x2: | 113 | case IR::Opcode::CompositeConstructU32x2: |
| 114 | Add(spv::ImageOperandsMask::ConstOffset, | 114 | Add(spv::ImageOperandsMask::ConstOffset, |
| 115 | ctx.Const(inst->Arg(0).U32(), inst->Arg(1).U32())); | 115 | ctx.SConst(inst->Arg(0).U32(), inst->Arg(1).U32())); |
| 116 | return; | 116 | return; |
| 117 | case IR::Opcode::CompositeConstructU32x3: | 117 | case IR::Opcode::CompositeConstructU32x3: |
| 118 | Add(spv::ImageOperandsMask::ConstOffset, | 118 | Add(spv::ImageOperandsMask::ConstOffset, |
| 119 | ctx.Const(inst->Arg(0).U32(), inst->Arg(1).U32(), inst->Arg(2).U32())); | 119 | ctx.SConst(inst->Arg(0).U32(), inst->Arg(1).U32(), inst->Arg(2).U32())); |
| 120 | return; | 120 | return; |
| 121 | case IR::Opcode::CompositeConstructU32x4: | 121 | case IR::Opcode::CompositeConstructU32x4: |
| 122 | Add(spv::ImageOperandsMask::ConstOffset, | 122 | Add(spv::ImageOperandsMask::ConstOffset, |
| 123 | ctx.Const(inst->Arg(0).U32(), inst->Arg(1).U32(), inst->Arg(2).U32(), | 123 | ctx.SConst(inst->Arg(0).U32(), inst->Arg(1).U32(), inst->Arg(2).U32(), |
| 124 | inst->Arg(3).U32())); | 124 | inst->Arg(3).U32())); |
| 125 | return; | 125 | return; |
| 126 | default: | 126 | default: |
| 127 | break; | 127 | break; |