path: root/src/shader_recompiler/backend
diff options
context:
space:
mode:
author     ameerj 2021-05-25 01:35:30 -0400
committer  ameerj 2021-07-22 21:51:36 -0400
commit     11ba190462c7b69a47598b2d1572fac3bccc4adc (patch)
tree       e24682860686eba5710716579a583312c1db2652 /src/shader_recompiler/backend
parent     glsl: implement phi nodes (diff)
glsl: Revert ssbo aliasing. Storage Atomics impl
Diffstat (limited to 'src/shader_recompiler/backend')
-rw-r--r--  src/shader_recompiler/backend/glsl/emit_context.cpp           42
-rw-r--r--  src/shader_recompiler/backend/glsl/emit_glsl_atomic.cpp      141
-rw-r--r--  src/shader_recompiler/backend/glsl/emit_glsl_instructions.h    3
-rw-r--r--  src/shader_recompiler/backend/glsl/emit_glsl_memory.cpp       14
4 files changed, 132 insertions(+), 68 deletions(-)
diff --git a/src/shader_recompiler/backend/glsl/emit_context.cpp b/src/shader_recompiler/backend/glsl/emit_context.cpp
index 3c610a08a..7986bf78f 100644
--- a/src/shader_recompiler/backend/glsl/emit_context.cpp
+++ b/src/shader_recompiler/backend/glsl/emit_context.cpp
@@ -59,27 +59,8 @@ void EmitContext::DefineStorageBuffers() {
     }
     u32 binding{};
     for (const auto& desc : info.storage_buffers_descriptors) {
-        if (info.uses_s32_atomics) {
-            Add("layout(std430,binding={}) buffer ssbo_{}_s32{{int ssbo{}_s32[];}};", binding,
-                binding, desc.cbuf_index, desc.count);
-        }
-        if (True(info.used_storage_buffer_types & IR::Type::U32)) {
-            Add("layout(std430,binding={}) buffer ssbo_{}_u32{{uint ssbo{}_u32[];}};", binding,
-                binding, desc.cbuf_index, desc.count);
-        }
-        if (True(info.used_storage_buffer_types & IR::Type::F32)) {
-            Add("layout(std430,binding={}) buffer ssbo_{}_f32{{float ssbo{}_f32[];}};", binding,
-                binding, desc.cbuf_index, desc.count);
-        }
-        if (True(info.used_storage_buffer_types & IR::Type::U32x2)) {
-            Add("layout(std430,binding={}) buffer ssbo_{}_u32x2{{uvec2 ssbo{}_u32x2[];}};", binding,
-                binding, desc.cbuf_index, desc.count);
-        }
-        if (True(info.used_storage_buffer_types & IR::Type::U64) ||
-            True(info.used_storage_buffer_types & IR::Type::F64)) {
-            Add("layout(std430,binding={}) buffer ssbo_{}_u64{{uint64_t ssbo{}_u64[];}};", binding,
-                binding, desc.cbuf_index, desc.count);
-        }
+        Add("layout(std430,binding={}) buffer ssbo_{}{{uint ssbo{}[];}};", binding, binding,
+            desc.cbuf_index, desc.count);
         ++binding;
     }
 }
@@ -92,6 +73,25 @@ void EmitContext::DefineHelperFunctions() {
         code +=
             "uint CasDecrement(uint op_a,uint op_b){return(op_a==0||op_a>op_b)?op_b:(op_a-1u);}\n";
     }
+    if (info.uses_atomic_f32_add) {
+        code += "uint CasFloatAdd(uint op_a,uint op_b){return "
+                "floatBitsToUint(uintBitsToFloat(op_a)+uintBitsToFloat(op_b));}\n";
+    }
+    if (info.uses_atomic_f32x2_add) {
+        code += "uint CasFloatAdd32x2(uint op_a,uint op_b){return "
+                "packHalf2x16(unpackHalf2x16(op_a)+unpackHalf2x16(op_b));}\n";
+    }
+    if (info.uses_atomic_f32x2_min) {
+        code += "uint CasFloatMin32x2(uint op_a,uint op_b){return "
+                "packHalf2x16(min(unpackHalf2x16(op_a),unpackHalf2x16(op_b)));}\n";
+    }
+    if (info.uses_atomic_f32x2_max) {
+        code += "uint CasFloatMax32x2(uint op_a,uint op_b){return "
+                "packHalf2x16(max(unpackHalf2x16(op_a),unpackHalf2x16(op_b)));}\n";
+    }
+    // TODO: Track this usage
+    code += "uint CasMinS32(uint op_a,uint op_b){return uint(min(int(op_a),int(op_b)));}";
+    code += "uint CasMaxS32(uint op_a,uint op_b){return uint(max(int(op_a),int(op_b)));}";
 }
 
 } // namespace Shader::Backend::GLSL
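
For orientation, this is roughly the GLSL the two hunks above emit for a single storage buffer descriptor, assuming binding 0, cbuf_index 0, and a shader whose info sets uses_atomic_f32_add (a sketch assembled from the format strings in this diff, not output captured from the emitter):

layout(std430,binding=0) buffer ssbo_0{uint ssbo0[];};
// CAS helper injected by DefineHelperFunctions() when uses_atomic_f32_add is set
uint CasFloatAdd(uint op_a,uint op_b){return floatBitsToUint(uintBitsToFloat(op_a)+uintBitsToFloat(op_b));}

With the aliasing reverted, every SSBO is a plain uint array, so typed accesses go through bit casts and the CAS helpers instead of per-type aliased buffer declarations.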
diff --git a/src/shader_recompiler/backend/glsl/emit_glsl_atomic.cpp b/src/shader_recompiler/backend/glsl/emit_glsl_atomic.cpp
index f3ef37873..0b29c213b 100644
--- a/src/shader_recompiler/backend/glsl/emit_glsl_atomic.cpp
+++ b/src/shader_recompiler/backend/glsl/emit_glsl_atomic.cpp
@@ -13,132 +13,193 @@
 namespace Shader::Backend::GLSL {
 namespace {
 static constexpr std::string_view cas_loop{R"(
-{} {};
+uint {};
 for (;;){{
-    {} old_value={};
-    {} = atomicCompSwap({},old_value,{}({},{}));
+    uint old_value={};
+    {}=atomicCompSwap({},old_value,{}({},{}));
     if ({}==old_value){{break;}}
 }})"};
 
-void CasFunction(EmitContext& ctx, IR::Inst& inst, std::string_view ssbo, std::string_view value,
-                 std::string_view type, std::string_view function) {
+void CasFunction(EmitContext& ctx, std::string_view ret, std::string_view ssbo,
+                 std::string_view value, std::string_view function) {
+    ctx.Add(cas_loop.data(), ret, ssbo, ret, ssbo, function, ssbo, value, ret);
+}
+
+void CasFunctionInt32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                      const IR::Value& offset, std::string_view value, std::string_view function) {
+    const auto ret{ctx.reg_alloc.Define(inst)};
+    const std::string ssbo{fmt::format("ssbo{}[{}]", binding.U32(), offset.U32())};
+    CasFunction(ctx, ret, ssbo, value, function);
+}
+
+void CasFunctionF32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                    const IR::Value& offset, std::string_view value, std::string_view function) {
+    const std::string ssbo{fmt::format("ssbo{}[{}]", binding.U32(), offset.U32())};
+    const std::string u32_value{fmt::format("floatBitsToUint({})", value)};
+    const auto ret{ctx.reg_alloc.Define(inst)};
+    const auto ret_32{ret + "_u32"};
+    CasFunction(ctx, ret_32, ssbo, u32_value, function);
+    ctx.Add("float {}=uintBitsToFloat({});", ret, ret_32);
+}
+
+void CasFunctionF32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                      const IR::Value& offset, std::string_view value, std::string_view function) {
+    const std::string ssbo{fmt::format("ssbo{}[{}]", binding.U32(), offset.U32())};
+    const std::string u32_value{fmt::format("packHalf2x16({})", value)};
     const auto ret{ctx.reg_alloc.Define(inst)};
-    ctx.Add(cas_loop.data(), type, ret, type, ssbo, ret, ssbo, function, ssbo, value, ret);
+    CasFunction(ctx, ret, ssbo, u32_value, function);
 }
 } // namespace
 
 void EmitStorageAtomicIAdd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                              const IR::Value& offset, std::string_view value) {
-    ctx.AddU32("{}=atomicAdd(ssbo{}_u32[{}],{});", inst, binding.U32(), offset.U32(), value);
+    ctx.AddU32("{}=atomicAdd(ssbo{}[{}],{});", inst, binding.U32(), offset.U32(), value);
 }
 
 void EmitStorageAtomicSMin32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                              const IR::Value& offset, std::string_view value) {
-    ctx.AddS32("{}=atomicMin(ssbo{}_s32[{}],int({}));", inst, binding.U32(), offset.U32(), value);
+    const std::string u32_value{fmt::format("uint({})", value)};
+    CasFunctionInt32(ctx, inst, binding, offset, u32_value, "CasMinS32");
 }
 
 void EmitStorageAtomicUMin32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                              const IR::Value& offset, std::string_view value) {
-    ctx.AddU32("{}=atomicMin(ssbo{}_u32[{}],{});", inst, binding.U32(), offset.U32(), value);
+    ctx.AddU32("{}=atomicMin(ssbo{}[{}],{});", inst, binding.U32(), offset.U32(), value);
 }
 
 void EmitStorageAtomicSMax32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                              const IR::Value& offset, std::string_view value) {
-    ctx.AddS32("{}=atomicMax(ssbo{}_s32[{}],int({}));", inst, binding.U32(), offset.U32(), value);
+    const std::string u32_value{fmt::format("uint({})", value)};
+    CasFunctionInt32(ctx, inst, binding, offset, u32_value, "CasMaxS32");
 }
 
 void EmitStorageAtomicUMax32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                              const IR::Value& offset, std::string_view value) {
-    ctx.AddU32("{}=atomicMax(ssbo{}_u32[{}],{});", inst, binding.U32(), offset.U32(), value);
+    ctx.AddU32("{}=atomicMax(ssbo{}[{}],{});", inst, binding.U32(), offset.U32(), value);
 }
 
 void EmitStorageAtomicInc32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
-                            [[maybe_unused]] const IR::Value& offset, std::string_view value) {
-    // const auto ret{ctx.reg_alloc.Define(inst)};
-    // const auto type{"uint"};
-    // ctx.Add(cas_loop.data(), type, ret, type, ssbo, ret, ssbo, "CasIncrement", ssbo, value, ret);
-    const std::string ssbo{fmt::format("ssbo{}_u32[{}]", binding.U32(), offset.U32())};
-    CasFunction(ctx, inst, ssbo, value, "uint", "CasIncrement");
+                            const IR::Value& offset, std::string_view value) {
+    CasFunctionInt32(ctx, inst, binding, offset, value, "CasIncrement");
 }
 
 void EmitStorageAtomicDec32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                             const IR::Value& offset, std::string_view value) {
-    const std::string ssbo{fmt::format("ssbo{}_u32[{}]", binding.U32(), offset.U32())};
-    CasFunction(ctx, inst, ssbo, value, "uint", "CasDecrement");
+    CasFunctionInt32(ctx, inst, binding, offset, value, "CasDecrement");
 }
 
 void EmitStorageAtomicAnd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                             const IR::Value& offset, std::string_view value) {
-    ctx.AddU32("{}=atomicAnd(ssbo{}_u32[{}],{});", inst, binding.U32(), offset.U32(), value);
+    ctx.AddU32("{}=atomicAnd(ssbo{}[{}],{});", inst, binding.U32(), offset.U32(), value);
 }
 
 void EmitStorageAtomicOr32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                            const IR::Value& offset, std::string_view value) {
-    ctx.AddU32("{}=atomicOr(ssbo{}_u32[{}],{});", inst, binding.U32(), offset.U32(), value);
+    ctx.AddU32("{}=atomicOr(ssbo{}[{}],{});", inst, binding.U32(), offset.U32(), value);
 }
 
 void EmitStorageAtomicXor32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                             const IR::Value& offset, std::string_view value) {
-    ctx.AddU32("{}=atomicXor(ssbo{}_u32[{}],{});", inst, binding.U32(), offset.U32(), value);
+    ctx.AddU32("{}=atomicXor(ssbo{}[{}],{});", inst, binding.U32(), offset.U32(), value);
 }
 
 void EmitStorageAtomicExchange32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                                  const IR::Value& offset, std::string_view value) {
-    ctx.AddU32("{}=atomicExchange(ssbo{}_u32[{}],{});", inst, binding.U32(), offset.U32(), value);
+    ctx.AddU32("{}=atomicExchange(ssbo{}[{}],{});", inst, binding.U32(), offset.U32(), value);
 }
 
 void EmitStorageAtomicIAdd64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                              const IR::Value& offset, std::string_view value) {
-    // ctx.AddU64("{}=atomicAdd(ssbo{}_u64[{}],{});", inst, binding.U32(), offset.U32(), value);
-    ctx.AddU64("{}=ssbo{}_u64[{}];", inst, binding.U32(), offset.U32());
-    ctx.Add("ssbo{}_u64[{}]+={};", binding.U32(), offset.U32(), value);
+    // LOG_WARNING(..., "Op falling to non-atomic");
+    ctx.AddU64("{}=uint64_t(uvec2(ssbo{}[{}],ssbo{}[{}]));", inst, binding.U32(), offset.U32(),
+               binding.U32(), offset.U32() + 1);
+    ctx.Add("ssbo{}[{}]+=unpackUint2x32({}).x;ssbo{}[{}]+=unpackUint2x32({}).y;", binding.U32(),
+            offset.U32(), value, binding.U32(), offset.U32() + 1, value);
 }
 
 void EmitStorageAtomicSMin64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                              const IR::Value& offset, std::string_view value) {
-    ctx.AddS64("{}=atomicMin(int64_t(ssbo{}_u64[{}]),int64_t({}));", inst, binding.U32(),
-               offset.U32(), value);
+    // LOG_WARNING(..., "Op falling to non-atomic");
+    ctx.AddS64("{}=int64_t(ivec2(ssbo{}[{}],ssbo{}[{}]));", inst, binding.U32(), offset.U32(),
+               binding.U32(), offset.U32() + 1);
+    ctx.Add(R"(
+for(int i=0;i<2;++i){{
+ssbo{}[{}+i]=uint(min(int(ssbo{}[{}+i]),unpackInt2x32(int64_t({}))[i]));
+}}
+)",
+            binding.U32(), offset.U32(), binding.U32(), offset.U32(), value);
 }
 
 void EmitStorageAtomicUMin64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                              const IR::Value& offset, std::string_view value) {
-    ctx.AddU64("{}=atomicMin(ssbo{}_u64[{}],{});", inst, binding.U32(), offset.U32(), value);
+    // LOG_WARNING(..., "Op falling to non-atomic");
+    ctx.AddU64("{}=uint64_t(uvec2(ssbo{}[{}],ssbo{}[{}]));", inst, binding.U32(), offset.U32(),
+               binding.U32(), offset.U32() + 1);
+    ctx.Add(R"(
+for(int i=0;i<2;++i){{
+ssbo{}[{}+i]=min(ssbo{}[{}+i],unpackUint2x32(uint64_t({}))[i]);
+}}
+)",
+            binding.U32(), offset.U32(), binding.U32(), offset.U32(), value);
 }
 
 void EmitStorageAtomicSMax64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                              const IR::Value& offset, std::string_view value) {
-    ctx.AddS64("{}=atomicMax(int64_t(ssbo{}_u64[{}]),int64_t({}));", inst, binding.U32(),
-               offset.U32(), value);
+    // LOG_WARNING(..., "Op falling to non-atomic");
+    ctx.AddS64("{}=int64_t(ivec2(ssbo{}[{}],ssbo{}[{}]));", inst, binding.U32(), offset.U32(),
+               binding.U32(), offset.U32() + 1);
+    ctx.Add(R"(
+for(int i=0;i<2;++i){{
+ssbo{}[{}+i]=uint(max(int(ssbo{}[{}+i]),unpackInt2x32(int64_t({}))[i]));
+}}
+)",
+            binding.U32(), offset.U32(), binding.U32(), offset.U32(), value);
 }
 
 void EmitStorageAtomicUMax64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                              const IR::Value& offset, std::string_view value) {
-    ctx.AddU64("{}=atomicMax(ssbo{}_u64[{}],{});", inst, binding.U32(), offset.U32(), value);
+    // LOG_WARNING(..., "Op falling to non-atomic");
+    ctx.AddU64("{}=uint64_t(uvec2(ssbo{}[{}],ssbo{}[{}]));", inst, binding.U32(), offset.U32(),
+               binding.U32(), offset.U32() + 1);
+    ctx.Add(R"(
+for(int i=0;i<2;++i){{
+ssbo{}[{}+i]=max(ssbo{}[{}+i],unpackUint2x32(uint64_t({}))[i]);
+}}
+)",
+            binding.U32(), offset.U32(), binding.U32(), offset.U32(), value);
 }
 
 void EmitStorageAtomicAnd64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                             const IR::Value& offset, std::string_view value) {
-    ctx.AddU64("{}=atomicAnd(ssbo{}_u64[{}],{});", inst, binding.U32(), offset.U32(), value);
+    ctx.AddU64("{}=uint64_t(uvec2(atomicAnd(ssbo{}[{}],unpackUint2x32({}).x),atomicAnd(ssbo{}[{}],"
+               "unpackUint2x32({}).y)));",
+               inst, binding.U32(), offset.U32(), value, binding.U32(), offset.U32() + 1, value);
 }
 
 void EmitStorageAtomicOr64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                            const IR::Value& offset, std::string_view value) {
-    ctx.AddU64("{}=atomicOr(ssbo{}_u64[{}],{});", inst, binding.U32(), offset.U32(), value);
+    ctx.AddU64("{}=uint64_t(uvec2(atomicOr(ssbo{}[{}],unpackUint2x32({}).x),atomicOr(ssbo{}[{}],"
+               "unpackUint2x32({}).y)));",
+               inst, binding.U32(), offset.U32(), value, binding.U32(), offset.U32() + 1, value);
 }
 
 void EmitStorageAtomicXor64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                             const IR::Value& offset, std::string_view value) {
-    ctx.AddU64("{}=atomicXor(ssbo{}_u64[{}],{});", inst, binding.U32(), offset.U32(), value);
+    ctx.AddU64("{}=uint64_t(uvec2(atomicXor(ssbo{}[{}],unpackUint2x32({}).x),atomicXor(ssbo{}[{}],"
+               "unpackUint2x32({}).y)));",
+               inst, binding.U32(), offset.U32(), value, binding.U32(), offset.U32() + 1, value);
 }
 
 void EmitStorageAtomicExchange64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                                  const IR::Value& offset, std::string_view value) {
-    ctx.AddU64("{}=atomicExchange(ssbo{}_u64[{}],{});", inst, binding.U32(), offset.U32(), value);
+    ctx.AddU64("{}=uint64_t(uvec2(atomicExchange(ssbo{}[{}],unpackUint2x32({}).x),atomicExchange("
+               "ssbo{}[{}],unpackUint2x32({}).y)));",
+               inst, binding.U32(), offset.U32(), value, binding.U32(), offset.U32() + 1, value);
 }
 
 void EmitStorageAtomicAddF32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                              const IR::Value& offset, std::string_view value) {
-    ctx.AddF32("{}=atomicAdd(ssbo{}_u32[{}],{});", inst, binding.U32(), offset.U32(), value);
+    CasFunctionF32(ctx, inst, binding, offset, value, "CasFloatAdd");
 }
 
 void EmitStorageAtomicAddF16x2([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
@@ -152,7 +213,7 @@ void EmitStorageAtomicAddF32x2([[maybe_unused]] EmitContext& ctx, [[maybe_unused
                                [[maybe_unused]] const IR::Value& binding,
                                [[maybe_unused]] const IR::Value& offset,
                                [[maybe_unused]] std::string_view value) {
-    throw NotImplementedException("GLSL Instrucion");
+    CasFunctionF32x2(ctx, inst, binding, offset, value, "CasFloatAdd32x2");
 }
 
 void EmitStorageAtomicMinF16x2([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
@@ -166,7 +227,7 @@ void EmitStorageAtomicMinF32x2([[maybe_unused]] EmitContext& ctx, [[maybe_unused
                                [[maybe_unused]] const IR::Value& binding,
                                [[maybe_unused]] const IR::Value& offset,
                                [[maybe_unused]] std::string_view value) {
-    throw NotImplementedException("GLSL Instrucion");
+    CasFunctionF32x2(ctx, inst, binding, offset, value, "CasFloatMin32x2");
 }
 
 void EmitStorageAtomicMaxF16x2([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
@@ -180,7 +241,7 @@ void EmitStorageAtomicMaxF32x2([[maybe_unused]] EmitContext& ctx, [[maybe_unused
                                [[maybe_unused]] const IR::Value& binding,
                                [[maybe_unused]] const IR::Value& offset,
                                [[maybe_unused]] std::string_view value) {
-    throw NotImplementedException("GLSL Instrucion");
+    CasFunctionF32x2(ctx, inst, binding, offset, value, "CasFloatMax32x2");
 }
 
 void EmitGlobalAtomicIAdd32(EmitContext&) {
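
To see how the CAS emulation expands, here is the GLSL produced by EmitStorageAtomicAddF32 via CasFunctionF32 and the cas_loop template above, assuming binding/cbuf_index 0, a constant offset of 4, a value register named R1, and a destination register named R0 (the register names and offset are hypothetical; reg_alloc chooses the real names):

// CAS retry loop expanded from the cas_loop fmt template
uint R0_u32;
for (;;){
    uint old_value=ssbo0[4];
    R0_u32=atomicCompSwap(ssbo0[4],old_value,CasFloatAdd(ssbo0[4],floatBitsToUint(R1)));
    if (R0_u32==old_value){break;}
}
float R0=uintBitsToFloat(R0_u32);

The loop retries until atomicCompSwap confirms no other invocation changed the word, which makes the float add atomic per 32-bit element. The 64-bit add/min/max paths above, by contrast, read and write the two 32-bit halves without such a loop, so they are not actually atomic, as the commented-out "Op falling to non-atomic" warnings note.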
diff --git a/src/shader_recompiler/backend/glsl/emit_glsl_instructions.h b/src/shader_recompiler/backend/glsl/emit_glsl_instructions.h
index c2836898f..56b812d84 100644
--- a/src/shader_recompiler/backend/glsl/emit_glsl_instructions.h
+++ b/src/shader_recompiler/backend/glsl/emit_glsl_instructions.h
@@ -113,7 +113,8 @@ void EmitLoadStorageU16(EmitContext& ctx, const IR::Value& binding, const IR::Va
 void EmitLoadStorageS16(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset);
 void EmitLoadStorage32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                        const IR::Value& offset);
-void EmitLoadStorage64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset);
+void EmitLoadStorage64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                       const IR::Value& offset);
 void EmitLoadStorage128(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset);
 void EmitWriteStorageU8(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                         std::string_view value);
diff --git a/src/shader_recompiler/backend/glsl/emit_glsl_memory.cpp b/src/shader_recompiler/backend/glsl/emit_glsl_memory.cpp
index d1e6f074d..8994c02a2 100644
--- a/src/shader_recompiler/backend/glsl/emit_glsl_memory.cpp
+++ b/src/shader_recompiler/backend/glsl/emit_glsl_memory.cpp
@@ -34,12 +34,13 @@ void EmitLoadStorageS16([[maybe_unused]] EmitContext& ctx,
 
 void EmitLoadStorage32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                        const IR::Value& offset) {
-    ctx.AddU32("{}=ssbo{}_u32[{}];", inst, binding.U32(), offset.U32());
+    ctx.AddU32("{}=ssbo{}[{}];", inst, binding.U32(), offset.U32());
 }
 
-void EmitLoadStorage64([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] const IR::Value& binding,
-                       [[maybe_unused]] const IR::Value& offset) {
-    throw NotImplementedException("GLSL Instrucion");
+void EmitLoadStorage64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                       const IR::Value& offset) {
+    ctx.AddU32x2("{}=uvec2(ssbo{}[{}],ssbo{}[{}]);", inst, binding.U32(), offset.U32(),
+                 binding.U32(), offset.U32() + 1);
 }
 
 void EmitLoadStorage128([[maybe_unused]] EmitContext& ctx,
@@ -78,12 +79,13 @@ void EmitWriteStorageS16([[maybe_unused]] EmitContext& ctx,
 
 void EmitWriteStorage32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                         std::string_view value) {
-    ctx.Add("ssbo{}_u32[{}]={};", binding.U32(), offset.U32(), value);
+    ctx.Add("ssbo{}[{}]={};", binding.U32(), offset.U32(), value);
 }
 
 void EmitWriteStorage64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                         std::string_view value) {
-    ctx.Add("ssbo{}_u32x2[{}]={};", binding.U32(), offset.U32(), value);
+    ctx.Add("ssbo{}[{}]={}.x;", binding.U32(), offset.U32(), value);
+    ctx.Add("ssbo{}[{}]={}.y;", binding.U32(), offset.U32() + 1, value);
 }
 
 void EmitWriteStorage128([[maybe_unused]] EmitContext& ctx,
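
Finally, a sketch of the GLSL the 64-bit memory paths in this file now produce, split into paired 32-bit SSBO accesses; the binding (0), base offset (8), and uvec2 register name R2 are hypothetical, taken only to instantiate the format strings above:

// EmitLoadStorage64: two consecutive words packed into a uvec2
uvec2 R2=uvec2(ssbo0[8],ssbo0[9]);
// EmitWriteStorage64: the halves written back one element at a time
ssbo0[8]=R2.x;
ssbo0[9]=R2.y;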