path: root/src/shader_recompiler/backend/glsl/emit_glsl_atomic.cpp
author     ameerj  2021-06-14 23:02:07 -0400
committer  ameerj  2021-07-22 21:51:38 -0400
commit     6eea88d6149f7122777b325c7fc8549e2a974e64
tree       adc74d8545021665fee7e46f8d4b1b0e14b1c823  /src/shader_recompiler/backend/glsl/emit_glsl_atomic.cpp
parent     gl_shader_cache: Implement async shaders
glsl: Cleanup/Address feedback
Diffstat (limited to 'src/shader_recompiler/backend/glsl/emit_glsl_atomic.cpp')
-rw-r--r--  src/shader_recompiler/backend/glsl/emit_glsl_atomic.cpp  12
1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/src/shader_recompiler/backend/glsl/emit_glsl_atomic.cpp b/src/shader_recompiler/backend/glsl/emit_glsl_atomic.cpp
index 9152ace98..772acc5a4 100644
--- a/src/shader_recompiler/backend/glsl/emit_glsl_atomic.cpp
+++ b/src/shader_recompiler/backend/glsl/emit_glsl_atomic.cpp
@@ -98,7 +98,7 @@ void EmitSharedAtomicExchange32(EmitContext& ctx, IR::Inst& inst, std::string_vi
 
 void EmitSharedAtomicExchange64(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset,
                                 std::string_view value) {
-    LOG_WARNING(Shader_GLSL, "Int64 Atomics not supported, fallback to non-atomic");
+    LOG_WARNING(Shader_GLSL, "Int64 atomics not supported, fallback to non-atomic");
     ctx.AddU64("{}=packUint2x32(uvec2(smem[{}>>2],smem[({}+4)>>2]));", inst, pointer_offset,
                pointer_offset);
     ctx.Add("smem[{}>>2]=unpackUint2x32({}).x;smem[({}+4)>>2]=unpackUint2x32({}).y;",
@@ -171,7 +171,7 @@ void EmitStorageAtomicExchange32(EmitContext& ctx, IR::Inst& inst, const IR::Val
 
 void EmitStorageAtomicIAdd64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                              const IR::Value& offset, std::string_view value) {
-    LOG_WARNING(Shader_GLSL, "Int64 Atomics not supported, fallback to non-atomic");
+    LOG_WARNING(Shader_GLSL, "Int64 atomics not supported, fallback to non-atomic");
     ctx.AddU64("{}=packUint2x32(uvec2({}_ssbo{}[{}>>2],{}_ssbo{}[({}>>2)+1]));", inst,
                ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), ctx.stage_name,
                binding.U32(), ctx.var_alloc.Consume(offset));
@@ -182,7 +182,7 @@ void EmitStorageAtomicIAdd64(EmitContext& ctx, IR::Inst& inst, const IR::Value&
 
 void EmitStorageAtomicSMin64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                              const IR::Value& offset, std::string_view value) {
-    LOG_WARNING(Shader_GLSL, "Int64 Atomics not supported, fallback to non-atomic");
+    LOG_WARNING(Shader_GLSL, "Int64 atomics not supported, fallback to non-atomic");
     ctx.AddU64("{}=packInt2x32(ivec2({}_ssbo{}[{}>>2],{}_ssbo{}[({}>>2)+1]));", inst,
                ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), ctx.stage_name,
                binding.U32(), ctx.var_alloc.Consume(offset));
@@ -195,7 +195,7 @@ void EmitStorageAtomicSMin64(EmitContext& ctx, IR::Inst& inst, const IR::Value&
 
 void EmitStorageAtomicUMin64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                              const IR::Value& offset, std::string_view value) {
-    LOG_WARNING(Shader_GLSL, "Int64 Atomics not supported, fallback to non-atomic");
+    LOG_WARNING(Shader_GLSL, "Int64 atomics not supported, fallback to non-atomic");
     ctx.AddU64("{}=packUint2x32(uvec2({}_ssbo{}[{}>>2],{}_ssbo{}[({}>>2)+1]));", inst,
                ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), ctx.stage_name,
                binding.U32(), ctx.var_alloc.Consume(offset));
@@ -207,7 +207,7 @@ void EmitStorageAtomicUMin64(EmitContext& ctx, IR::Inst& inst, const IR::Value&
 
 void EmitStorageAtomicSMax64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                              const IR::Value& offset, std::string_view value) {
-    LOG_WARNING(Shader_GLSL, "Int64 Atomics not supported, fallback to non-atomic");
+    LOG_WARNING(Shader_GLSL, "Int64 atomics not supported, fallback to non-atomic");
     ctx.AddU64("{}=packInt2x32(ivec2({}_ssbo{}[{}>>2],{}_ssbo{}[({}>>2)+1]));", inst,
                ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), ctx.stage_name,
                binding.U32(), ctx.var_alloc.Consume(offset));
@@ -220,7 +220,7 @@ void EmitStorageAtomicSMax64(EmitContext& ctx, IR::Inst& inst, const IR::Value&
 
 void EmitStorageAtomicUMax64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                              const IR::Value& offset, std::string_view value) {
-    LOG_WARNING(Shader_GLSL, "Int64 Atomics not supported, fallback to non-atomic");
+    LOG_WARNING(Shader_GLSL, "Int64 atomics not supported, fallback to non-atomic");
     ctx.AddU64("{}=packUint2x32(uvec2({}_ssbo{}[{}>>2],{}_ssbo{}[({}>>2)+1]));", inst,
                ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), ctx.stage_name,
                binding.U32(), ctx.var_alloc.Consume(offset));
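
Note (not part of the commit): every hunk above touches the same non-atomic fallback pattern, where a 64-bit atomic is emulated by reading and writing two adjacent 32-bit words. The sketch below is a minimal, standalone illustration of the GLSL text that the EmitSharedAtomicExchange64 format strings would expand to; it is an assumption-based example, not code from the repository. The names x64, off and val are hypothetical placeholders, fmt::format stands in for EmitContext::AddU64/Add, and the argument order for the store call is inferred from its placeholders since the actual argument list falls outside the hunk.

// Minimal sketch (assumption): expands the shared-memory fallback format
// strings shown in the first hunk to visualize the emitted GLSL.
#include <fmt/format.h>
#include <iostream>
#include <string_view>

int main() {
    std::string_view inst = "x64";   // hypothetical result variable name
    std::string_view offset = "off"; // hypothetical shared-memory byte offset
    std::string_view value = "val";  // hypothetical 64-bit value to store

    // Load: pack two adjacent 32-bit shared-memory words into a 64-bit result.
    std::cout << fmt::format("{}=packUint2x32(uvec2(smem[{}>>2],smem[({}+4)>>2]));\n",
                             inst, offset, offset);
    // Store: unpack the new value and write each half back, non-atomically.
    std::cout << fmt::format("smem[{}>>2]=unpackUint2x32({}).x;smem[({}+4)>>2]=unpackUint2x32({}).y;\n",
                             offset, value, offset, value);
}

Because the two halves are loaded and stored separately, the exchange is not actually atomic, which is why each of these functions logs the "Int64 atomics not supported, fallback to non-atomic" warning that this commit rewords.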