| author | 2021-05-31 14:17:00 -0400 |
|---|---|
| committer | 2021-07-22 21:51:37 -0400 |
| commit | 68d075d1e8af66c3f8044b162344bffc943168a8 |
| tree | 14eff63022bdfc599b128af5795eb77c4878fcdd /src/shader_recompiler/backend/glsl |
| parent | glsl: Implement geometry shaders |
glsl: Fix atomic SSBO offsets and implement misc getters
Diffstat (limited to 'src/shader_recompiler/backend/glsl')
4 files changed, 74 insertions, 67 deletions
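The core of the fix is visible in every storage-atomic emitter below: the SSBO element is now indexed with `{}>>2`, and the offset comes from `ctx.var_alloc.Consume(offset)` rather than the immediate `offset.U32()`, so a byte offset (which may live in a runtime variable) is turned into an index into the backend's 32-bit SSBO array. A minimal sketch of the emitted GLSL, assuming the SSBO is declared as an unsized `uint` array and using hypothetical names (`cs_ssbo0`, `off`, `val`) in place of the values substituted by `fmt::format`:

```glsl
// Sketch only: approximates the GLSL the backend emits for a 32-bit SSBO
// atomic. The buffer/variable names (cs_ssbo0, off, val) are hypothetical
// stand-ins for the values substituted through fmt::format in the diff below.
#version 450
layout(local_size_x = 1) in;
layout(std430, binding = 0) buffer ssbo_block_0 { uint cs_ssbo0[]; };

void main() {
    uint off = 16u; // byte offset carried by the IR
    uint val = 1u;
    // Old emission indexed the uint array with the byte offset directly:
    //     atomicAdd(cs_ssbo0[off], val)
    // New emission converts bytes to 32-bit word indices first:
    uint previous = atomicAdd(cs_ssbo0[off >> 2], val);
}
```

Indexing with the raw byte offset would land four times too far into the buffer for any non-zero offset, which is the bug the `>>2` addresses.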
diff --git a/src/shader_recompiler/backend/glsl/emit_glsl_atomic.cpp b/src/shader_recompiler/backend/glsl/emit_glsl_atomic.cpp
index 9714ffe33..5394f4a8c 100644
--- a/src/shader_recompiler/backend/glsl/emit_glsl_atomic.cpp
+++ b/src/shader_recompiler/backend/glsl/emit_glsl_atomic.cpp
@@ -27,16 +27,16 @@ void SharedCasFunction(EmitContext& ctx, IR::Inst& inst, std::string_view offset
| 27 | void SsboCasFunction(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, | 27 | void SsboCasFunction(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, |
| 28 | const IR::Value& offset, std::string_view value, std::string_view function) { | 28 | const IR::Value& offset, std::string_view value, std::string_view function) { |
| 29 | const auto ret{ctx.var_alloc.Define(inst, GlslVarType::U32)}; | 29 | const auto ret{ctx.var_alloc.Define(inst, GlslVarType::U32)}; |
| 30 | const std::string ssbo{ | 30 | const std::string ssbo{fmt::format("{}_ssbo{}[{}>>2]", ctx.stage_name, binding.U32(), |
| 31 | fmt::format("{}_ssbo{}[{}]", ctx.stage_name, binding.U32(), offset.U32())}; | 31 | ctx.var_alloc.Consume(offset))}; |
| 32 | ctx.Add(cas_loop.data(), ssbo, ret, ssbo, function, ssbo, value, ret); | 32 | ctx.Add(cas_loop.data(), ssbo, ret, ssbo, function, ssbo, value, ret); |
| 33 | } | 33 | } |
| 34 | 34 | ||
| 35 | void SsboCasFunctionF32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, | 35 | void SsboCasFunctionF32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, |
| 36 | const IR::Value& offset, std::string_view value, | 36 | const IR::Value& offset, std::string_view value, |
| 37 | std::string_view function) { | 37 | std::string_view function) { |
| 38 | const std::string ssbo{ | 38 | const std::string ssbo{fmt::format("{}_ssbo{}[{}>>2]", ctx.stage_name, binding.U32(), |
| 39 | fmt::format("{}_ssbo{}[{}]", ctx.stage_name, binding.U32(), offset.U32())}; | 39 | ctx.var_alloc.Consume(offset))}; |
| 40 | const auto ret{ctx.var_alloc.Define(inst, GlslVarType::U32)}; | 40 | const auto ret{ctx.var_alloc.Define(inst, GlslVarType::U32)}; |
| 41 | ctx.Add(cas_loop.data(), ssbo, ret, ssbo, function, ssbo, value, ret); | 41 | ctx.Add(cas_loop.data(), ssbo, ret, ssbo, function, ssbo, value, ret); |
| 42 | ctx.AddF32("{}=uintBitsToFloat({});", inst, ret); | 42 | ctx.AddF32("{}=uintBitsToFloat({});", inst, ret); |
@@ -111,8 +111,8 @@ void EmitSharedAtomicExchange64(EmitContext& ctx, IR::Inst& inst, std::string_vi
| 111 | 111 | ||
| 112 | void EmitStorageAtomicIAdd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, | 112 | void EmitStorageAtomicIAdd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, |
| 113 | const IR::Value& offset, std::string_view value) { | 113 | const IR::Value& offset, std::string_view value) { |
| 114 | ctx.AddU32("{}=atomicAdd({}_ssbo{}[{}],{});", inst, ctx.stage_name, binding.U32(), offset.U32(), | 114 | ctx.AddU32("{}=atomicAdd({}_ssbo{}[{}>>2],{});", inst, ctx.stage_name, binding.U32(), |
| 115 | value); | 115 | ctx.var_alloc.Consume(offset), value); |
| 116 | } | 116 | } |
| 117 | 117 | ||
| 118 | void EmitStorageAtomicSMin32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, | 118 | void EmitStorageAtomicSMin32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, |
@@ -123,8 +123,8 @@ void EmitStorageAtomicSMin32(EmitContext& ctx, IR::Inst& inst, const IR::Value&
| 123 | 123 | ||
| 124 | void EmitStorageAtomicUMin32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, | 124 | void EmitStorageAtomicUMin32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, |
| 125 | const IR::Value& offset, std::string_view value) { | 125 | const IR::Value& offset, std::string_view value) { |
| 126 | ctx.AddU32("{}=atomicMin({}_ssbo{}[{}],{});", inst, ctx.stage_name, binding.U32(), offset.U32(), | 126 | ctx.AddU32("{}=atomicMin({}_ssbo{}[{}>>2],{});", inst, ctx.stage_name, binding.U32(), |
| 127 | value); | 127 | ctx.var_alloc.Consume(offset), value); |
| 128 | } | 128 | } |
| 129 | 129 | ||
| 130 | void EmitStorageAtomicSMax32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, | 130 | void EmitStorageAtomicSMax32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, |
@@ -135,8 +135,8 @@ void EmitStorageAtomicSMax32(EmitContext& ctx, IR::Inst& inst, const IR::Value&
| 135 | 135 | ||
| 136 | void EmitStorageAtomicUMax32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, | 136 | void EmitStorageAtomicUMax32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, |
| 137 | const IR::Value& offset, std::string_view value) { | 137 | const IR::Value& offset, std::string_view value) { |
| 138 | ctx.AddU32("{}=atomicMax({}_ssbo{}[{}],{});", inst, ctx.stage_name, binding.U32(), offset.U32(), | 138 | ctx.AddU32("{}=atomicMax({}_ssbo{}[{}>>2],{});", inst, ctx.stage_name, binding.U32(), |
| 139 | value); | 139 | ctx.var_alloc.Consume(offset), value); |
| 140 | } | 140 | } |
| 141 | 141 | ||
| 142 | void EmitStorageAtomicInc32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, | 142 | void EmitStorageAtomicInc32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, |
@@ -151,116 +151,123 @@ void EmitStorageAtomicDec32(EmitContext& ctx, IR::Inst& inst, const IR::Value& b
| 151 | 151 | ||
| 152 | void EmitStorageAtomicAnd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, | 152 | void EmitStorageAtomicAnd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, |
| 153 | const IR::Value& offset, std::string_view value) { | 153 | const IR::Value& offset, std::string_view value) { |
| 154 | ctx.AddU32("{}=atomicAnd({}_ssbo{}[{}],{});", inst, ctx.stage_name, binding.U32(), offset.U32(), | 154 | ctx.AddU32("{}=atomicAnd({}_ssbo{}[{}>>2],{});", inst, ctx.stage_name, binding.U32(), |
| 155 | value); | 155 | ctx.var_alloc.Consume(offset), value); |
| 156 | } | 156 | } |
| 157 | 157 | ||
| 158 | void EmitStorageAtomicOr32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, | 158 | void EmitStorageAtomicOr32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, |
| 159 | const IR::Value& offset, std::string_view value) { | 159 | const IR::Value& offset, std::string_view value) { |
| 160 | ctx.AddU32("{}=atomicOr({}_ssbo{}[{}],{});", inst, ctx.stage_name, binding.U32(), offset.U32(), | 160 | ctx.AddU32("{}=atomicOr({}_ssbo{}[{}>>2],{});", inst, ctx.stage_name, binding.U32(), |
| 161 | value); | 161 | ctx.var_alloc.Consume(offset), value); |
| 162 | } | 162 | } |
| 163 | 163 | ||
| 164 | void EmitStorageAtomicXor32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, | 164 | void EmitStorageAtomicXor32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, |
| 165 | const IR::Value& offset, std::string_view value) { | 165 | const IR::Value& offset, std::string_view value) { |
| 166 | ctx.AddU32("{}=atomicXor({}_ssbo{}[{}],{});", inst, ctx.stage_name, binding.U32(), offset.U32(), | 166 | ctx.AddU32("{}=atomicXor({}_ssbo{}[{}>>2],{});", inst, ctx.stage_name, binding.U32(), |
| 167 | value); | 167 | ctx.var_alloc.Consume(offset), value); |
| 168 | } | 168 | } |
| 169 | 169 | ||
| 170 | void EmitStorageAtomicExchange32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, | 170 | void EmitStorageAtomicExchange32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, |
| 171 | const IR::Value& offset, std::string_view value) { | 171 | const IR::Value& offset, std::string_view value) { |
| 172 | ctx.AddU32("{}=atomicExchange({}_ssbo{}[{}],{});", inst, ctx.stage_name, binding.U32(), | 172 | ctx.AddU32("{}=atomicExchange({}_ssbo{}[{}>>2],{});", inst, ctx.stage_name, binding.U32(), |
| 173 | offset.U32(), value); | 173 | ctx.var_alloc.Consume(offset), value); |
| 174 | } | 174 | } |
| 175 | 175 | ||
| 176 | void EmitStorageAtomicIAdd64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, | 176 | void EmitStorageAtomicIAdd64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, |
| 177 | const IR::Value& offset, std::string_view value) { | 177 | const IR::Value& offset, std::string_view value) { |
| 178 | // LOG_WARNING(..., "Op falling to non-atomic"); | 178 | // LOG_WARNING(..., "Op falling to non-atomic"); |
| 179 | ctx.AddU64("{}=packUint2x32(uvec2({}_ssbo{}[{}],{}_ssbo{}[{}]));", inst, ctx.stage_name, | 179 | ctx.AddU64("{}=packUint2x32(uvec2({}_ssbo{}[{}>>2],{}_ssbo{}[({}>>2)+1]));", inst, |
| 180 | binding.U32(), offset.U32(), ctx.stage_name, binding.U32(), offset.U32() + 1); | 180 | ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), ctx.stage_name, |
| 181 | ctx.Add("{}_ssbo{}[{}]+=unpackUint2x32({}).x;{}_ssbo{}[{}]+=unpackUint2x32({}).y;", | 181 | binding.U32(), ctx.var_alloc.Consume(offset)); |
| 182 | ctx.stage_name, binding.U32(), offset.U32(), value, ctx.stage_name, binding.U32(), | 182 | ctx.Add("{}_ssbo{}[{}>>2]+=unpackUint2x32({}).x;{}_ssbo{}[({}>>2)+1]+=unpackUint2x32({}).y;", |
| 183 | offset.U32() + 1, value); | 183 | ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), value, ctx.stage_name, |
| 184 | binding.U32(), ctx.var_alloc.Consume(offset), value); | ||
| 184 | } | 185 | } |
| 185 | 186 | ||
| 186 | void EmitStorageAtomicSMin64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, | 187 | void EmitStorageAtomicSMin64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, |
| 187 | const IR::Value& offset, std::string_view value) { | 188 | const IR::Value& offset, std::string_view value) { |
| 188 | // LOG_WARNING(..., "Op falling to non-atomic"); | 189 | // LOG_WARNING(..., "Op falling to non-atomic"); |
| 189 | ctx.AddS64("{}=packInt2x32(ivec2({}_ssbo{}[{}],{}_ssbo{}[{}]));", inst, ctx.stage_name, | 190 | ctx.AddS64("{}=packInt2x32(ivec2({}_ssbo{}[{}>>2],{}_ssbo{}[({}>>2)+1]));", inst, |
| 190 | binding.U32(), offset.U32(), ctx.stage_name, binding.U32(), offset.U32() + 1); | 191 | ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), ctx.stage_name, |
| 192 | binding.U32(), ctx.var_alloc.Consume(offset)); | ||
| 191 | ctx.Add("for(int i=0;i<2;++i){{ " | 193 | ctx.Add("for(int i=0;i<2;++i){{ " |
| 192 | "{}_ssbo{}[{}+i]=uint(min(int({}_ssbo{}[{}+i]),unpackInt2x32(int64_t({}))[i]));}}", | 194 | "{}_ssbo{}[({}>>2)+i]=uint(min(int({}_ssbo{}[({}>>2)+i]),unpackInt2x32(int64_t({}))[i])" |
| 193 | ctx.stage_name, binding.U32(), offset.U32(), ctx.stage_name, binding.U32(), | 195 | ");}}", |
| 194 | offset.U32(), value); | 196 | ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), ctx.stage_name, |
| 197 | binding.U32(), ctx.var_alloc.Consume(offset), value); | ||
| 195 | } | 198 | } |
| 196 | 199 | ||
| 197 | void EmitStorageAtomicUMin64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, | 200 | void EmitStorageAtomicUMin64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, |
| 198 | const IR::Value& offset, std::string_view value) { | 201 | const IR::Value& offset, std::string_view value) { |
| 199 | // LOG_WARNING(..., "Op falling to non-atomic"); | 202 | // LOG_WARNING(..., "Op falling to non-atomic"); |
| 200 | ctx.AddU64("{}=packUint2x32(uvec2({}_ssbo{}[{}],{}_ssbo{}[{}]));", inst, ctx.stage_name, | 203 | ctx.AddU64("{}=packUint2x32(uvec2({}_ssbo{}[{}>>2],{}_ssbo{}[({}>>2)+1]));", inst, |
| 201 | binding.U32(), offset.U32(), ctx.stage_name, binding.U32(), offset.U32() + 1); | 204 | ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), ctx.stage_name, |
| 205 | binding.U32(), ctx.var_alloc.Consume(offset)); | ||
| 202 | ctx.Add("for(int i=0;i<2;++i){{ " | 206 | ctx.Add("for(int i=0;i<2;++i){{ " |
| 203 | "{}_ssbo{}[{}+i]=min({}_ssbo{}[{}+i],unpackUint2x32(uint64_t({}))[i]);}}", | 207 | "{}_ssbo{}[({}>>2)+i]=min({}_ssbo{}[({}>>2)+i],unpackUint2x32(uint64_t({}))[i]);}}", |
| 204 | ctx.stage_name, binding.U32(), offset.U32(), ctx.stage_name, binding.U32(), | 208 | ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), ctx.stage_name, |
| 205 | offset.U32(), value); | 209 | binding.U32(), ctx.var_alloc.Consume(offset), value); |
| 206 | } | 210 | } |
| 207 | 211 | ||
| 208 | void EmitStorageAtomicSMax64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, | 212 | void EmitStorageAtomicSMax64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, |
| 209 | const IR::Value& offset, std::string_view value) { | 213 | const IR::Value& offset, std::string_view value) { |
| 210 | // LOG_WARNING(..., "Op falling to non-atomic"); | 214 | // LOG_WARNING(..., "Op falling to non-atomic"); |
| 211 | ctx.AddS64("{}=packInt2x32(ivec2({}_ssbo{}[{}],{}_ssbo{}[{}]));", inst, ctx.stage_name, | 215 | ctx.AddS64("{}=packInt2x32(ivec2({}_ssbo{}[{}>>2],{}_ssbo{}[({}>>2)+1]));", inst, |
| 212 | binding.U32(), offset.U32(), ctx.stage_name, binding.U32(), offset.U32() + 1); | 216 | ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), ctx.stage_name, |
| 217 | binding.U32(), ctx.var_alloc.Consume(offset)); | ||
| 213 | ctx.Add("for(int i=0;i<2;++i){{ " | 218 | ctx.Add("for(int i=0;i<2;++i){{ " |
| 214 | "{}_ssbo{}[{}+i]=uint(max(int({}_ssbo{}[{}+i]),unpackInt2x32(int64_t({}))[i]));}}", | 219 | "{}_ssbo{}[({}>>2)+i]=uint(max(int({}_ssbo{}[({}>>2)+i]),unpackInt2x32(int64_t({}))[i])" |
| 215 | ctx.stage_name, binding.U32(), offset.U32(), ctx.stage_name, binding.U32(), | 220 | ");}}", |
| 216 | offset.U32(), value); | 221 | ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), ctx.stage_name, |
| 222 | binding.U32(), ctx.var_alloc.Consume(offset), value); | ||
| 217 | } | 223 | } |
| 218 | 224 | ||
| 219 | void EmitStorageAtomicUMax64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, | 225 | void EmitStorageAtomicUMax64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, |
| 220 | const IR::Value& offset, std::string_view value) { | 226 | const IR::Value& offset, std::string_view value) { |
| 221 | // LOG_WARNING(..., "Op falling to non-atomic"); | 227 | // LOG_WARNING(..., "Op falling to non-atomic"); |
| 222 | ctx.AddU64("{}=packUint2x32(uvec2({}_ssbo{}[{}],{}_ssbo{}[{}]));", inst, ctx.stage_name, | 228 | |
| 223 | binding.U32(), offset.U32(), ctx.stage_name, binding.U32(), offset.U32() + 1); | 229 | ctx.AddU64("{}=packUint2x32(uvec2({}_ssbo{}[{}>>2],{}_ssbo{}[({}>>2)+1]));", inst, |
| 230 | ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), ctx.stage_name, | ||
| 231 | binding.U32(), ctx.var_alloc.Consume(offset)); | ||
| 224 | ctx.Add("for(int " | 232 | ctx.Add("for(int " |
| 225 | "i=0;i<2;++i){{{}_ssbo{}[{}+i]=max({}_ssbo{}[{}+i],unpackUint2x32(uint64_t({}))[i]);}}", | 233 | "i=0;i<2;++i){{{}_ssbo{}[({}>>2)+i]=max({}_ssbo{}[({}>>2)+i],unpackUint2x32(uint64_t({}" |
| 226 | ctx.stage_name, binding.U32(), offset.U32(), ctx.stage_name, binding.U32(), | 234 | "))[i]);}}", |
| 227 | offset.U32(), value); | 235 | ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), ctx.stage_name, |
| 236 | binding.U32(), ctx.var_alloc.Consume(offset), value); | ||
| 228 | } | 237 | } |
| 229 | 238 | ||
| 230 | void EmitStorageAtomicAnd64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, | 239 | void EmitStorageAtomicAnd64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, |
| 231 | const IR::Value& offset, std::string_view value) { | 240 | const IR::Value& offset, std::string_view value) { |
| 232 | ctx.AddU64("{}=packUint2x32(uvec2(atomicAnd({}_ssbo{}[{}],unpackUint2x32({}).x),atomicAnd({}_" | 241 | ctx.AddU64( |
| 233 | "ssbo{}[{}]," | 242 | "{}=packUint2x32(uvec2(atomicAnd({}_ssbo{}[{}>>2],unpackUint2x32({}).x),atomicAnd({}_" |
| 234 | "unpackUint2x32({}).y)));", | 243 | "ssbo{}[({}>>2)+1],unpackUint2x32({}).y)));", |
| 235 | inst, ctx.stage_name, binding.U32(), offset.U32(), value, ctx.stage_name, | 244 | inst, ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), value, ctx.stage_name, |
| 236 | binding.U32(), offset.U32() + 1, value); | 245 | binding.U32(), ctx.var_alloc.Consume(offset), value); |
| 237 | } | 246 | } |
| 238 | 247 | ||
| 239 | void EmitStorageAtomicOr64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, | 248 | void EmitStorageAtomicOr64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, |
| 240 | const IR::Value& offset, std::string_view value) { | 249 | const IR::Value& offset, std::string_view value) { |
| 241 | ctx.AddU64( | 250 | ctx.AddU64("{}=packUint2x32(uvec2(atomicOr({}_ssbo{}[{}>>2],unpackUint2x32({}).x),atomicOr({}_" |
| 242 | "{}=packUint2x32(uvec2(atomicOr({}_ssbo{}[{}],unpackUint2x32({}).x),atomicOr({}_ssbo{}[{}]," | 251 | "ssbo{}[({}>>2)+1],unpackUint2x32({}).y)));", |
| 243 | "unpackUint2x32({}).y)));", | 252 | inst, ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), value, |
| 244 | inst, ctx.stage_name, binding.U32(), offset.U32(), value, ctx.stage_name, binding.U32(), | 253 | ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), value); |
| 245 | offset.U32() + 1, value); | ||
| 246 | } | 254 | } |
| 247 | 255 | ||
| 248 | void EmitStorageAtomicXor64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, | 256 | void EmitStorageAtomicXor64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, |
| 249 | const IR::Value& offset, std::string_view value) { | 257 | const IR::Value& offset, std::string_view value) { |
| 250 | ctx.AddU64("{}=packUint2x32(uvec2(atomicXor({}_ssbo{}[{}],unpackUint2x32({}).x),atomicXor({}_" | 258 | ctx.AddU64( |
| 251 | "ssbo{}[{}]," | 259 | "{}=packUint2x32(uvec2(atomicXor({}_ssbo{}[{}>>2],unpackUint2x32({}).x),atomicXor({}_" |
| 252 | "unpackUint2x32({}).y)));", | 260 | "ssbo{}[({}>>2)+1],unpackUint2x32({}).y)));", |
| 253 | inst, ctx.stage_name, binding.U32(), offset.U32(), value, ctx.stage_name, | 261 | inst, ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), value, ctx.stage_name, |
| 254 | binding.U32(), offset.U32() + 1, value); | 262 | binding.U32(), ctx.var_alloc.Consume(offset), value); |
| 255 | } | 263 | } |
| 256 | 264 | ||
| 257 | void EmitStorageAtomicExchange64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, | 265 | void EmitStorageAtomicExchange64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, |
| 258 | const IR::Value& offset, std::string_view value) { | 266 | const IR::Value& offset, std::string_view value) { |
| 259 | ctx.AddU64( | 267 | ctx.AddU64("{}=packUint2x32(uvec2(atomicExchange({}_ssbo{}[{}>>2],unpackUint2x32({}).x)," |
| 260 | "{}=packUint2x32(uvec2(atomicExchange({}_ssbo{}[{}],unpackUint2x32({}).x),atomicExchange(" | 268 | "atomicExchange({}_ssbo{}[({}>>2)+1],unpackUint2x32({}).y)));", |
| 261 | "{}_ssbo{}[{}],unpackUint2x32({}).y)));", | 269 | inst, ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), value, |
| 262 | inst, ctx.stage_name, binding.U32(), offset.U32(), value, ctx.stage_name, binding.U32(), | 270 | ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), value); |
| 263 | offset.U32() + 1, value); | ||
| 264 | } | 271 | } |
| 265 | 272 | ||
| 266 | void EmitStorageAtomicAddF32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, | 273 | void EmitStorageAtomicAddF32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, |
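The 64-bit variants have no single GLSL atomic to lean on, so, as the commented-out `LOG_WARNING(..., "Op falling to non-atomic")` notes, they are lowered to plain reads and writes of two adjacent 32-bit words, now also addressed with `({}>>2)` and `({}>>2)+1`. A sketch of what the `EmitStorageAtomicIAdd64` path above expands to, again with hypothetical names:

```glsl
// Sketch only: the non-atomic 64-bit fallback behind EmitStorageAtomicIAdd64,
// with hypothetical names. Needs GL_ARB_gpu_shader_int64 for uint64_t and
// packUint2x32/unpackUint2x32 on 64-bit values.
#version 450
#extension GL_ARB_gpu_shader_int64 : require
layout(local_size_x = 1) in;
layout(std430, binding = 0) buffer ssbo_block_0 { uint cs_ssbo0[]; };

void main() {
    uint off = 16u;              // byte offset carried by the IR
    uint64_t val = uint64_t(2u); // 64-bit addend
    // Read the previous value from two adjacent 32-bit words...
    uint64_t previous = packUint2x32(uvec2(cs_ssbo0[off >> 2], cs_ssbo0[(off >> 2) + 1]));
    // ...then add each half back separately; the two stores are not atomic.
    cs_ssbo0[off >> 2] += unpackUint2x32(val).x;
    cs_ssbo0[(off >> 2) + 1] += unpackUint2x32(val).y;
}
```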
diff --git a/src/shader_recompiler/backend/glsl/emit_glsl_instructions.h b/src/shader_recompiler/backend/glsl/emit_glsl_instructions.h
index 5e0195b0f..24c36bbda 100644
--- a/src/shader_recompiler/backend/glsl/emit_glsl_instructions.h
+++ b/src/shader_recompiler/backend/glsl/emit_glsl_instructions.h
@@ -92,7 +92,7 @@ void EmitWorkgroupId(EmitContext& ctx, IR::Inst& inst);
| 92 | void EmitLocalInvocationId(EmitContext& ctx, IR::Inst& inst); | 92 | void EmitLocalInvocationId(EmitContext& ctx, IR::Inst& inst); |
| 93 | void EmitInvocationId(EmitContext& ctx, IR::Inst& inst); | 93 | void EmitInvocationId(EmitContext& ctx, IR::Inst& inst); |
| 94 | void EmitSampleId(EmitContext& ctx, IR::Inst& inst); | 94 | void EmitSampleId(EmitContext& ctx, IR::Inst& inst); |
| 95 | void EmitIsHelperInvocation(EmitContext& ctx); | 95 | void EmitIsHelperInvocation(EmitContext& ctx, IR::Inst& inst); |
| 96 | void EmitYDirection(EmitContext& ctx, IR::Inst& inst); | 96 | void EmitYDirection(EmitContext& ctx, IR::Inst& inst); |
| 97 | void EmitLoadLocal(EmitContext& ctx, IR::Inst& inst, std::string_view word_offset); | 97 | void EmitLoadLocal(EmitContext& ctx, IR::Inst& inst, std::string_view word_offset); |
| 98 | void EmitWriteLocal(EmitContext& ctx, std::string_view word_offset, std::string_view value); | 98 | void EmitWriteLocal(EmitContext& ctx, std::string_view word_offset, std::string_view value); |
diff --git a/src/shader_recompiler/backend/glsl/emit_glsl_not_implemented.cpp b/src/shader_recompiler/backend/glsl/emit_glsl_not_implemented.cpp
index 9e5715605..3ed4e04d3 100644
--- a/src/shader_recompiler/backend/glsl/emit_glsl_not_implemented.cpp
+++ b/src/shader_recompiler/backend/glsl/emit_glsl_not_implemented.cpp
@@ -211,8 +211,8 @@ void EmitSampleId(EmitContext& ctx, IR::Inst& inst) {
| 211 | NotImplemented(); | 211 | NotImplemented(); |
| 212 | } | 212 | } |
| 213 | 213 | ||
| 214 | void EmitIsHelperInvocation(EmitContext& ctx) { | 214 | void EmitIsHelperInvocation(EmitContext& ctx, IR::Inst& inst) { |
| 215 | NotImplemented(); | 215 | ctx.AddU1("{}=gl_HelperInvocation;", inst); |
| 216 | } | 216 | } |
| 217 | 217 | ||
| 218 | void EmitYDirection(EmitContext& ctx, IR::Inst& inst) { | 218 | void EmitYDirection(EmitContext& ctx, IR::Inst& inst) { |
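`EmitIsHelperInvocation` now takes the destination `IR::Inst&` (matching the header change above) and forwards the `gl_HelperInvocation` fragment-shader built-in instead of raising `NotImplemented()`. A minimal fragment-shader sketch of the built-in being surfaced (the output name is hypothetical):

```glsl
// Sketch only: the fragment-shader built-in the getter now maps to.
// The output variable name is hypothetical.
#version 450
layout(location = 0) out vec4 frag_color;

void main() {
    // gl_HelperInvocation is true for invocations launched only to provide
    // derivative information for neighbouring pixels; their writes are discarded.
    frag_color = gl_HelperInvocation ? vec4(0.0) : vec4(1.0);
}
```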
diff --git a/src/shader_recompiler/backend/glsl/emit_glsl_warp.cpp b/src/shader_recompiler/backend/glsl/emit_glsl_warp.cpp
index fa11c656f..38c49b164 100644
--- a/src/shader_recompiler/backend/glsl/emit_glsl_warp.cpp
+++ b/src/shader_recompiler/backend/glsl/emit_glsl_warp.cpp
@@ -38,7 +38,7 @@ std::string GetMaxThreadId(std::string_view thread_id, std::string_view clamp,
| 38 | } // namespace | 38 | } // namespace |
| 39 | 39 | ||
| 40 | void EmitLaneId([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst) { | 40 | void EmitLaneId([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst) { |
| 41 | NotImplemented(); | 41 | ctx.AddU32("{}=gl_SubGroupInvocationARB&31u;", inst); |
| 42 | } | 42 | } |
| 43 | 43 | ||
| 44 | void EmitVoteAll(EmitContext& ctx, IR::Inst& inst, std::string_view pred) { | 44 | void EmitVoteAll(EmitContext& ctx, IR::Inst& inst, std::string_view pred) { |
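`EmitLaneId` now maps to `gl_SubGroupInvocationARB` from `GL_ARB_shader_ballot`, masked with `31u` so the reported lane index stays within a 32-wide warp even when the host subgroup is wider, presumably to match the guest GPU's 32-thread warps. A small compute-shader sketch (buffer and variable names are hypothetical):

```glsl
// Sketch only: the lane-id expression now emitted, shown in a hypothetical
// compute shader that records each invocation's lane index.
#version 450
#extension GL_ARB_shader_ballot : require
layout(local_size_x = 64) in;
layout(std430, binding = 0) buffer lane_block { uint lane_ids[]; };

void main() {
    // Mask to the low five bits so the result is always a 0-31 warp lane,
    // even if the host subgroup is wider than 32 invocations.
    lane_ids[gl_LocalInvocationIndex] = gl_SubGroupInvocationARB & 31u;
}
```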