Diffstat (limited to 'src/shader_recompiler/backend/glsl')
8 files changed, 406 insertions, 325 deletions
diff --git a/src/shader_recompiler/backend/glsl/emit_context.cpp b/src/shader_recompiler/backend/glsl/emit_context.cpp
index 67772c46d..3c610a08a 100644
--- a/src/shader_recompiler/backend/glsl/emit_context.cpp
+++ b/src/shader_recompiler/backend/glsl/emit_context.cpp
@@ -19,8 +19,10 @@ EmitContext::EmitContext(IR::Program& program, [[maybe_unused]] Bindings& bindin
                               program.workgroup_size[2]);
     }
     code += header;
+
     DefineConstantBuffers();
     DefineStorageBuffers();
+    DefineHelperFunctions();
     code += "void main(){\n";
 }
 
@@ -28,6 +30,15 @@ void EmitContext::SetupExtensions(std::string& header) {
     if (info.uses_int64) {
         header += "#extension GL_ARB_gpu_shader_int64 : enable\n";
     }
+    if (info.uses_int64_bit_atomics) {
+        header += "#extension GL_NV_shader_atomic_int64 : enable\n";
+    }
+    if (info.uses_atomic_f32_add) {
+        header += "#extension GL_NV_shader_atomic_float : enable\n";
+    }
+    if (info.uses_atomic_f16x2_add || info.uses_atomic_f16x2_min || info.uses_atomic_f16x2_max) {
+        header += "#extension NV_shader_atomic_fp16_vector : enable\n";
+    }
 }
 
 void EmitContext::DefineConstantBuffers() {
@@ -48,18 +59,39 @@ void EmitContext::DefineStorageBuffers() {
     }
     u32 binding{};
     for (const auto& desc : info.storage_buffers_descriptors) {
-        if (True(info.used_storage_buffer_types & IR::Type::U32) ||
-            True(info.used_storage_buffer_types & IR::Type::F32)) {
+        if (info.uses_s32_atomics) {
+            Add("layout(std430,binding={}) buffer ssbo_{}_s32{{int ssbo{}_s32[];}};", binding,
+                binding, desc.cbuf_index, desc.count);
+        }
+        if (True(info.used_storage_buffer_types & IR::Type::U32)) {
             Add("layout(std430,binding={}) buffer ssbo_{}_u32{{uint ssbo{}_u32[];}};", binding,
                 binding, desc.cbuf_index, desc.count);
         }
-        if (True(info.used_storage_buffer_types & IR::Type::U32x2) ||
-            True(info.used_storage_buffer_types & IR::Type::F32x2)) {
-            Add("layout(std430,binding={}) buffer ssbo_{}_u64{{uvec2 ssbo{}_u64[];}};", binding,
+        if (True(info.used_storage_buffer_types & IR::Type::F32)) {
+            Add("layout(std430,binding={}) buffer ssbo_{}_f32{{float ssbo{}_f32[];}};", binding,
+                binding, desc.cbuf_index, desc.count);
+        }
+        if (True(info.used_storage_buffer_types & IR::Type::U32x2)) {
+            Add("layout(std430,binding={}) buffer ssbo_{}_u32x2{{uvec2 ssbo{}_u32x2[];}};", binding,
+                binding, desc.cbuf_index, desc.count);
+        }
+        if (True(info.used_storage_buffer_types & IR::Type::U64) ||
+            True(info.used_storage_buffer_types & IR::Type::F64)) {
+            Add("layout(std430,binding={}) buffer ssbo_{}_u64{{uint64_t ssbo{}_u64[];}};", binding,
                 binding, desc.cbuf_index, desc.count);
         }
         ++binding;
     }
 }
 
+void EmitContext::DefineHelperFunctions() {
+    if (info.uses_global_increment) {
+        code += "uint CasIncrement(uint op_a,uint op_b){return(op_a>=op_b)?0u:(op_a+1u);}\n";
+    }
+    if (info.uses_global_decrement) {
+        code +=
+            "uint CasDecrement(uint op_a,uint op_b){return(op_a==0||op_a>op_b)?op_b:(op_a-1u);}\n";
+    }
+}
+
 } // namespace Shader::Backend::GLSL
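For orientation, the GLSL preamble these emit_context.cpp changes produce would look roughly like the following for a shader with one storage buffer descriptor whose binding and cbuf_index are both 0, used as uint and int, and whose program hits the global increment/decrement paths (the descriptor indices and Info flags are assumed here for illustration; each line follows the format strings in the hunk above):

    layout(std430,binding=0) buffer ssbo_0_s32{int ssbo0_s32[];};
    layout(std430,binding=0) buffer ssbo_0_u32{uint ssbo0_u32[];};
    uint CasIncrement(uint op_a,uint op_b){return(op_a>=op_b)?0u:(op_a+1u);}
    uint CasDecrement(uint op_a,uint op_b){return(op_a==0||op_a>op_b)?op_b:(op_a-1u);}
    void main(){
    ...
    }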
diff --git a/src/shader_recompiler/backend/glsl/emit_context.h b/src/shader_recompiler/backend/glsl/emit_context.h
index 9472f71b9..ca5657fe7 100644
--- a/src/shader_recompiler/backend/glsl/emit_context.h
+++ b/src/shader_recompiler/backend/glsl/emit_context.h
@@ -31,13 +31,6 @@ class EmitContext {
 public:
     explicit EmitContext(IR::Program& program, Bindings& bindings, const Profile& profile_);
 
-    // template <typename... Args>
-    // void Add(const char* format_str, IR::Inst& inst, Args&&... args) {
-    //     code += fmt::format(format_str, reg_alloc.Define(inst), std::forward<Args>(args)...);
-    //     // TODO: Remove this
-    //     code += '\n';
-    // }
-
     template <Type type, typename... Args>
     void Add(const char* format_str, IR::Inst& inst, Args&&... args) {
         code += fmt::format(format_str, reg_alloc.Define(inst, type), std::forward<Args>(args)...);
@@ -106,6 +99,7 @@ private:
     void SetupExtensions(std::string& header);
     void DefineConstantBuffers();
    void DefineStorageBuffers();
+    void DefineHelperFunctions();
 };
 
 } // namespace Shader::Backend::GLSL
diff --git a/src/shader_recompiler/backend/glsl/emit_glsl_atomic.cpp b/src/shader_recompiler/backend/glsl/emit_glsl_atomic.cpp
index e69de29bb..f3ef37873 100644
--- a/src/shader_recompiler/backend/glsl/emit_glsl_atomic.cpp
+++ b/src/shader_recompiler/backend/glsl/emit_glsl_atomic.cpp
@@ -0,0 +1,301 @@
+
+// Copyright 2021 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#include <string_view>
+
+#include "shader_recompiler/backend/glsl/emit_context.h"
+#include "shader_recompiler/backend/glsl/emit_glsl_instructions.h"
+#include "shader_recompiler/frontend/ir/value.h"
+#include "shader_recompiler/profile.h"
+
+namespace Shader::Backend::GLSL {
+namespace {
+static constexpr std::string_view cas_loop{R"(
+{} {};
+for (;;){{
+    {} old_value={};
+    {} = atomicCompSwap({},old_value,{}({},{}));
+    if ({}==old_value){{break;}}
+}})"};
+
+void CasFunction(EmitContext& ctx, IR::Inst& inst, std::string_view ssbo, std::string_view value,
+                 std::string_view type, std::string_view function) {
+    const auto ret{ctx.reg_alloc.Define(inst)};
+    ctx.Add(cas_loop.data(), type, ret, type, ssbo, ret, ssbo, function, ssbo, value, ret);
+}
+} // namespace
+
+void EmitStorageAtomicIAdd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                             const IR::Value& offset, std::string_view value) {
+    ctx.AddU32("{}=atomicAdd(ssbo{}_u32[{}],{});", inst, binding.U32(), offset.U32(), value);
+}
+
+void EmitStorageAtomicSMin32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                             const IR::Value& offset, std::string_view value) {
+    ctx.AddS32("{}=atomicMin(ssbo{}_s32[{}],int({}));", inst, binding.U32(), offset.U32(), value);
+}
+
+void EmitStorageAtomicUMin32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                             const IR::Value& offset, std::string_view value) {
+    ctx.AddU32("{}=atomicMin(ssbo{}_u32[{}],{});", inst, binding.U32(), offset.U32(), value);
+}
+
+void EmitStorageAtomicSMax32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                             const IR::Value& offset, std::string_view value) {
+    ctx.AddS32("{}=atomicMax(ssbo{}_s32[{}],int({}));", inst, binding.U32(), offset.U32(), value);
+}
+
+void EmitStorageAtomicUMax32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                             const IR::Value& offset, std::string_view value) {
+    ctx.AddU32("{}=atomicMax(ssbo{}_u32[{}],{});", inst, binding.U32(), offset.U32(), value);
+}
+
+void EmitStorageAtomicInc32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                            [[maybe_unused]] const IR::Value& offset, std::string_view value) {
+    // const auto ret{ctx.reg_alloc.Define(inst)};
+    // const auto type{"uint"};
+    // ctx.Add(cas_loop.data(), type, ret, type, ssbo, ret, ssbo, "CasIncrement", ssbo, value, ret);
+    const std::string ssbo{fmt::format("ssbo{}_u32[{}]", binding.U32(), offset.U32())};
+    CasFunction(ctx, inst, ssbo, value, "uint", "CasIncrement");
+}
+
+void EmitStorageAtomicDec32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                            const IR::Value& offset, std::string_view value) {
+    const std::string ssbo{fmt::format("ssbo{}_u32[{}]", binding.U32(), offset.U32())};
+    CasFunction(ctx, inst, ssbo, value, "uint", "CasDecrement");
+}
+
+void EmitStorageAtomicAnd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                            const IR::Value& offset, std::string_view value) {
+    ctx.AddU32("{}=atomicAnd(ssbo{}_u32[{}],{});", inst, binding.U32(), offset.U32(), value);
+}
+
+void EmitStorageAtomicOr32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                           const IR::Value& offset, std::string_view value) {
+    ctx.AddU32("{}=atomicOr(ssbo{}_u32[{}],{});", inst, binding.U32(), offset.U32(), value);
+}
+
+void EmitStorageAtomicXor32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                            const IR::Value& offset, std::string_view value) {
+    ctx.AddU32("{}=atomicXor(ssbo{}_u32[{}],{});", inst, binding.U32(), offset.U32(), value);
+}
+
+void EmitStorageAtomicExchange32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                                 const IR::Value& offset, std::string_view value) {
+    ctx.AddU32("{}=atomicExchange(ssbo{}_u32[{}],{});", inst, binding.U32(), offset.U32(), value);
+}
+
+void EmitStorageAtomicIAdd64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                             const IR::Value& offset, std::string_view value) {
+    // ctx.AddU64("{}=atomicAdd(ssbo{}_u64[{}],{});", inst, binding.U32(), offset.U32(), value);
+    ctx.AddU64("{}=ssbo{}_u64[{}];", inst, binding.U32(), offset.U32());
+    ctx.Add("ssbo{}_u64[{}]+={};", binding.U32(), offset.U32(), value);
+}
+
+void EmitStorageAtomicSMin64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                             const IR::Value& offset, std::string_view value) {
+    ctx.AddS64("{}=atomicMin(int64_t(ssbo{}_u64[{}]),int64_t({}));", inst, binding.U32(),
+               offset.U32(), value);
+}
+
+void EmitStorageAtomicUMin64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                             const IR::Value& offset, std::string_view value) {
+    ctx.AddU64("{}=atomicMin(ssbo{}_u64[{}],{});", inst, binding.U32(), offset.U32(), value);
+}
+
+void EmitStorageAtomicSMax64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                             const IR::Value& offset, std::string_view value) {
+    ctx.AddS64("{}=atomicMax(int64_t(ssbo{}_u64[{}]),int64_t({}));", inst, binding.U32(),
+               offset.U32(), value);
+}
+
+void EmitStorageAtomicUMax64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                             const IR::Value& offset, std::string_view value) {
+    ctx.AddU64("{}=atomicMax(ssbo{}_u64[{}],{});", inst, binding.U32(), offset.U32(), value);
+}
+
+void EmitStorageAtomicAnd64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                            const IR::Value& offset, std::string_view value) {
+    ctx.AddU64("{}=atomicAnd(ssbo{}_u64[{}],{});", inst, binding.U32(), offset.U32(), value);
+}
+
+void EmitStorageAtomicOr64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                           const IR::Value& offset, std::string_view value) {
+    ctx.AddU64("{}=atomicOr(ssbo{}_u64[{}],{});", inst, binding.U32(), offset.U32(), value);
+}
+
+void EmitStorageAtomicXor64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                            const IR::Value& offset, std::string_view value) {
+    ctx.AddU64("{}=atomicXor(ssbo{}_u64[{}],{});", inst, binding.U32(), offset.U32(), value);
+}
+
+void EmitStorageAtomicExchange64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                                 const IR::Value& offset, std::string_view value) {
+    ctx.AddU64("{}=atomicExchange(ssbo{}_u64[{}],{});", inst, binding.U32(), offset.U32(), value);
+}
+
+void EmitStorageAtomicAddF32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                             const IR::Value& offset, std::string_view value) {
+    ctx.AddF32("{}=atomicAdd(ssbo{}_u32[{}],{});", inst, binding.U32(), offset.U32(), value);
+}
+
+void EmitStorageAtomicAddF16x2([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
+                               [[maybe_unused]] const IR::Value& binding,
+                               [[maybe_unused]] const IR::Value& offset,
+                               [[maybe_unused]] std::string_view value) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitStorageAtomicAddF32x2([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
+                               [[maybe_unused]] const IR::Value& binding,
+                               [[maybe_unused]] const IR::Value& offset,
+                               [[maybe_unused]] std::string_view value) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitStorageAtomicMinF16x2([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
+                               [[maybe_unused]] const IR::Value& binding,
+                               [[maybe_unused]] const IR::Value& offset,
+                               [[maybe_unused]] std::string_view value) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitStorageAtomicMinF32x2([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
+                               [[maybe_unused]] const IR::Value& binding,
+                               [[maybe_unused]] const IR::Value& offset,
+                               [[maybe_unused]] std::string_view value) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitStorageAtomicMaxF16x2([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
+                               [[maybe_unused]] const IR::Value& binding,
+                               [[maybe_unused]] const IR::Value& offset,
+                               [[maybe_unused]] std::string_view value) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitStorageAtomicMaxF32x2([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
+                               [[maybe_unused]] const IR::Value& binding,
+                               [[maybe_unused]] const IR::Value& offset,
+                               [[maybe_unused]] std::string_view value) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicIAdd32(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicSMin32(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicUMin32(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicSMax32(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicUMax32(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicInc32(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicDec32(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicAnd32(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicOr32(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicXor32(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicExchange32(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicIAdd64(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicSMin64(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicUMin64(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicSMax64(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicUMax64(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicInc64(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicDec64(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicAnd64(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicOr64(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicXor64(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicExchange64(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicAddF32(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicAddF16x2(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicAddF32x2(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicMinF16x2(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicMinF32x2(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicMaxF16x2(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicMaxF32x2(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+} // namespace Shader::Backend::GLSL
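To make the cas_loop template concrete: for EmitStorageAtomicInc32 against binding 0 with a constant offset of 4 and a value held in a register named R2, the emitted GLSL expands to roughly the following (R1 and R2 are hypothetical register names assigned by RegAlloc, and the whitespace mirrors the raw string literal above):

    uint R1;
    for (;;){
        uint old_value=ssbo0_u32[4];
        R1 = atomicCompSwap(ssbo0_u32[4],old_value,CasIncrement(ssbo0_u32[4],R2));
        if (R1==old_value){break;}
    }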
diff --git a/src/shader_recompiler/backend/glsl/emit_glsl_instructions.h b/src/shader_recompiler/backend/glsl/emit_glsl_instructions.h
index 4e0487543..c2836898f 100644
--- a/src/shader_recompiler/backend/glsl/emit_glsl_instructions.h
+++ b/src/shader_recompiler/backend/glsl/emit_glsl_instructions.h
@@ -15,10 +15,7 @@ class Inst;
 class Value;
 } // namespace Shader::IR
 
-#pragma optimize("", off)
-
 namespace Shader::Backend::GLSL {
-
 class EmitContext;
 
 inline void EmitSetLoopSafetyVariable(EmitContext&) {}
@@ -114,7 +111,8 @@ void EmitLoadStorageU8(EmitContext& ctx, const IR::Value& binding, const IR::Val
 void EmitLoadStorageS8(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset);
 void EmitLoadStorageU16(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset);
 void EmitLoadStorageS16(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset);
-void EmitLoadStorage32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset);
+void EmitLoadStorage32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                       const IR::Value& offset);
 void EmitLoadStorage64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset);
 void EmitLoadStorage128(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset);
 void EmitWriteStorageU8(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
@@ -431,60 +429,60 @@ void EmitSharedAtomicExchange32(EmitContext& ctx, std::string_view pointer_offse
                                 std::string_view value);
 void EmitSharedAtomicExchange64(EmitContext& ctx, std::string_view pointer_offset,
                                 std::string_view value);
-void EmitStorageAtomicIAdd32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                             std::string_view value);
+void EmitStorageAtomicIAdd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                             const IR::Value& offset, std::string_view value);
-void EmitStorageAtomicSMin32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                             std::string_view value);
+void EmitStorageAtomicSMin32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                             const IR::Value& offset, std::string_view value);
-void EmitStorageAtomicUMin32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                             std::string_view value);
+void EmitStorageAtomicUMin32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                             const IR::Value& offset, std::string_view value);
-void EmitStorageAtomicSMax32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                             std::string_view value);
+void EmitStorageAtomicSMax32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                             const IR::Value& offset, std::string_view value);
-void EmitStorageAtomicUMax32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                             std::string_view value);
+void EmitStorageAtomicUMax32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                             const IR::Value& offset, std::string_view value);
-void EmitStorageAtomicInc32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                            std::string_view value);
+void EmitStorageAtomicInc32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                            const IR::Value& offset, std::string_view value);
-void EmitStorageAtomicDec32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                            std::string_view value);
+void EmitStorageAtomicDec32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                            const IR::Value& offset, std::string_view value);
-void EmitStorageAtomicAnd32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                            std::string_view value);
+void EmitStorageAtomicAnd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                            const IR::Value& offset, std::string_view value);
-void EmitStorageAtomicOr32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                           std::string_view value);
+void EmitStorageAtomicOr32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                           const IR::Value& offset, std::string_view value);
-void EmitStorageAtomicXor32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                            std::string_view value);
+void EmitStorageAtomicXor32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                            const IR::Value& offset, std::string_view value);
-void EmitStorageAtomicExchange32(EmitContext& ctx, const IR::Value& binding,
-                                 const IR::Value& offset, std::string_view value);
+void EmitStorageAtomicExchange32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                                 const IR::Value& offset, std::string_view value);
-void EmitStorageAtomicIAdd64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                             std::string_view value);
+void EmitStorageAtomicIAdd64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                             const IR::Value& offset, std::string_view value);
-void EmitStorageAtomicSMin64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                             std::string_view value);
+void EmitStorageAtomicSMin64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                             const IR::Value& offset, std::string_view value);
-void EmitStorageAtomicUMin64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                             std::string_view value);
+void EmitStorageAtomicUMin64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                             const IR::Value& offset, std::string_view value);
-void EmitStorageAtomicSMax64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                             std::string_view value);
+void EmitStorageAtomicSMax64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                             const IR::Value& offset, std::string_view value);
-void EmitStorageAtomicUMax64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                             std::string_view value);
+void EmitStorageAtomicUMax64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                             const IR::Value& offset, std::string_view value);
-void EmitStorageAtomicAnd64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                            std::string_view value);
+void EmitStorageAtomicAnd64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                            const IR::Value& offset, std::string_view value);
-void EmitStorageAtomicOr64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                           std::string_view value);
+void EmitStorageAtomicOr64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                           const IR::Value& offset, std::string_view value);
-void EmitStorageAtomicXor64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                            std::string_view value);
+void EmitStorageAtomicXor64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                            const IR::Value& offset, std::string_view value);
-void EmitStorageAtomicExchange64(EmitContext& ctx, const IR::Value& binding,
-                                 const IR::Value& offset, std::string_view value);
+void EmitStorageAtomicExchange64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                                 const IR::Value& offset, std::string_view value);
-void EmitStorageAtomicAddF32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                             std::string_view value);
+void EmitStorageAtomicAddF32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                             const IR::Value& offset, std::string_view value);
-void EmitStorageAtomicAddF16x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                               std::string_view value);
+void EmitStorageAtomicAddF16x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                               const IR::Value& offset, std::string_view value);
-void EmitStorageAtomicAddF32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                               std::string_view value);
+void EmitStorageAtomicAddF32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                               const IR::Value& offset, std::string_view value);
-void EmitStorageAtomicMinF16x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                               std::string_view value);
+void EmitStorageAtomicMinF16x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                               const IR::Value& offset, std::string_view value);
-void EmitStorageAtomicMinF32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                               std::string_view value);
+void EmitStorageAtomicMinF32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                               const IR::Value& offset, std::string_view value);
-void EmitStorageAtomicMaxF16x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                               std::string_view value);
+void EmitStorageAtomicMaxF16x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                               const IR::Value& offset, std::string_view value);
-void EmitStorageAtomicMaxF32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                               std::string_view value);
+void EmitStorageAtomicMaxF32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                               const IR::Value& offset, std::string_view value);
 void EmitGlobalAtomicIAdd32(EmitContext& ctx);
 void EmitGlobalAtomicSMin32(EmitContext& ctx);
 void EmitGlobalAtomicUMin32(EmitContext& ctx);
diff --git a/src/shader_recompiler/backend/glsl/emit_glsl_memory.cpp b/src/shader_recompiler/backend/glsl/emit_glsl_memory.cpp
index b042ae853..d1e6f074d 100644
--- a/src/shader_recompiler/backend/glsl/emit_glsl_memory.cpp
+++ b/src/shader_recompiler/backend/glsl/emit_glsl_memory.cpp
@@ -32,9 +32,9 @@ void EmitLoadStorageS16([[maybe_unused]] EmitContext& ctx,
     throw NotImplementedException("GLSL Instrucion");
 }
 
-void EmitLoadStorage32([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] const IR::Value& binding,
-                       [[maybe_unused]] const IR::Value& offset) {
-    throw NotImplementedException("GLSL Instrucion");
+void EmitLoadStorage32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                       const IR::Value& offset) {
+    ctx.AddU32("{}=ssbo{}_u32[{}];", inst, binding.U32(), offset.U32());
 }
 
 void EmitLoadStorage64([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] const IR::Value& binding,
@@ -83,7 +83,7 @@ void EmitWriteStorage32(EmitContext& ctx, const IR::Value& binding, const IR::Va
 
 void EmitWriteStorage64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                         std::string_view value) {
-    ctx.Add("ssbo{}_u64[{}]={};", binding.U32(), offset.U32(), value);
+    ctx.Add("ssbo{}_u32x2[{}]={};", binding.U32(), offset.U32(), value);
 }
 
 void EmitWriteStorage128([[maybe_unused]] EmitContext& ctx,
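In concrete terms, for a storage buffer at binding 0 and a constant offset of 8, the new EmitLoadStorage32 above emits a line of the form

    uint R7=ssbo0_u32[8];

and EmitWriteStorage64 now stores through the renamed uvec2 view, e.g. ssbo0_u32x2[8]=R8; (R7 and R8 stand in for whatever names RegAlloc assigns, and the uint declaration prefix is assumed to come from the typed Define overload).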
diff --git a/src/shader_recompiler/backend/glsl/emit_glsl_not_implemented.cpp b/src/shader_recompiler/backend/glsl/emit_glsl_not_implemented.cpp
index cb6562ebf..65eccaece 100644
--- a/src/shader_recompiler/backend/glsl/emit_glsl_not_implemented.cpp
+++ b/src/shader_recompiler/backend/glsl/emit_glsl_not_implemented.cpp
@@ -20,7 +20,7 @@ static void NotImplemented() {
 }
 
 void EmitPhi(EmitContext& ctx, IR::Inst& inst) {
-    NotImplemented();
+    // NotImplemented();
 }
 
 void EmitVoid(EmitContext& ctx) {
@@ -439,257 +439,6 @@ void EmitSharedAtomicExchange64(EmitContext& ctx, std::string_view pointer_offse
     NotImplemented();
 }
 
-void EmitStorageAtomicIAdd32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                             std::string_view value) {
-    NotImplemented();
-}
-
-void EmitStorageAtomicSMin32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                             std::string_view value) {
-    NotImplemented();
-}
-
-void EmitStorageAtomicUMin32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                             std::string_view value) {
-    NotImplemented();
-}
-
-void EmitStorageAtomicSMax32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                             std::string_view value) {
-    NotImplemented();
-}
-
-void EmitStorageAtomicUMax32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                             std::string_view value) {
-    NotImplemented();
-}
-
-void EmitStorageAtomicInc32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                            std::string_view value) {
-    NotImplemented();
-}
-
-void EmitStorageAtomicDec32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                            std::string_view value) {
-    NotImplemented();
-}
-
-void EmitStorageAtomicAnd32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                            std::string_view value) {
-    NotImplemented();
-}
-
-void EmitStorageAtomicOr32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                           std::string_view value) {
-    NotImplemented();
-}
-
-void EmitStorageAtomicXor32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                            std::string_view value) {
-    NotImplemented();
-}
-
-void EmitStorageAtomicExchange32(EmitContext& ctx, const IR::Value& binding,
-                                 const IR::Value& offset, std::string_view value) {
-    NotImplemented();
-}
-
-void EmitStorageAtomicIAdd64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                             std::string_view value) {
-    NotImplemented();
-}
-
-void EmitStorageAtomicSMin64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                             std::string_view value) {
-    NotImplemented();
-}
-
-void EmitStorageAtomicUMin64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                             std::string_view value) {
-    NotImplemented();
-}
-
-void EmitStorageAtomicSMax64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                             std::string_view value) {
-    NotImplemented();
-}
-
-void EmitStorageAtomicUMax64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                             std::string_view value) {
-    NotImplemented();
-}
-
-void EmitStorageAtomicAnd64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                            std::string_view value) {
-    NotImplemented();
-}
-
-void EmitStorageAtomicOr64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                           std::string_view value) {
-    NotImplemented();
-}
-
-void EmitStorageAtomicXor64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                            std::string_view value) {
-    NotImplemented();
-}
-
-void EmitStorageAtomicExchange64(EmitContext& ctx, const IR::Value& binding,
-                                 const IR::Value& offset, std::string_view value) {
-    NotImplemented();
-}
-
-void EmitStorageAtomicAddF32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                             std::string_view value) {
-    NotImplemented();
-}
-
-void EmitStorageAtomicAddF16x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                               std::string_view value) {
-    NotImplemented();
-}
-
-void EmitStorageAtomicAddF32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                               std::string_view value) {
-    NotImplemented();
-}
-
-void EmitStorageAtomicMinF16x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                               std::string_view value) {
-    NotImplemented();
-}
-
-void EmitStorageAtomicMinF32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                               std::string_view value) {
-    NotImplemented();
-}
-
-void EmitStorageAtomicMaxF16x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                               std::string_view value) {
-    NotImplemented();
-}
-
-void EmitStorageAtomicMaxF32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
-                               std::string_view value) {
-    NotImplemented();
-}
-
-void EmitGlobalAtomicIAdd32(EmitContext& ctx) {
-    NotImplemented();
-}
-
-void EmitGlobalAtomicSMin32(EmitContext& ctx) {
-    NotImplemented();
-}
-
-void EmitGlobalAtomicUMin32(EmitContext& ctx) {
-    NotImplemented();
-}
-
-void EmitGlobalAtomicSMax32(EmitContext& ctx) {
-    NotImplemented();
-}
-
-void EmitGlobalAtomicUMax32(EmitContext& ctx) {
-    NotImplemented();
-}
-
-void EmitGlobalAtomicInc32(EmitContext& ctx) {
-    NotImplemented();
-}
-
-void EmitGlobalAtomicDec32(EmitContext& ctx) {
-    NotImplemented();
-}
-
-void EmitGlobalAtomicAnd32(EmitContext& ctx) {
-    NotImplemented();
-}
-
-void EmitGlobalAtomicOr32(EmitContext& ctx) {
-    NotImplemented();
-}
-
-void EmitGlobalAtomicXor32(EmitContext& ctx) {
-    NotImplemented();
-}
-
-void EmitGlobalAtomicExchange32(EmitContext& ctx) {
-    NotImplemented();
-}
-
-void EmitGlobalAtomicIAdd64(EmitContext& ctx) {
-    NotImplemented();
-}
-
-void EmitGlobalAtomicSMin64(EmitContext& ctx) {
-    NotImplemented();
-}
-
-void EmitGlobalAtomicUMin64(EmitContext& ctx) {
-    NotImplemented();
-}
-
-void EmitGlobalAtomicSMax64(EmitContext& ctx) {
-    NotImplemented();
-}
-
-void EmitGlobalAtomicUMax64(EmitContext& ctx) {
-    NotImplemented();
-}
-
-void EmitGlobalAtomicInc64(EmitContext& ctx) {
-    NotImplemented();
-}
-
-void EmitGlobalAtomicDec64(EmitContext& ctx) {
-    NotImplemented();
-}
-
-void EmitGlobalAtomicAnd64(EmitContext& ctx) {
-    NotImplemented();
-}
-
-void EmitGlobalAtomicOr64(EmitContext& ctx) {
-    NotImplemented();
-}
-
-void EmitGlobalAtomicXor64(EmitContext& ctx) {
-    NotImplemented();
-}
-
-void EmitGlobalAtomicExchange64(EmitContext& ctx) {
-    NotImplemented();
-}
-
-void EmitGlobalAtomicAddF32(EmitContext& ctx) {
-    NotImplemented();
-}
-
-void EmitGlobalAtomicAddF16x2(EmitContext& ctx) {
-    NotImplemented();
-}
-
-void EmitGlobalAtomicAddF32x2(EmitContext& ctx) {
-    NotImplemented();
-}
-
-void EmitGlobalAtomicMinF16x2(EmitContext& ctx) {
-    NotImplemented();
-}
-
-void EmitGlobalAtomicMinF32x2(EmitContext& ctx) {
-    NotImplemented();
-}
-
-void EmitGlobalAtomicMaxF16x2(EmitContext& ctx) {
-    NotImplemented();
-}
-
-void EmitGlobalAtomicMaxF32x2(EmitContext& ctx) {
-    NotImplemented();
-}
-
 void EmitBindlessImageSampleImplicitLod(EmitContext&) {
     NotImplemented();
 }
diff --git a/src/shader_recompiler/backend/glsl/reg_alloc.cpp b/src/shader_recompiler/backend/glsl/reg_alloc.cpp
index 9f529c358..8db1391fd 100644
--- a/src/shader_recompiler/backend/glsl/reg_alloc.cpp
+++ b/src/shader_recompiler/backend/glsl/reg_alloc.cpp
@@ -61,6 +61,12 @@ std::string MakeImm(const IR::Value& value) {
 }
 } // Anonymous namespace
 
+std::string RegAlloc::Define(IR::Inst& inst) {
+    const Id id{Alloc()};
+    inst.SetDefinition<Id>(id);
+    return Representation(id);
+}
+
 std::string RegAlloc::Define(IR::Inst& inst, Type type) {
     const Id id{Alloc()};
     const auto type_str{GetType(type, id.index)};
diff --git a/src/shader_recompiler/backend/glsl/reg_alloc.h b/src/shader_recompiler/backend/glsl/reg_alloc.h
index 63c940d3a..7891c30e0 100644
--- a/src/shader_recompiler/backend/glsl/reg_alloc.h
+++ b/src/shader_recompiler/backend/glsl/reg_alloc.h
@@ -48,7 +48,8 @@ static_assert(sizeof(Id) == sizeof(u32));
 
 class RegAlloc {
 public:
-    std::string Define(IR::Inst& inst, Type type = Type::Void);
+    std::string Define(IR::Inst& inst);
+    std::string Define(IR::Inst& inst, Type type);
 
     std::string Consume(const IR::Value& value);
 