Diffstat (limited to 'src/shader_recompiler/backend/spirv/emit_spirv_atomic.cpp')
-rw-r--r--  src/shader_recompiler/backend/spirv/emit_spirv_atomic.cpp  |  528
1 file changed, 528 insertions(+), 0 deletions(-)
diff --git a/src/shader_recompiler/backend/spirv/emit_spirv_atomic.cpp b/src/shader_recompiler/backend/spirv/emit_spirv_atomic.cpp
new file mode 100644
index 000000000..03d891419
--- /dev/null
+++ b/src/shader_recompiler/backend/spirv/emit_spirv_atomic.cpp
@@ -0,0 +1,528 @@
// Copyright 2021 yuzu Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.

#include <bit>     // std::countr_zero
#include <utility>  // std::pair

#include "shader_recompiler/backend/spirv/emit_spirv.h"

namespace Shader::Backend::SPIRV {
namespace {

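// Shared memory is modeled as an array of u32 words, so byte offsets are converted to word
// indices with a right shift by two. index_offset selects the adjacent word, which the 64-bit
// emulation paths below use. With explicit workgroup layout the array presumably sits inside a
// block, hence the extra zero index in the access chain.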
Id GetSharedPointer(EmitContext& ctx, Id offset, u32 index_offset = 0) {
    const Id shift_id{ctx.Constant(ctx.U32[1], 2U)};
    const Id shifted_value{ctx.OpShiftRightArithmetic(ctx.U32[1], offset, shift_id)};
    const Id index{ctx.OpIAdd(ctx.U32[1], shifted_value, ctx.Constant(ctx.U32[1], index_offset))};
    return ctx.profile.support_explicit_workgroup_layout
               ? ctx.OpAccessChain(ctx.shared_u32, ctx.shared_memory_u32, ctx.u32_zero_value, index)
               : ctx.OpAccessChain(ctx.shared_u32, ctx.shared_memory_u32, index);
}

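// Converts a byte offset into an element index. Immediate offsets are folded at compile time;
// dynamic offsets are shifted right by log2(element_size), which is exact because every element
// size used here is a power of two.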
Id StorageIndex(EmitContext& ctx, const IR::Value& offset, size_t element_size) {
    if (offset.IsImmediate()) {
        const u32 imm_offset{static_cast<u32>(offset.U32() / element_size)};
        return ctx.Constant(ctx.U32[1], imm_offset);
    }
    const u32 shift{static_cast<u32>(std::countr_zero(element_size))};
    const Id index{ctx.Def(offset)};
    if (shift == 0) {
        return index;
    }
    const Id shift_id{ctx.Constant(ctx.U32[1], shift)};
    return ctx.OpShiftRightLogical(ctx.U32[1], index, shift_id);
}

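// Builds a pointer to one u32 word of a storage buffer. The zero index steps into the SSBO
// block's sole member, and index_offset again selects a neighbouring word for the 64-bit
// fallbacks. Each binding maps to a distinct SPIR-V object, so dynamic indexing is rejected.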
Id GetStoragePointer(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                     u32 index_offset = 0) {
    // TODO: Support reinterpreting bindings, guaranteed to be aligned
    if (!binding.IsImmediate()) {
        throw NotImplementedException("Dynamic storage buffer indexing");
    }
    const Id ssbo{ctx.ssbos[binding.U32()]};
    const Id base_index{StorageIndex(ctx, offset, sizeof(u32))};
    const Id index{ctx.OpIAdd(ctx.U32[1], base_index, ctx.Constant(ctx.U32[1], index_offset))};
    return ctx.OpAccessChain(ctx.storage_u32, ssbo, ctx.u32_zero_value, index);
}

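// All atomics below use Device scope with relaxed (zero) memory semantics, mirroring the
// relaxed ordering of the guest's atomic instructions.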
std::pair<Id, Id> GetAtomicArgs(EmitContext& ctx) {
    const Id scope{ctx.Constant(ctx.U32[1], static_cast<u32>(spv::Scope::Device))};
    const Id semantics{ctx.u32_zero_value};
    return {scope, semantics};
}

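// LoadU64 and StoreResult emulate a 64-bit access as two consecutive 32-bit words, bitcasting
// between a uvec2 and a u64. A load-modify-store through these helpers is not atomic; the
// fallback paths below accept that race when int64 atomics are unavailable.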
Id LoadU64(EmitContext& ctx, Id pointer_1, Id pointer_2) {
    const Id value_1{ctx.OpLoad(ctx.U32[1], pointer_1)};
    const Id value_2{ctx.OpLoad(ctx.U32[1], pointer_2)};
    const Id original_composite{ctx.OpCompositeConstruct(ctx.U32[2], value_1, value_2)};
    return ctx.OpBitcast(ctx.U64, original_composite);
}

void StoreResult(EmitContext& ctx, Id pointer_1, Id pointer_2, Id result) {
    const Id composite{ctx.OpBitcast(ctx.U32[2], result)};
    ctx.OpStore(pointer_1, ctx.OpCompositeExtract(ctx.U32[1], composite, 0));
    ctx.OpStore(pointer_2, ctx.OpCompositeExtract(ctx.U32[1], composite, 1));
}
} // Anonymous namespace

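// Each 32-bit atomic lowers to a single SPIR-V instruction. The IAdd case below, for example,
// emits roughly the following (identifiers illustrative):
//     %ptr = OpAccessChain %_ptr_Workgroup_uint %shared_memory_u32 %index
//     %old = OpAtomicIAdd %uint %ptr %scope_device %semantics_relaxed %value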
Id EmitSharedAtomicIAdd32(EmitContext& ctx, Id pointer_offset, Id value) {
    const Id pointer{GetSharedPointer(ctx, pointer_offset)};
    const auto [scope, semantics]{GetAtomicArgs(ctx)};
    return ctx.OpAtomicIAdd(ctx.U32[1], pointer, scope, semantics, value);
}

Id EmitSharedAtomicSMin32(EmitContext& ctx, Id pointer_offset, Id value) {
    const Id pointer{GetSharedPointer(ctx, pointer_offset)};
    const auto [scope, semantics]{GetAtomicArgs(ctx)};
    return ctx.OpAtomicSMin(ctx.U32[1], pointer, scope, semantics, value);
}

Id EmitSharedAtomicUMin32(EmitContext& ctx, Id pointer_offset, Id value) {
    const Id pointer{GetSharedPointer(ctx, pointer_offset)};
    const auto [scope, semantics]{GetAtomicArgs(ctx)};
    return ctx.OpAtomicUMin(ctx.U32[1], pointer, scope, semantics, value);
}

Id EmitSharedAtomicSMax32(EmitContext& ctx, Id pointer_offset, Id value) {
    const Id pointer{GetSharedPointer(ctx, pointer_offset)};
    const auto [scope, semantics]{GetAtomicArgs(ctx)};
    return ctx.OpAtomicSMax(ctx.U32[1], pointer, scope, semantics, value);
}

Id EmitSharedAtomicUMax32(EmitContext& ctx, Id pointer_offset, Id value) {
    const Id pointer{GetSharedPointer(ctx, pointer_offset)};
    const auto [scope, semantics]{GetAtomicArgs(ctx)};
    return ctx.OpAtomicUMax(ctx.U32[1], pointer, scope, semantics, value);
}

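// Inc and Dec have no SPIR-V opcode matching the guest's clamping semantics, so they call
// helper functions declared by EmitContext (not shown here), presumably compare-exchange loops
// implementing the wrapping increment/decrement rules, e.g. for Inc roughly:
//     do { old = mem[i]; desired = (old >= value) ? 0 : old + 1; }
//     while (!CAS(&mem[i], old, desired));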
Id EmitSharedAtomicInc32(EmitContext& ctx, Id pointer_offset, Id value) {
    const Id shift_id{ctx.Constant(ctx.U32[1], 2U)};
    const Id index{ctx.OpShiftRightArithmetic(ctx.U32[1], pointer_offset, shift_id)};
    return ctx.OpFunctionCall(ctx.U32[1], ctx.increment_cas_shared, index, value,
                              ctx.shared_memory_u32);
}

Id EmitSharedAtomicDec32(EmitContext& ctx, Id pointer_offset, Id value) {
    const Id shift_id{ctx.Constant(ctx.U32[1], 2U)};
    const Id index{ctx.OpShiftRightArithmetic(ctx.U32[1], pointer_offset, shift_id)};
    return ctx.OpFunctionCall(ctx.U32[1], ctx.decrement_cas_shared, index, value,
                              ctx.shared_memory_u32);
}

Id EmitSharedAtomicAnd32(EmitContext& ctx, Id pointer_offset, Id value) {
    const Id pointer{GetSharedPointer(ctx, pointer_offset)};
    const auto [scope, semantics]{GetAtomicArgs(ctx)};
    return ctx.OpAtomicAnd(ctx.U32[1], pointer, scope, semantics, value);
}

Id EmitSharedAtomicOr32(EmitContext& ctx, Id pointer_offset, Id value) {
    const Id pointer{GetSharedPointer(ctx, pointer_offset)};
    const auto [scope, semantics]{GetAtomicArgs(ctx)};
    return ctx.OpAtomicOr(ctx.U32[1], pointer, scope, semantics, value);
}

Id EmitSharedAtomicXor32(EmitContext& ctx, Id pointer_offset, Id value) {
    const Id pointer{GetSharedPointer(ctx, pointer_offset)};
    const auto [scope, semantics]{GetAtomicArgs(ctx)};
    return ctx.OpAtomicXor(ctx.U32[1], pointer, scope, semantics, value);
}

Id EmitSharedAtomicExchange32(EmitContext& ctx, Id pointer_offset, Id value) {
    const Id pointer{GetSharedPointer(ctx, pointer_offset)};
    const auto [scope, semantics]{GetAtomicArgs(ctx)};
    return ctx.OpAtomicExchange(ctx.U32[1], pointer, scope, semantics, value);
}

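// The 64-bit exchange uses a native 64-bit atomic when the driver supports it and otherwise
// falls back to a non-atomic two-word swap through the helpers above.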
Id EmitSharedAtomicExchange64(EmitContext& ctx, Id pointer_offset, Id value) {
    const Id pointer_1{GetSharedPointer(ctx, pointer_offset)};
    if (ctx.profile.support_int64_atomics) {
        const auto [scope, semantics]{GetAtomicArgs(ctx)};
        return ctx.OpAtomicExchange(ctx.U64, pointer_1, scope, semantics, value);
    }
    // LOG_WARNING(Render_Vulkan, "Int64 Atomics not supported, fallback to non-atomic");
    const Id pointer_2{GetSharedPointer(ctx, pointer_offset, 1)};
    const Id original_value{LoadU64(ctx, pointer_1, pointer_2)};
    StoreResult(ctx, pointer_1, pointer_2, value);
    return original_value;
}

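// Storage buffer atomics mirror the shared memory ones; only the pointer construction differs.
// Inc and Dec again go through per-SSBO compare-exchange helpers.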
Id EmitStorageAtomicIAdd32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                           Id value) {
    const Id pointer{GetStoragePointer(ctx, binding, offset)};
    const auto [scope, semantics]{GetAtomicArgs(ctx)};
    return ctx.OpAtomicIAdd(ctx.U32[1], pointer, scope, semantics, value);
}

Id EmitStorageAtomicSMin32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                           Id value) {
    const Id pointer{GetStoragePointer(ctx, binding, offset)};
    const auto [scope, semantics]{GetAtomicArgs(ctx)};
    return ctx.OpAtomicSMin(ctx.U32[1], pointer, scope, semantics, value);
}

Id EmitStorageAtomicUMin32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                           Id value) {
    const Id pointer{GetStoragePointer(ctx, binding, offset)};
    const auto [scope, semantics]{GetAtomicArgs(ctx)};
    return ctx.OpAtomicUMin(ctx.U32[1], pointer, scope, semantics, value);
}

Id EmitStorageAtomicSMax32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                           Id value) {
    const Id pointer{GetStoragePointer(ctx, binding, offset)};
    const auto [scope, semantics]{GetAtomicArgs(ctx)};
    return ctx.OpAtomicSMax(ctx.U32[1], pointer, scope, semantics, value);
}

Id EmitStorageAtomicUMax32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                           Id value) {
    const Id pointer{GetStoragePointer(ctx, binding, offset)};
    const auto [scope, semantics]{GetAtomicArgs(ctx)};
    return ctx.OpAtomicUMax(ctx.U32[1], pointer, scope, semantics, value);
}

Id EmitStorageAtomicInc32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                          Id value) {
    const Id ssbo{ctx.ssbos[binding.U32()]};
    const Id base_index{StorageIndex(ctx, offset, sizeof(u32))};
    return ctx.OpFunctionCall(ctx.U32[1], ctx.increment_cas_ssbo, base_index, value, ssbo);
}

Id EmitStorageAtomicDec32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                          Id value) {
    const Id ssbo{ctx.ssbos[binding.U32()]};
    const Id base_index{StorageIndex(ctx, offset, sizeof(u32))};
    return ctx.OpFunctionCall(ctx.U32[1], ctx.decrement_cas_ssbo, base_index, value, ssbo);
}

Id EmitStorageAtomicAnd32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                          Id value) {
    const Id pointer{GetStoragePointer(ctx, binding, offset)};
    const auto [scope, semantics]{GetAtomicArgs(ctx)};
    return ctx.OpAtomicAnd(ctx.U32[1], pointer, scope, semantics, value);
}

Id EmitStorageAtomicOr32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                         Id value) {
    const Id pointer{GetStoragePointer(ctx, binding, offset)};
    const auto [scope, semantics]{GetAtomicArgs(ctx)};
    return ctx.OpAtomicOr(ctx.U32[1], pointer, scope, semantics, value);
}

Id EmitStorageAtomicXor32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                          Id value) {
    const Id pointer{GetStoragePointer(ctx, binding, offset)};
    const auto [scope, semantics]{GetAtomicArgs(ctx)};
    return ctx.OpAtomicXor(ctx.U32[1], pointer, scope, semantics, value);
}

Id EmitStorageAtomicExchange32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                               Id value) {
    const Id pointer{GetStoragePointer(ctx, binding, offset)};
    const auto [scope, semantics]{GetAtomicArgs(ctx)};
    return ctx.OpAtomicExchange(ctx.U32[1], pointer, scope, semantics, value);
}

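// Every 64-bit storage atomic follows the same shape: use the native int64 atomic when
// available, otherwise load both words, apply the operation on the reconstructed u64, store the
// result back non-atomically, and return the original value.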
Id EmitStorageAtomicIAdd64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                           Id value) {
    const Id pointer_1{GetStoragePointer(ctx, binding, offset)};
    if (ctx.profile.support_int64_atomics) {
        const auto [scope, semantics]{GetAtomicArgs(ctx)};
        return ctx.OpAtomicIAdd(ctx.U64, pointer_1, scope, semantics, value);
    }
    // LOG_WARNING(Render_Vulkan, "Int64 Atomics not supported, fallback to non-atomic");
    const Id pointer_2{GetStoragePointer(ctx, binding, offset, 1)};
    const Id original_value{LoadU64(ctx, pointer_1, pointer_2)};
    const Id result{ctx.OpIAdd(ctx.U64, value, original_value)};
    StoreResult(ctx, pointer_1, pointer_2, result);
    return original_value;
}

Id EmitStorageAtomicSMin64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                           Id value) {
    const Id pointer_1{GetStoragePointer(ctx, binding, offset)};
    if (ctx.profile.support_int64_atomics) {
        const auto [scope, semantics]{GetAtomicArgs(ctx)};
        return ctx.OpAtomicSMin(ctx.U64, pointer_1, scope, semantics, value);
    }
    // LOG_WARNING(Render_Vulkan, "Int64 Atomics not supported, fallback to non-atomic");
    const Id pointer_2{GetStoragePointer(ctx, binding, offset, 1)};
    const Id original_value{LoadU64(ctx, pointer_1, pointer_2)};
    const Id result{ctx.OpSMin(ctx.U64, value, original_value)};
    StoreResult(ctx, pointer_1, pointer_2, result);
    return original_value;
}

Id EmitStorageAtomicUMin64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                           Id value) {
    const Id pointer_1{GetStoragePointer(ctx, binding, offset)};
    if (ctx.profile.support_int64_atomics) {
        const auto [scope, semantics]{GetAtomicArgs(ctx)};
        return ctx.OpAtomicUMin(ctx.U64, pointer_1, scope, semantics, value);
    }
    // LOG_WARNING(Render_Vulkan, "Int64 Atomics not supported, fallback to non-atomic");
    const Id pointer_2{GetStoragePointer(ctx, binding, offset, 1)};
    const Id original_value{LoadU64(ctx, pointer_1, pointer_2)};
    const Id result{ctx.OpUMin(ctx.U64, value, original_value)};
    StoreResult(ctx, pointer_1, pointer_2, result);
    return original_value;
}

Id EmitStorageAtomicSMax64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                           Id value) {
    const Id pointer_1{GetStoragePointer(ctx, binding, offset)};
    if (ctx.profile.support_int64_atomics) {
        const auto [scope, semantics]{GetAtomicArgs(ctx)};
        return ctx.OpAtomicSMax(ctx.U64, pointer_1, scope, semantics, value);
    }
    // LOG_WARNING(Render_Vulkan, "Int64 Atomics not supported, fallback to non-atomic");
    const Id pointer_2{GetStoragePointer(ctx, binding, offset, 1)};
    const Id original_value{LoadU64(ctx, pointer_1, pointer_2)};
    const Id result{ctx.OpSMax(ctx.U64, value, original_value)};
    StoreResult(ctx, pointer_1, pointer_2, result);
    return original_value;
}

Id EmitStorageAtomicUMax64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                           Id value) {
    const Id pointer_1{GetStoragePointer(ctx, binding, offset)};
    if (ctx.profile.support_int64_atomics) {
        const auto [scope, semantics]{GetAtomicArgs(ctx)};
        return ctx.OpAtomicUMax(ctx.U64, pointer_1, scope, semantics, value);
    }
    // LOG_WARNING(Render_Vulkan, "Int64 Atomics not supported, fallback to non-atomic");
    const Id pointer_2{GetStoragePointer(ctx, binding, offset, 1)};
    const Id original_value{LoadU64(ctx, pointer_1, pointer_2)};
    const Id result{ctx.OpUMax(ctx.U64, value, original_value)};
    StoreResult(ctx, pointer_1, pointer_2, result);
    return original_value;
}

Id EmitStorageAtomicAnd64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                          Id value) {
    const Id pointer_1{GetStoragePointer(ctx, binding, offset)};
    if (ctx.profile.support_int64_atomics) {
        const auto [scope, semantics]{GetAtomicArgs(ctx)};
        return ctx.OpAtomicAnd(ctx.U64, pointer_1, scope, semantics, value);
    }
    // LOG_WARNING(Render_Vulkan, "Int64 Atomics not supported, fallback to non-atomic");
    const Id pointer_2{GetStoragePointer(ctx, binding, offset, 1)};
    const Id original_value{LoadU64(ctx, pointer_1, pointer_2)};
    const Id result{ctx.OpBitwiseAnd(ctx.U64, value, original_value)};
    StoreResult(ctx, pointer_1, pointer_2, result);
    return original_value;
}

Id EmitStorageAtomicOr64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                         Id value) {
    const Id pointer_1{GetStoragePointer(ctx, binding, offset)};
    if (ctx.profile.support_int64_atomics) {
        const auto [scope, semantics]{GetAtomicArgs(ctx)};
        return ctx.OpAtomicOr(ctx.U64, pointer_1, scope, semantics, value);
    }
    // LOG_WARNING(Render_Vulkan, "Int64 Atomics not supported, fallback to non-atomic");
    const Id pointer_2{GetStoragePointer(ctx, binding, offset, 1)};
    const Id original_value{LoadU64(ctx, pointer_1, pointer_2)};
    const Id result{ctx.OpBitwiseOr(ctx.U64, value, original_value)};
    StoreResult(ctx, pointer_1, pointer_2, result);
    return original_value;
}

Id EmitStorageAtomicXor64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                          Id value) {
    const Id pointer_1{GetStoragePointer(ctx, binding, offset)};
    if (ctx.profile.support_int64_atomics) {
        const auto [scope, semantics]{GetAtomicArgs(ctx)};
        return ctx.OpAtomicXor(ctx.U64, pointer_1, scope, semantics, value);
    }
    // LOG_WARNING(Render_Vulkan, "Int64 Atomics not supported, fallback to non-atomic");
    const Id pointer_2{GetStoragePointer(ctx, binding, offset, 1)};
    const Id original_value{LoadU64(ctx, pointer_1, pointer_2)};
    const Id result{ctx.OpBitwiseXor(ctx.U64, value, original_value)};
    StoreResult(ctx, pointer_1, pointer_2, result);
    return original_value;
}

Id EmitStorageAtomicExchange64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                               Id value) {
    const Id pointer_1{GetStoragePointer(ctx, binding, offset)};
    if (ctx.profile.support_int64_atomics) {
        const auto [scope, semantics]{GetAtomicArgs(ctx)};
        return ctx.OpAtomicExchange(ctx.U64, pointer_1, scope, semantics, value);
    }
    // LOG_WARNING(Render_Vulkan, "Int64 Atomics not supported, fallback to non-atomic");
    const Id pointer_2{GetStoragePointer(ctx, binding, offset, 1)};
    const Id original_value{LoadU64(ctx, pointer_1, pointer_2)};
    StoreResult(ctx, pointer_1, pointer_2, value);
    return original_value;
}

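// Floating-point atomics have no core SPIR-V instruction at this feature level, so these cases
// call CAS-loop helpers (f32_add_cas and friends) that EmitContext is assumed to emit. F16x2
// results are bitcast back into a u32 word, and F32x2 results are packed with OpPackHalf2x16,
// apparently because the IR carries these values as packed half pairs in a single word.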
Id EmitStorageAtomicAddF32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                           Id value) {
    const Id ssbo{ctx.ssbos[binding.U32()]};
    const Id base_index{StorageIndex(ctx, offset, sizeof(u32))};
    return ctx.OpFunctionCall(ctx.F32[1], ctx.f32_add_cas, base_index, value, ssbo);
}

Id EmitStorageAtomicAddF16x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                             Id value) {
    const Id ssbo{ctx.ssbos[binding.U32()]};
    const Id base_index{StorageIndex(ctx, offset, sizeof(u32))};
    const Id result{ctx.OpFunctionCall(ctx.F16[2], ctx.f16x2_add_cas, base_index, value, ssbo)};
    return ctx.OpBitcast(ctx.U32[1], result);
}

Id EmitStorageAtomicAddF32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                             Id value) {
    const Id ssbo{ctx.ssbos[binding.U32()]};
    const Id base_index{StorageIndex(ctx, offset, sizeof(u32))};
    const Id result{ctx.OpFunctionCall(ctx.F32[2], ctx.f32x2_add_cas, base_index, value, ssbo)};
    return ctx.OpPackHalf2x16(ctx.U32[1], result);
}

Id EmitStorageAtomicMinF16x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                             Id value) {
    const Id ssbo{ctx.ssbos[binding.U32()]};
    const Id base_index{StorageIndex(ctx, offset, sizeof(u32))};
    const Id result{ctx.OpFunctionCall(ctx.F16[2], ctx.f16x2_min_cas, base_index, value, ssbo)};
    return ctx.OpBitcast(ctx.U32[1], result);
}

Id EmitStorageAtomicMinF32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                             Id value) {
    const Id ssbo{ctx.ssbos[binding.U32()]};
    const Id base_index{StorageIndex(ctx, offset, sizeof(u32))};
    const Id result{ctx.OpFunctionCall(ctx.F32[2], ctx.f32x2_min_cas, base_index, value, ssbo)};
    return ctx.OpPackHalf2x16(ctx.U32[1], result);
}

Id EmitStorageAtomicMaxF16x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                             Id value) {
    const Id ssbo{ctx.ssbos[binding.U32()]};
    const Id base_index{StorageIndex(ctx, offset, sizeof(u32))};
    const Id result{ctx.OpFunctionCall(ctx.F16[2], ctx.f16x2_max_cas, base_index, value, ssbo)};
    return ctx.OpBitcast(ctx.U32[1], result);
}

Id EmitStorageAtomicMaxF32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                             Id value) {
    const Id ssbo{ctx.ssbos[binding.U32()]};
    const Id base_index{StorageIndex(ctx, offset, sizeof(u32))};
    const Id result{ctx.OpFunctionCall(ctx.F32[2], ctx.f32x2_max_cas, base_index, value, ssbo)};
    return ctx.OpPackHalf2x16(ctx.U32[1], result);
}

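// Global (raw pointer) atomics are left unimplemented; the frontend is expected to have lowered
// global memory accesses onto storage buffers before this backend runs.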
Id EmitGlobalAtomicIAdd32(EmitContext&) {
    throw NotImplementedException("SPIR-V Instruction");
}

Id EmitGlobalAtomicSMin32(EmitContext&) {
    throw NotImplementedException("SPIR-V Instruction");
}

Id EmitGlobalAtomicUMin32(EmitContext&) {
    throw NotImplementedException("SPIR-V Instruction");
}

Id EmitGlobalAtomicSMax32(EmitContext&) {
    throw NotImplementedException("SPIR-V Instruction");
}

Id EmitGlobalAtomicUMax32(EmitContext&) {
    throw NotImplementedException("SPIR-V Instruction");
}

Id EmitGlobalAtomicInc32(EmitContext&) {
    throw NotImplementedException("SPIR-V Instruction");
}

Id EmitGlobalAtomicDec32(EmitContext&) {
    throw NotImplementedException("SPIR-V Instruction");
}

Id EmitGlobalAtomicAnd32(EmitContext&) {
    throw NotImplementedException("SPIR-V Instruction");
}

Id EmitGlobalAtomicOr32(EmitContext&) {
    throw NotImplementedException("SPIR-V Instruction");
}

Id EmitGlobalAtomicXor32(EmitContext&) {
    throw NotImplementedException("SPIR-V Instruction");
}

Id EmitGlobalAtomicExchange32(EmitContext&) {
    throw NotImplementedException("SPIR-V Instruction");
}

Id EmitGlobalAtomicIAdd64(EmitContext&) {
    throw NotImplementedException("SPIR-V Instruction");
}

Id EmitGlobalAtomicSMin64(EmitContext&) {
    throw NotImplementedException("SPIR-V Instruction");
}

Id EmitGlobalAtomicUMin64(EmitContext&) {
    throw NotImplementedException("SPIR-V Instruction");
}

Id EmitGlobalAtomicSMax64(EmitContext&) {
    throw NotImplementedException("SPIR-V Instruction");
}

Id EmitGlobalAtomicUMax64(EmitContext&) {
    throw NotImplementedException("SPIR-V Instruction");
}

Id EmitGlobalAtomicInc64(EmitContext&) {
    throw NotImplementedException("SPIR-V Instruction");
}

Id EmitGlobalAtomicDec64(EmitContext&) {
    throw NotImplementedException("SPIR-V Instruction");
}

Id EmitGlobalAtomicAnd64(EmitContext&) {
    throw NotImplementedException("SPIR-V Instruction");
}

Id EmitGlobalAtomicOr64(EmitContext&) {
    throw NotImplementedException("SPIR-V Instruction");
}

Id EmitGlobalAtomicXor64(EmitContext&) {
    throw NotImplementedException("SPIR-V Instruction");
}

Id EmitGlobalAtomicExchange64(EmitContext&) {
    throw NotImplementedException("SPIR-V Instruction");
}

Id EmitGlobalAtomicAddF32(EmitContext&) {
    throw NotImplementedException("SPIR-V Instruction");
}

Id EmitGlobalAtomicAddF16x2(EmitContext&) {
    throw NotImplementedException("SPIR-V Instruction");
}

Id EmitGlobalAtomicAddF32x2(EmitContext&) {
    throw NotImplementedException("SPIR-V Instruction");
}

Id EmitGlobalAtomicMinF16x2(EmitContext&) {
    throw NotImplementedException("SPIR-V Instruction");
}

Id EmitGlobalAtomicMinF32x2(EmitContext&) {
    throw NotImplementedException("SPIR-V Instruction");
}

Id EmitGlobalAtomicMaxF16x2(EmitContext&) {
    throw NotImplementedException("SPIR-V Instruction");
}

Id EmitGlobalAtomicMaxF32x2(EmitContext&) {
    throw NotImplementedException("SPIR-V Instruction");
}

} // namespace Shader::Backend::SPIRV