Diffstat:
-rw-r--r--  src/shader_recompiler/backend/glasm/emit_glasm_instructions.h    31
-rw-r--r--  src/shader_recompiler/backend/glasm/emit_glasm_memory.cpp       107
-rw-r--r--  src/shader_recompiler/backend/glsl/emit_glsl_atomic.cpp          96
-rw-r--r--  src/shader_recompiler/backend/glsl/emit_glsl_instructions.h      31
-rw-r--r--  src/shader_recompiler/backend/spirv/emit_spirv_atomic.cpp       119
-rw-r--r--  src/shader_recompiler/backend/spirv/emit_spirv_instructions.h    30
-rw-r--r--  src/shader_recompiler/frontend/ir/microinstruction.cpp           19
-rw-r--r--  src/shader_recompiler/frontend/ir/opcodes.inc                    19
-rw-r--r--  src/shader_recompiler/ir_opt/collect_shader_info_pass.cpp        18
9 files changed, 469 insertions, 1 deletions
diff --git a/src/shader_recompiler/backend/glasm/emit_glasm_instructions.h b/src/shader_recompiler/backend/glasm/emit_glasm_instructions.h
index b48007856..5efbe4e6f 100644
--- a/src/shader_recompiler/backend/glasm/emit_glasm_instructions.h
+++ b/src/shader_recompiler/backend/glasm/emit_glasm_instructions.h
@@ -372,6 +372,8 @@ void EmitSharedAtomicExchange32(EmitContext& ctx, IR::Inst& inst, ScalarU32 poin
                                 ScalarU32 value);
 void EmitSharedAtomicExchange64(EmitContext& ctx, IR::Inst& inst, ScalarU32 pointer_offset,
                                 Register value);
+void EmitSharedAtomicExchange32x2(EmitContext& ctx, IR::Inst& inst, ScalarU32 pointer_offset,
+                                  Register value);
 void EmitStorageAtomicIAdd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                              ScalarU32 offset, ScalarU32 value);
 void EmitStorageAtomicSMin32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
@@ -412,6 +414,24 @@ void EmitStorageAtomicXor64(EmitContext& ctx, IR::Inst& inst, const IR::Value& b
                              ScalarU32 offset, Register value);
 void EmitStorageAtomicExchange64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                                  ScalarU32 offset, Register value);
+void EmitStorageAtomicIAdd32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                               ScalarU32 offset, Register value);
+void EmitStorageAtomicSMin32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                               ScalarU32 offset, Register value);
+void EmitStorageAtomicUMin32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                               ScalarU32 offset, Register value);
+void EmitStorageAtomicSMax32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                               ScalarU32 offset, Register value);
+void EmitStorageAtomicUMax32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                               ScalarU32 offset, Register value);
+void EmitStorageAtomicAnd32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                              ScalarU32 offset, Register value);
+void EmitStorageAtomicOr32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                             ScalarU32 offset, Register value);
+void EmitStorageAtomicXor32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                              ScalarU32 offset, Register value);
+void EmitStorageAtomicExchange32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                                   ScalarU32 offset, Register value);
 void EmitStorageAtomicAddF32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                              ScalarU32 offset, ScalarF32 value);
 void EmitStorageAtomicAddF16x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
@@ -448,6 +468,17 @@ void EmitGlobalAtomicAnd64(EmitContext& ctx);
 void EmitGlobalAtomicOr64(EmitContext& ctx);
 void EmitGlobalAtomicXor64(EmitContext& ctx);
 void EmitGlobalAtomicExchange64(EmitContext& ctx);
+void EmitGlobalAtomicIAdd32x2(EmitContext& ctx);
+void EmitGlobalAtomicSMin32x2(EmitContext& ctx);
+void EmitGlobalAtomicUMin32x2(EmitContext& ctx);
+void EmitGlobalAtomicSMax32x2(EmitContext& ctx);
+void EmitGlobalAtomicUMax32x2(EmitContext& ctx);
+void EmitGlobalAtomicInc32x2(EmitContext& ctx);
+void EmitGlobalAtomicDec32x2(EmitContext& ctx);
+void EmitGlobalAtomicAnd32x2(EmitContext& ctx);
+void EmitGlobalAtomicOr32x2(EmitContext& ctx);
+void EmitGlobalAtomicXor32x2(EmitContext& ctx);
+void EmitGlobalAtomicExchange32x2(EmitContext& ctx);
 void EmitGlobalAtomicAddF32(EmitContext& ctx);
 void EmitGlobalAtomicAddF16x2(EmitContext& ctx);
 void EmitGlobalAtomicAddF32x2(EmitContext& ctx);
diff --git a/src/shader_recompiler/backend/glasm/emit_glasm_memory.cpp b/src/shader_recompiler/backend/glasm/emit_glasm_memory.cpp
index f135b67f5..f0fd94a28 100644
--- a/src/shader_recompiler/backend/glasm/emit_glasm_memory.cpp
+++ b/src/shader_recompiler/backend/glasm/emit_glasm_memory.cpp
@@ -311,6 +311,13 @@ void EmitSharedAtomicExchange64(EmitContext& ctx, IR::Inst& inst, ScalarU32 poin
     ctx.LongAdd("ATOMS.EXCH.U64 {}.x,{},shared_mem[{}];", inst, value, pointer_offset);
 }
 
+void EmitSharedAtomicExchange32x2([[maybe_unused]] EmitContext& ctx,
+                                  [[maybe_unused]] IR::Inst& inst,
+                                  [[maybe_unused]] ScalarU32 pointer_offset,
+                                  [[maybe_unused]] Register value) {
+    throw NotImplementedException("GLASM instruction");
+}
+
 void EmitStorageAtomicIAdd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                              ScalarU32 offset, ScalarU32 value) {
     Atom(ctx, inst, binding, offset, value, "ADD", "U32");
@@ -411,6 +418,62 @@ void EmitStorageAtomicExchange64(EmitContext& ctx, IR::Inst& inst, const IR::Val
     Atom(ctx, inst, binding, offset, value, "EXCH", "U64");
 }
 
+void EmitStorageAtomicIAdd32x2([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
+                               [[maybe_unused]] const IR::Value& binding,
+                               [[maybe_unused]] ScalarU32 offset, [[maybe_unused]] Register value) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitStorageAtomicSMin32x2([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
+                               [[maybe_unused]] const IR::Value& binding,
+                               [[maybe_unused]] ScalarU32 offset, [[maybe_unused]] Register value) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitStorageAtomicUMin32x2([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
+                               [[maybe_unused]] const IR::Value& binding,
+                               [[maybe_unused]] ScalarU32 offset, [[maybe_unused]] Register value) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitStorageAtomicSMax32x2([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
+                               [[maybe_unused]] const IR::Value& binding,
+                               [[maybe_unused]] ScalarU32 offset, [[maybe_unused]] Register value) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitStorageAtomicUMax32x2([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
+                               [[maybe_unused]] const IR::Value& binding,
+                               [[maybe_unused]] ScalarU32 offset, [[maybe_unused]] Register value) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitStorageAtomicAnd32x2([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
+                              [[maybe_unused]] const IR::Value& binding,
+                              [[maybe_unused]] ScalarU32 offset, [[maybe_unused]] Register value) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitStorageAtomicOr32x2([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
+                             [[maybe_unused]] const IR::Value& binding,
+                             [[maybe_unused]] ScalarU32 offset, [[maybe_unused]] Register value) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitStorageAtomicXor32x2([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
+                              [[maybe_unused]] const IR::Value& binding,
+                              [[maybe_unused]] ScalarU32 offset, [[maybe_unused]] Register value) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitStorageAtomicExchange32x2([[maybe_unused]] EmitContext& ctx,
+                                   [[maybe_unused]] IR::Inst& inst,
+                                   [[maybe_unused]] const IR::Value& binding,
+                                   [[maybe_unused]] ScalarU32 offset,
+                                   [[maybe_unused]] Register value) {
+    throw NotImplementedException("GLASM instruction");
+}
+
 void EmitStorageAtomicAddF32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                              ScalarU32 offset, ScalarF32 value) {
     Atom(ctx, inst, binding, offset, value, "ADD", "F32");
@@ -537,6 +600,50 @@ void EmitGlobalAtomicExchange64(EmitContext&) {
     throw NotImplementedException("GLASM instruction");
 }
 
+void EmitGlobalAtomicIAdd32x2(EmitContext&) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitGlobalAtomicSMin32x2(EmitContext&) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitGlobalAtomicUMin32x2(EmitContext&) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitGlobalAtomicSMax32x2(EmitContext&) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitGlobalAtomicUMax32x2(EmitContext&) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitGlobalAtomicInc32x2(EmitContext&) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitGlobalAtomicDec32x2(EmitContext&) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitGlobalAtomicAnd32x2(EmitContext&) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitGlobalAtomicOr32x2(EmitContext&) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitGlobalAtomicXor32x2(EmitContext&) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitGlobalAtomicExchange32x2(EmitContext&) {
+    throw NotImplementedException("GLASM instruction");
+}
+
 void EmitGlobalAtomicAddF32(EmitContext&) {
     throw NotImplementedException("GLASM instruction");
 }
diff --git a/src/shader_recompiler/backend/glsl/emit_glsl_atomic.cpp b/src/shader_recompiler/backend/glsl/emit_glsl_atomic.cpp
index dc377b053..782e0e496 100644
--- a/src/shader_recompiler/backend/glsl/emit_glsl_atomic.cpp
+++ b/src/shader_recompiler/backend/glsl/emit_glsl_atomic.cpp
@@ -105,6 +105,13 @@ void EmitSharedAtomicExchange64(EmitContext& ctx, IR::Inst& inst, std::string_vi
             pointer_offset, value, pointer_offset, value);
 }
 
+void EmitSharedAtomicExchange32x2(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset,
+                                  std::string_view value) {
+    LOG_WARNING(Shader_GLSL, "Int64 atomics not supported, fallback to non-atomic");
+    ctx.AddU32x2("{}=uvec2(smem[{}>>2],smem[({}+4)>>2]);", inst, pointer_offset, pointer_offset);
+    ctx.Add("smem[{}>>2]={}.x;smem[({}+4)>>2]={}.y;", pointer_offset, value, pointer_offset, value);
+}
+
 void EmitStorageAtomicIAdd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                              const IR::Value& offset, std::string_view value) {
     ctx.AddU32("{}=atomicAdd({}_ssbo{}[{}>>2],{});", inst, ctx.stage_name, binding.U32(),
@@ -265,6 +272,51 @@ void EmitStorageAtomicExchange64(EmitContext& ctx, IR::Inst& inst, const IR::Val
                ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), value);
 }
 
+void EmitStorageAtomicIAdd32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                               const IR::Value& offset, std::string_view value) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitStorageAtomicSMin32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                               const IR::Value& offset, std::string_view value) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitStorageAtomicUMin32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                               const IR::Value& offset, std::string_view value) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitStorageAtomicSMax32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                               const IR::Value& offset, std::string_view value) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitStorageAtomicUMax32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                               const IR::Value& offset, std::string_view value) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitStorageAtomicAnd32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                              const IR::Value& offset, std::string_view value) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitStorageAtomicOr32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                             const IR::Value& offset, std::string_view value) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitStorageAtomicXor32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                              const IR::Value& offset, std::string_view value) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitStorageAtomicExchange32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                                   const IR::Value& offset, std::string_view value) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
 void EmitStorageAtomicAddF32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                              const IR::Value& offset, std::string_view value) {
     SsboCasFunctionF32(ctx, inst, binding, offset, value, "CasFloatAdd");
@@ -388,6 +440,50 @@ void EmitGlobalAtomicExchange64(EmitContext&) {
     throw NotImplementedException("GLSL Instrucion");
 }
 
+void EmitGlobalAtomicIAdd32x2(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicSMin32x2(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicUMin32x2(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicSMax32x2(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicUMax32x2(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicInc32x2(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicDec32x2(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicAnd32x2(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicOr32x2(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicXor32x2(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicExchange32x2(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
 void EmitGlobalAtomicAddF32(EmitContext&) {
     throw NotImplementedException("GLSL Instrucion");
 }
diff --git a/src/shader_recompiler/backend/glsl/emit_glsl_instructions.h b/src/shader_recompiler/backend/glsl/emit_glsl_instructions.h
index 6cabbc717..704baddc9 100644
--- a/src/shader_recompiler/backend/glsl/emit_glsl_instructions.h
+++ b/src/shader_recompiler/backend/glsl/emit_glsl_instructions.h
@@ -442,6 +442,8 @@ void EmitSharedAtomicExchange32(EmitContext& ctx, IR::Inst& inst, std::string_vi
                                 std::string_view value);
 void EmitSharedAtomicExchange64(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset,
                                 std::string_view value);
+void EmitSharedAtomicExchange32x2(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset,
+                                  std::string_view value);
 void EmitStorageAtomicIAdd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                              const IR::Value& offset, std::string_view value);
 void EmitStorageAtomicSMin32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
@@ -482,6 +484,24 @@ void EmitStorageAtomicXor64(EmitContext& ctx, IR::Inst& inst, const IR::Value& b
                              const IR::Value& offset, std::string_view value);
 void EmitStorageAtomicExchange64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                                  const IR::Value& offset, std::string_view value);
+void EmitStorageAtomicIAdd32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                               const IR::Value& offset, std::string_view value);
+void EmitStorageAtomicSMin32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                               const IR::Value& offset, std::string_view value);
+void EmitStorageAtomicUMin32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                               const IR::Value& offset, std::string_view value);
+void EmitStorageAtomicSMax32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                               const IR::Value& offset, std::string_view value);
+void EmitStorageAtomicUMax32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                               const IR::Value& offset, std::string_view value);
+void EmitStorageAtomicAnd32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                              const IR::Value& offset, std::string_view value);
+void EmitStorageAtomicOr32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                             const IR::Value& offset, std::string_view value);
+void EmitStorageAtomicXor32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                              const IR::Value& offset, std::string_view value);
+void EmitStorageAtomicExchange32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                                   const IR::Value& offset, std::string_view value);
 void EmitStorageAtomicAddF32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                              const IR::Value& offset, std::string_view value);
 void EmitStorageAtomicAddF16x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
@@ -518,6 +538,17 @@ void EmitGlobalAtomicAnd64(EmitContext& ctx);
 void EmitGlobalAtomicOr64(EmitContext& ctx);
 void EmitGlobalAtomicXor64(EmitContext& ctx);
 void EmitGlobalAtomicExchange64(EmitContext& ctx);
+void EmitGlobalAtomicIAdd32x2(EmitContext& ctx);
+void EmitGlobalAtomicSMin32x2(EmitContext& ctx);
+void EmitGlobalAtomicUMin32x2(EmitContext& ctx);
+void EmitGlobalAtomicSMax32x2(EmitContext& ctx);
+void EmitGlobalAtomicUMax32x2(EmitContext& ctx);
+void EmitGlobalAtomicInc32x2(EmitContext& ctx);
+void EmitGlobalAtomicDec32x2(EmitContext& ctx);
+void EmitGlobalAtomicAnd32x2(EmitContext& ctx);
+void EmitGlobalAtomicOr32x2(EmitContext& ctx);
+void EmitGlobalAtomicXor32x2(EmitContext& ctx);
+void EmitGlobalAtomicExchange32x2(EmitContext& ctx);
 void EmitGlobalAtomicAddF32(EmitContext& ctx);
 void EmitGlobalAtomicAddF16x2(EmitContext& ctx);
 void EmitGlobalAtomicAddF32x2(EmitContext& ctx);
diff --git a/src/shader_recompiler/backend/spirv/emit_spirv_atomic.cpp b/src/shader_recompiler/backend/spirv/emit_spirv_atomic.cpp
index 46ba52a25..d3cbb14a9 100644
--- a/src/shader_recompiler/backend/spirv/emit_spirv_atomic.cpp
+++ b/src/shader_recompiler/backend/spirv/emit_spirv_atomic.cpp
@@ -82,6 +82,17 @@ Id StorageAtomicU64(EmitContext& ctx, const IR::Value& binding, const IR::Value&
     ctx.OpStore(pointer, ctx.OpBitcast(ctx.U32[2], result));
     return original_value;
 }
+
+Id StorageAtomicU32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, Id value,
+                      Id (Sirit::Module::*non_atomic_func)(Id, Id, Id)) {
+    LOG_WARNING(Shader_SPIRV, "Int64 atomics not supported, fallback to non-atomic");
+    const Id pointer{StoragePointer(ctx, ctx.storage_types.U32x2, &StorageDefinitions::U32x2,
+                                    binding, offset, sizeof(u32[2]))};
+    const Id original_value{ctx.OpLoad(ctx.U32[2], pointer)};
+    const Id result{(ctx.*non_atomic_func)(ctx.U32[2], value, original_value)};
+    ctx.OpStore(pointer, result);
+    return original_value;
+}
 } // Anonymous namespace
 
 Id EmitSharedAtomicIAdd32(EmitContext& ctx, Id offset, Id value) {
@@ -141,7 +152,7 @@ Id EmitSharedAtomicExchange64(EmitContext& ctx, Id offset, Id value) {
         const auto [scope, semantics]{AtomicArgs(ctx)};
         return ctx.OpAtomicExchange(ctx.U64, pointer, scope, semantics, value);
     }
-    LOG_ERROR(Shader_SPIRV, "Int64 atomics not supported, fallback to non-atomic");
+    LOG_WARNING(Shader_SPIRV, "Int64 atomics not supported, fallback to non-atomic");
     const Id pointer_1{SharedPointer(ctx, offset, 0)};
     const Id pointer_2{SharedPointer(ctx, offset, 1)};
     const Id value_1{ctx.OpLoad(ctx.U32[1], pointer_1)};
@@ -152,6 +163,18 @@ Id EmitSharedAtomicExchange64(EmitContext& ctx, Id offset, Id value) {
     return ctx.OpBitcast(ctx.U64, ctx.OpCompositeConstruct(ctx.U32[2], value_1, value_2));
 }
 
+Id EmitSharedAtomicExchange32x2(EmitContext& ctx, Id offset, Id value) {
+    LOG_WARNING(Shader_SPIRV, "Int64 atomics not supported, fallback to non-atomic");
+    const Id pointer_1{SharedPointer(ctx, offset, 0)};
+    const Id pointer_2{SharedPointer(ctx, offset, 1)};
+    const Id value_1{ctx.OpLoad(ctx.U32[1], pointer_1)};
+    const Id value_2{ctx.OpLoad(ctx.U32[1], pointer_2)};
+    const Id new_vector{ctx.OpBitcast(ctx.U32[2], value)};
+    ctx.OpStore(pointer_1, ctx.OpCompositeExtract(ctx.U32[1], new_vector, 0U));
+    ctx.OpStore(pointer_2, ctx.OpCompositeExtract(ctx.U32[1], new_vector, 1U));
+    return ctx.OpCompositeConstruct(ctx.U32[2], value_1, value_2);
+}
+
 Id EmitStorageAtomicIAdd32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                            Id value) {
     return StorageAtomicU32(ctx, binding, offset, value, &Sirit::Module::OpAtomicIAdd);
@@ -275,6 +298,56 @@ Id EmitStorageAtomicExchange64(EmitContext& ctx, const IR::Value& binding, const
     return original;
 }
 
+Id EmitStorageAtomicIAdd32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+                             Id value) {
+    return StorageAtomicU32x2(ctx, binding, offset, value, &Sirit::Module::OpIAdd);
+}
+
+Id EmitStorageAtomicSMin32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+                             Id value) {
+    return StorageAtomicU32x2(ctx, binding, offset, value, &Sirit::Module::OpSMin);
+}
+
+Id EmitStorageAtomicUMin32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+                             Id value) {
+    return StorageAtomicU32x2(ctx, binding, offset, value, &Sirit::Module::OpUMin);
+}
+
+Id EmitStorageAtomicSMax32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+                             Id value) {
+    return StorageAtomicU32x2(ctx, binding, offset, value, &Sirit::Module::OpSMax);
+}
+
+Id EmitStorageAtomicUMax32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+                             Id value) {
+    return StorageAtomicU32x2(ctx, binding, offset, value, &Sirit::Module::OpUMax);
+}
+
+Id EmitStorageAtomicAnd32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+                            Id value) {
+    return StorageAtomicU32x2(ctx, binding, offset, value, &Sirit::Module::OpBitwiseAnd);
+}
+
+Id EmitStorageAtomicOr32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+                           Id value) {
+    return StorageAtomicU32x2(ctx, binding, offset, value, &Sirit::Module::OpBitwiseOr);
+}
+
+Id EmitStorageAtomicXor32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+                            Id value) {
+    return StorageAtomicU32x2(ctx, binding, offset, value, &Sirit::Module::OpBitwiseXor);
+}
+
+Id EmitStorageAtomicExchange32x2(EmitContext& ctx, const IR::Value& binding,
+                                 const IR::Value& offset, Id value) {
+    LOG_WARNING(Shader_SPIRV, "Int64 atomics not supported, fallback to non-atomic");
+    const Id pointer{StoragePointer(ctx, ctx.storage_types.U32x2, &StorageDefinitions::U32x2,
+                                    binding, offset, sizeof(u32[2]))};
+    const Id original{ctx.OpLoad(ctx.U32[2], pointer)};
+    ctx.OpStore(pointer, value);
+    return original;
+}
+
 Id EmitStorageAtomicAddF32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                            Id value) {
     const Id ssbo{ctx.ssbos[binding.U32()].U32};
@@ -418,6 +491,50 @@ Id EmitGlobalAtomicExchange64(EmitContext&) {
     throw NotImplementedException("SPIR-V Instruction");
 }
 
+Id EmitGlobalAtomicIAdd32x2(EmitContext&) {
+    throw NotImplementedException("SPIR-V Instruction");
+}
+
+Id EmitGlobalAtomicSMin32x2(EmitContext&) {
+    throw NotImplementedException("SPIR-V Instruction");
+}
+
+Id EmitGlobalAtomicUMin32x2(EmitContext&) {
+    throw NotImplementedException("SPIR-V Instruction");
+}
+
+Id EmitGlobalAtomicSMax32x2(EmitContext&) {
+    throw NotImplementedException("SPIR-V Instruction");
+}
+
+Id EmitGlobalAtomicUMax32x2(EmitContext&) {
+    throw NotImplementedException("SPIR-V Instruction");
+}
+
+Id EmitGlobalAtomicInc32x2(EmitContext&) {
+    throw NotImplementedException("SPIR-V Instruction");
+}
+
+Id EmitGlobalAtomicDec32x2(EmitContext&) {
+    throw NotImplementedException("SPIR-V Instruction");
+}
+
+Id EmitGlobalAtomicAnd32x2(EmitContext&) {
+    throw NotImplementedException("SPIR-V Instruction");
+}
+
+Id EmitGlobalAtomicOr32x2(EmitContext&) {
+    throw NotImplementedException("SPIR-V Instruction");
+}
+
+Id EmitGlobalAtomicXor32x2(EmitContext&) {
+    throw NotImplementedException("SPIR-V Instruction");
+}
+
+Id EmitGlobalAtomicExchange32x2(EmitContext&) {
+    throw NotImplementedException("SPIR-V Instruction");
+}
+
 Id EmitGlobalAtomicAddF32(EmitContext&) {
     throw NotImplementedException("SPIR-V Instruction");
 }
diff --git a/src/shader_recompiler/backend/spirv/emit_spirv_instructions.h b/src/shader_recompiler/backend/spirv/emit_spirv_instructions.h
index 887112deb..f263b41b0 100644
--- a/src/shader_recompiler/backend/spirv/emit_spirv_instructions.h
+++ b/src/shader_recompiler/backend/spirv/emit_spirv_instructions.h
@@ -335,6 +335,7 @@ Id EmitSharedAtomicOr32(EmitContext& ctx, Id pointer_offset, Id value);
 Id EmitSharedAtomicXor32(EmitContext& ctx, Id pointer_offset, Id value);
 Id EmitSharedAtomicExchange32(EmitContext& ctx, Id pointer_offset, Id value);
 Id EmitSharedAtomicExchange64(EmitContext& ctx, Id pointer_offset, Id value);
+Id EmitSharedAtomicExchange32x2(EmitContext& ctx, Id pointer_offset, Id value);
 Id EmitStorageAtomicIAdd32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                            Id value);
 Id EmitStorageAtomicSMin32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
@@ -375,6 +376,24 @@ Id EmitStorageAtomicXor64(EmitContext& ctx, const IR::Value& binding, const IR::
                           Id value);
 Id EmitStorageAtomicExchange64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                                Id value);
+Id EmitStorageAtomicIAdd32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+                             Id value);
+Id EmitStorageAtomicSMin32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+                             Id value);
+Id EmitStorageAtomicUMin32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+                             Id value);
+Id EmitStorageAtomicSMax32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+                             Id value);
+Id EmitStorageAtomicUMax32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+                             Id value);
+Id EmitStorageAtomicAnd32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+                            Id value);
+Id EmitStorageAtomicOr32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+                           Id value);
+Id EmitStorageAtomicXor32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+                            Id value);
+Id EmitStorageAtomicExchange32x2(EmitContext& ctx, const IR::Value& binding,
+                                 const IR::Value& offset, Id value);
 Id EmitStorageAtomicAddF32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                            Id value);
 Id EmitStorageAtomicAddF16x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
@@ -411,6 +430,17 @@ Id EmitGlobalAtomicAnd64(EmitContext& ctx);
 Id EmitGlobalAtomicOr64(EmitContext& ctx);
 Id EmitGlobalAtomicXor64(EmitContext& ctx);
 Id EmitGlobalAtomicExchange64(EmitContext& ctx);
+Id EmitGlobalAtomicIAdd32x2(EmitContext& ctx);
+Id EmitGlobalAtomicSMin32x2(EmitContext& ctx);
+Id EmitGlobalAtomicUMin32x2(EmitContext& ctx);
+Id EmitGlobalAtomicSMax32x2(EmitContext& ctx);
+Id EmitGlobalAtomicUMax32x2(EmitContext& ctx);
+Id EmitGlobalAtomicInc32x2(EmitContext& ctx);
+Id EmitGlobalAtomicDec32x2(EmitContext& ctx);
+Id EmitGlobalAtomicAnd32x2(EmitContext& ctx);
+Id EmitGlobalAtomicOr32x2(EmitContext& ctx);
+Id EmitGlobalAtomicXor32x2(EmitContext& ctx);
+Id EmitGlobalAtomicExchange32x2(EmitContext& ctx);
 Id EmitGlobalAtomicAddF32(EmitContext& ctx);
 Id EmitGlobalAtomicAddF16x2(EmitContext& ctx);
 Id EmitGlobalAtomicAddF32x2(EmitContext& ctx);
diff --git a/src/shader_recompiler/frontend/ir/microinstruction.cpp b/src/shader_recompiler/frontend/ir/microinstruction.cpp
index 97e2bf6af..631446cf7 100644
--- a/src/shader_recompiler/frontend/ir/microinstruction.cpp
+++ b/src/shader_recompiler/frontend/ir/microinstruction.cpp
@@ -118,6 +118,7 @@ bool Inst::MayHaveSideEffects() const noexcept {
     case Opcode::SharedAtomicXor32:
     case Opcode::SharedAtomicExchange32:
     case Opcode::SharedAtomicExchange64:
+    case Opcode::SharedAtomicExchange32x2:
     case Opcode::GlobalAtomicIAdd32:
     case Opcode::GlobalAtomicSMin32:
     case Opcode::GlobalAtomicUMin32:
@@ -138,6 +139,15 @@ bool Inst::MayHaveSideEffects() const noexcept {
     case Opcode::GlobalAtomicOr64:
     case Opcode::GlobalAtomicXor64:
     case Opcode::GlobalAtomicExchange64:
+    case Opcode::GlobalAtomicIAdd32x2:
+    case Opcode::GlobalAtomicSMin32x2:
+    case Opcode::GlobalAtomicUMin32x2:
+    case Opcode::GlobalAtomicSMax32x2:
+    case Opcode::GlobalAtomicUMax32x2:
+    case Opcode::GlobalAtomicAnd32x2:
+    case Opcode::GlobalAtomicOr32x2:
+    case Opcode::GlobalAtomicXor32x2:
+    case Opcode::GlobalAtomicExchange32x2:
     case Opcode::GlobalAtomicAddF32:
     case Opcode::GlobalAtomicAddF16x2:
     case Opcode::GlobalAtomicAddF32x2:
@@ -165,6 +175,15 @@ bool Inst::MayHaveSideEffects() const noexcept {
     case Opcode::StorageAtomicOr64:
     case Opcode::StorageAtomicXor64:
     case Opcode::StorageAtomicExchange64:
+    case Opcode::StorageAtomicIAdd32x2:
+    case Opcode::StorageAtomicSMin32x2:
+    case Opcode::StorageAtomicUMin32x2:
+    case Opcode::StorageAtomicSMax32x2:
+    case Opcode::StorageAtomicUMax32x2:
+    case Opcode::StorageAtomicAnd32x2:
+    case Opcode::StorageAtomicOr32x2:
+    case Opcode::StorageAtomicXor32x2:
+    case Opcode::StorageAtomicExchange32x2:
     case Opcode::StorageAtomicAddF32:
     case Opcode::StorageAtomicAddF16x2:
     case Opcode::StorageAtomicAddF32x2:
diff --git a/src/shader_recompiler/frontend/ir/opcodes.inc b/src/shader_recompiler/frontend/ir/opcodes.inc
index b94ce7406..8da5df97c 100644
--- a/src/shader_recompiler/frontend/ir/opcodes.inc
+++ b/src/shader_recompiler/frontend/ir/opcodes.inc
@@ -341,6 +341,7 @@ OPCODE(SharedAtomicOr32, U32, U32,
 OPCODE(SharedAtomicXor32, U32, U32, U32, )
 OPCODE(SharedAtomicExchange32, U32, U32, U32, )
 OPCODE(SharedAtomicExchange64, U64, U32, U64, )
+OPCODE(SharedAtomicExchange32x2, U32x2, U32, U32x2, )
 
 OPCODE(GlobalAtomicIAdd32, U32, U64, U32, )
 OPCODE(GlobalAtomicSMin32, U32, U64, U32, )
@@ -362,6 +363,15 @@ OPCODE(GlobalAtomicAnd64, U64, U64,
 OPCODE(GlobalAtomicOr64, U64, U64, U64, )
 OPCODE(GlobalAtomicXor64, U64, U64, U64, )
 OPCODE(GlobalAtomicExchange64, U64, U64, U64, )
+OPCODE(GlobalAtomicIAdd32x2, U32x2, U64, U32x2, )
+OPCODE(GlobalAtomicSMin32x2, U32x2, U64, U32x2, )
+OPCODE(GlobalAtomicUMin32x2, U32x2, U64, U32x2, )
+OPCODE(GlobalAtomicSMax32x2, U32x2, U64, U32x2, )
+OPCODE(GlobalAtomicUMax32x2, U32x2, U64, U32x2, )
+OPCODE(GlobalAtomicAnd32x2, U32x2, U64, U32x2, )
+OPCODE(GlobalAtomicOr32x2, U32x2, U64, U32x2, )
+OPCODE(GlobalAtomicXor32x2, U32x2, U64, U32x2, )
+OPCODE(GlobalAtomicExchange32x2, U32x2, U64, U32x2, )
 OPCODE(GlobalAtomicAddF32, F32, U64, F32, )
 OPCODE(GlobalAtomicAddF16x2, U32, U64, F16x2, )
 OPCODE(GlobalAtomicAddF32x2, U32, U64, F32x2, )
@@ -390,6 +400,15 @@ OPCODE(StorageAtomicAnd64, U64, U32,
 OPCODE(StorageAtomicOr64, U64, U32, U32, U64, )
 OPCODE(StorageAtomicXor64, U64, U32, U32, U64, )
 OPCODE(StorageAtomicExchange64, U64, U32, U32, U64, )
+OPCODE(StorageAtomicIAdd32x2, U32x2, U32, U32, U32x2, )
+OPCODE(StorageAtomicSMin32x2, U32x2, U32, U32, U32x2, )
+OPCODE(StorageAtomicUMin32x2, U32x2, U32, U32, U32x2, )
+OPCODE(StorageAtomicSMax32x2, U32x2, U32, U32, U32x2, )
+OPCODE(StorageAtomicUMax32x2, U32x2, U32, U32, U32x2, )
+OPCODE(StorageAtomicAnd32x2, U32x2, U32, U32, U32x2, )
+OPCODE(StorageAtomicOr32x2, U32x2, U32, U32, U32x2, )
+OPCODE(StorageAtomicXor32x2, U32x2, U32, U32, U32x2, )
+OPCODE(StorageAtomicExchange32x2, U32x2, U32, U32, U32x2, )
 OPCODE(StorageAtomicAddF32, F32, U32, U32, F32, )
 OPCODE(StorageAtomicAddF16x2, U32, U32, U32, F16x2, )
 OPCODE(StorageAtomicAddF32x2, U32, U32, U32, F32x2, )
diff --git a/src/shader_recompiler/ir_opt/collect_shader_info_pass.cpp b/src/shader_recompiler/ir_opt/collect_shader_info_pass.cpp
index b6a20f904..bfd2ae650 100644
--- a/src/shader_recompiler/ir_opt/collect_shader_info_pass.cpp
+++ b/src/shader_recompiler/ir_opt/collect_shader_info_pass.cpp
@@ -360,6 +360,15 @@ void VisitUsages(Info& info, IR::Inst& inst) {
     case IR::Opcode::GlobalAtomicOr64:
     case IR::Opcode::GlobalAtomicXor64:
     case IR::Opcode::GlobalAtomicExchange64:
+    case IR::Opcode::GlobalAtomicIAdd32x2:
+    case IR::Opcode::GlobalAtomicSMin32x2:
+    case IR::Opcode::GlobalAtomicUMin32x2:
+    case IR::Opcode::GlobalAtomicSMax32x2:
+    case IR::Opcode::GlobalAtomicUMax32x2:
+    case IR::Opcode::GlobalAtomicAnd32x2:
+    case IR::Opcode::GlobalAtomicOr32x2:
+    case IR::Opcode::GlobalAtomicXor32x2:
+    case IR::Opcode::GlobalAtomicExchange32x2:
     case IR::Opcode::GlobalAtomicAddF32:
     case IR::Opcode::GlobalAtomicAddF16x2:
     case IR::Opcode::GlobalAtomicAddF32x2:
@@ -597,6 +606,15 @@ void VisitUsages(Info& info, IR::Inst& inst) {
         break;
     case IR::Opcode::LoadStorage64:
     case IR::Opcode::WriteStorage64:
+    case IR::Opcode::StorageAtomicIAdd32x2:
+    case IR::Opcode::StorageAtomicSMin32x2:
+    case IR::Opcode::StorageAtomicUMin32x2:
+    case IR::Opcode::StorageAtomicSMax32x2:
+    case IR::Opcode::StorageAtomicUMax32x2:
+    case IR::Opcode::StorageAtomicAnd32x2:
+    case IR::Opcode::StorageAtomicOr32x2:
+    case IR::Opcode::StorageAtomicXor32x2:
+    case IR::Opcode::StorageAtomicExchange32x2:
         info.used_storage_buffer_types |= IR::Type::U32x2;
         break;
     case IR::Opcode::LoadStorage128: