Diffstat (limited to 'src')
-rw-r--r--  src/common/atomic_ops.cpp  37
-rw-r--r--  src/common/atomic_ops.h    10
-rw-r--r--  src/core/memory.cpp        10
3 files changed, 30 insertions, 27 deletions
diff --git a/src/common/atomic_ops.cpp b/src/common/atomic_ops.cpp
index 1098e21ff..1612d0e67 100644
--- a/src/common/atomic_ops.cpp
+++ b/src/common/atomic_ops.cpp
@@ -14,50 +14,55 @@ namespace Common {
 
 #if _MSC_VER
 
-bool AtomicCompareAndSwap(u8 volatile* pointer, u8 value, u8 expected) {
-    u8 result = _InterlockedCompareExchange8((char*)pointer, value, expected);
+bool AtomicCompareAndSwap(volatile u8* pointer, u8 value, u8 expected) {
+    const u8 result =
+        _InterlockedCompareExchange8(reinterpret_cast<volatile char*>(pointer), value, expected);
     return result == expected;
 }
 
-bool AtomicCompareAndSwap(u16 volatile* pointer, u16 value, u16 expected) {
-    u16 result = _InterlockedCompareExchange16((short*)pointer, value, expected);
+bool AtomicCompareAndSwap(volatile u16* pointer, u16 value, u16 expected) {
+    const u16 result =
+        _InterlockedCompareExchange16(reinterpret_cast<volatile short*>(pointer), value, expected);
     return result == expected;
 }
 
-bool AtomicCompareAndSwap(u32 volatile* pointer, u32 value, u32 expected) {
-    u32 result = _InterlockedCompareExchange((long*)pointer, value, expected);
+bool AtomicCompareAndSwap(volatile u32* pointer, u32 value, u32 expected) {
+    const u32 result =
+        _InterlockedCompareExchange(reinterpret_cast<volatile long*>(pointer), value, expected);
     return result == expected;
 }
 
-bool AtomicCompareAndSwap(u64 volatile* pointer, u64 value, u64 expected) {
-    u64 result = _InterlockedCompareExchange64((__int64*)pointer, value, expected);
+bool AtomicCompareAndSwap(volatile u64* pointer, u64 value, u64 expected) {
+    const u64 result = _InterlockedCompareExchange64(reinterpret_cast<volatile __int64*>(pointer),
+                                                     value, expected);
     return result == expected;
 }
 
-bool AtomicCompareAndSwap(u64 volatile* pointer, u128 value, u128 expected) {
-    return _InterlockedCompareExchange128((__int64*)pointer, value[1], value[0],
-                                          (__int64*)expected.data()) != 0;
+bool AtomicCompareAndSwap(volatile u64* pointer, u128 value, u128 expected) {
+    return _InterlockedCompareExchange128(reinterpret_cast<volatile __int64*>(pointer), value[1],
+                                          value[0],
+                                          reinterpret_cast<__int64*>(expected.data())) != 0;
 }
 
 #else
 
-bool AtomicCompareAndSwap(u8 volatile* pointer, u8 value, u8 expected) {
+bool AtomicCompareAndSwap(volatile u8* pointer, u8 value, u8 expected) {
     return __sync_bool_compare_and_swap(pointer, expected, value);
 }
 
-bool AtomicCompareAndSwap(u16 volatile* pointer, u16 value, u16 expected) {
+bool AtomicCompareAndSwap(volatile u16* pointer, u16 value, u16 expected) {
     return __sync_bool_compare_and_swap(pointer, expected, value);
 }
 
-bool AtomicCompareAndSwap(u32 volatile* pointer, u32 value, u32 expected) {
+bool AtomicCompareAndSwap(volatile u32* pointer, u32 value, u32 expected) {
     return __sync_bool_compare_and_swap(pointer, expected, value);
 }
 
-bool AtomicCompareAndSwap(u64 volatile* pointer, u64 value, u64 expected) {
+bool AtomicCompareAndSwap(volatile u64* pointer, u64 value, u64 expected) {
     return __sync_bool_compare_and_swap(pointer, expected, value);
 }
 
-bool AtomicCompareAndSwap(u64 volatile* pointer, u128 value, u128 expected) {
+bool AtomicCompareAndSwap(volatile u64* pointer, u128 value, u128 expected) {
     unsigned __int128 value_a;
     unsigned __int128 expected_a;
     std::memcpy(&value_a, value.data(), sizeof(u128));
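For reference, this is how callers typically drive these helpers: a compare-and-swap retry loop. The sketch below is illustrative only and not part of this commit; AtomicIncrement is a hypothetical caller, and the includes assume the common/common_types.h and common/atomic_ops.h layout of this tree.

// Minimal sketch of a CAS retry loop built on Common::AtomicCompareAndSwap.
// Hypothetical example, not part of this commit.
#include "common/atomic_ops.h"
#include "common/common_types.h"

u32 AtomicIncrement(volatile u32& counter) {
    u32 expected;
    u32 desired;
    do {
        expected = counter;     // snapshot the current value
        desired = expected + 1; // compute the replacement
        // AtomicCompareAndSwap(pointer, value, expected) returns true only if
        // *pointer still held `expected` and was swapped to `value`.
    } while (!Common::AtomicCompareAndSwap(&counter, desired, expected));
    return desired;
}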
diff --git a/src/common/atomic_ops.h b/src/common/atomic_ops.h
index e6181d521..8d6b73c00 100644
--- a/src/common/atomic_ops.h
+++ b/src/common/atomic_ops.h
@@ -8,10 +8,10 @@
 
 namespace Common {
 
-bool AtomicCompareAndSwap(u8 volatile* pointer, u8 value, u8 expected);
-bool AtomicCompareAndSwap(u16 volatile* pointer, u16 value, u16 expected);
-bool AtomicCompareAndSwap(u32 volatile* pointer, u32 value, u32 expected);
-bool AtomicCompareAndSwap(u64 volatile* pointer, u64 value, u64 expected);
-bool AtomicCompareAndSwap(u64 volatile* pointer, u128 value, u128 expected);
+bool AtomicCompareAndSwap(volatile u8* pointer, u8 value, u8 expected);
+bool AtomicCompareAndSwap(volatile u16* pointer, u16 value, u16 expected);
+bool AtomicCompareAndSwap(volatile u32* pointer, u32 value, u32 expected);
+bool AtomicCompareAndSwap(volatile u64* pointer, u64 value, u64 expected);
+bool AtomicCompareAndSwap(volatile u64* pointer, u128 value, u128 expected);
 
 } // namespace Common
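The header change is purely a spelling preference: placing the cv-qualifier before or after the type names the same type, so no caller is affected. A quick standalone check, not part of the commit:

#include <type_traits>

// Both spellings denote the same pointer-to-volatile type.
static_assert(std::is_same_v<volatile unsigned char*, unsigned char volatile*>,
              "volatile u8* and u8 volatile* are identical types");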
diff --git a/src/core/memory.cpp b/src/core/memory.cpp
index 2c5588933..86d17c6cb 100644
--- a/src/core/memory.cpp
+++ b/src/core/memory.cpp
@@ -704,7 +704,7 @@ struct Memory::Impl {
         u8* page_pointer = current_page_table->pointers[vaddr >> PAGE_BITS];
         if (page_pointer != nullptr) {
             // NOTE: Avoid adding any extra logic to this fast-path block
-            T volatile* pointer = reinterpret_cast<T volatile*>(&page_pointer[vaddr]);
+            auto* pointer = reinterpret_cast<volatile T*>(&page_pointer[vaddr]);
             return Common::AtomicCompareAndSwap(pointer, data, expected);
         }
 
@@ -720,9 +720,8 @@ struct Memory::Impl {
         case Common::PageType::RasterizerCachedMemory: {
             u8* host_ptr{GetPointerFromRasterizerCachedMemory(vaddr)};
             system.GPU().InvalidateRegion(vaddr, sizeof(T));
-            T volatile* pointer = reinterpret_cast<T volatile*>(&host_ptr);
+            auto* pointer = reinterpret_cast<volatile T*>(&host_ptr);
             return Common::AtomicCompareAndSwap(pointer, data, expected);
-            break;
         }
         default:
             UNREACHABLE();
@@ -734,7 +733,7 @@ struct Memory::Impl {
         u8* const page_pointer = current_page_table->pointers[vaddr >> PAGE_BITS];
         if (page_pointer != nullptr) {
            // NOTE: Avoid adding any extra logic to this fast-path block
-            u64 volatile* pointer = reinterpret_cast<u64 volatile*>(&page_pointer[vaddr]);
+            auto* pointer = reinterpret_cast<volatile u64*>(&page_pointer[vaddr]);
             return Common::AtomicCompareAndSwap(pointer, data, expected);
         }
 
@@ -750,9 +749,8 @@ struct Memory::Impl {
         case Common::PageType::RasterizerCachedMemory: {
             u8* host_ptr{GetPointerFromRasterizerCachedMemory(vaddr)};
             system.GPU().InvalidateRegion(vaddr, sizeof(u128));
-            u64 volatile* pointer = reinterpret_cast<u64 volatile*>(&host_ptr);
+            auto* pointer = reinterpret_cast<volatile u64*>(&host_ptr);
             return Common::AtomicCompareAndSwap(pointer, data, expected);
-            break;
         }
         default:
             UNREACHABLE();
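Besides the volatile-placement cleanup, the two removed break; statements were dead code: each case block already leaves the switch through a return, so the break could never execute. A hypothetical sketch of the pattern (names are not from the tree):

// Hypothetical illustration of the removed pattern, not part of this commit.
bool DispatchExample(int page_type) {
    switch (page_type) {
    case 1: {
        return true; // control exits the function here...
        // break;    // ...so a break placed after the return can never run
    }
    default:
        return false;
    }
}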