summaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
authorGravatar Fernando Sahmkow2020-03-07 18:59:42 -0400
committerGravatar Fernando Sahmkow2020-06-27 11:35:37 -0400
commitcd1c38be8d15d3caf52f566a9e8dc20504c61068 (patch)
tree2fed02ffd4f2151dfca14ddb33ef1939eaee2fba /src
parentSVC: WaitSynchronization add Termination Pending Result. (diff)
downloadyuzu-cd1c38be8d15d3caf52f566a9e8dc20504c61068.tar.gz
yuzu-cd1c38be8d15d3caf52f566a9e8dc20504c61068.tar.xz
yuzu-cd1c38be8d15d3caf52f566a9e8dc20504c61068.zip
ARM/Memory: Correct Exclusive Monitor and Implement Exclusive Memory Writes.
Diffstat (limited to 'src')
-rw-r--r--src/common/CMakeLists.txt2
-rw-r--r--src/common/atomic_ops.cpp70
-rw-r--r--src/common/atomic_ops.h17
-rw-r--r--src/core/arm/dynarmic/arm_dynarmic_64.cpp66
-rw-r--r--src/core/arm/dynarmic/arm_dynarmic_64.h6
-rw-r--r--src/core/arm/exclusive_monitor.h6
-rw-r--r--src/core/hle/kernel/address_arbiter.cpp6
-rw-r--r--src/core/hle/kernel/mutex.cpp5
-rw-r--r--src/core/hle/kernel/svc.cpp2
-rw-r--r--src/core/hle/kernel/thread.cpp6
-rw-r--r--src/core/memory.cpp98
-rw-r--r--src/core/memory.h65
12 files changed, 325 insertions, 24 deletions
diff --git a/src/common/CMakeLists.txt b/src/common/CMakeLists.txt
index 3cc17d0e9..d120c8d3d 100644
--- a/src/common/CMakeLists.txt
+++ b/src/common/CMakeLists.txt
@@ -98,6 +98,8 @@ add_library(common STATIC
98 algorithm.h 98 algorithm.h
99 alignment.h 99 alignment.h
100 assert.h 100 assert.h
101 atomic_ops.cpp
102 atomic_ops.h
101 detached_tasks.cpp 103 detached_tasks.cpp
102 detached_tasks.h 104 detached_tasks.h
103 bit_field.h 105 bit_field.h
diff --git a/src/common/atomic_ops.cpp b/src/common/atomic_ops.cpp
new file mode 100644
index 000000000..65cdfb4fd
--- /dev/null
+++ b/src/common/atomic_ops.cpp
@@ -0,0 +1,70 @@
1// Copyright 2020 yuzu Emulator Project
2// Licensed under GPLv2 or any later version
3// Refer to the license.txt file included.
4
5#include <cstring>
6
7#include "common/atomic_ops.h"
8
9#if _MSC_VER
10#include <intrin.h>
11#endif
12
13namespace Common {
14
15#if _MSC_VER
16
17bool AtomicCompareAndSwap(u8 volatile* pointer, u8 value, u8 expected) {
18 u8 result = _InterlockedCompareExchange8((char*)pointer, value, expected);
19 return result == expected;
20}
21
22bool AtomicCompareAndSwap(u16 volatile* pointer, u16 value, u16 expected) {
23 u16 result = _InterlockedCompareExchange16((short*)pointer, value, expected);
24 return result == expected;
25}
26
27bool AtomicCompareAndSwap(u32 volatile* pointer, u32 value, u32 expected) {
28 u32 result = _InterlockedCompareExchange((long*)pointer, value, expected);
29 return result == expected;
30}
31
32bool AtomicCompareAndSwap(u64 volatile* pointer, u64 value, u64 expected) {
33 u64 result = _InterlockedCompareExchange64((__int64*)pointer, value, expected);
34 return result == expected;
35}
36
37bool AtomicCompareAndSwap(u64 volatile* pointer, u128 value, u128 expected) {
38 return _InterlockedCompareExchange128((__int64*)pointer, value[1], value[0], (__int64*)expected.data()) != 0;
39}
40
41
42#else
43
44bool AtomicCompareAndSwap(u8 volatile* pointer, u8 value, u8 expected) {
45 return __sync_bool_compare_and_swap (pointer, value, expected);
46}
47
48bool AtomicCompareAndSwap(u16 volatile* pointer, u16 value, u16 expected) {
49 return __sync_bool_compare_and_swap (pointer, value, expected);
50}
51
52bool AtomicCompareAndSwap(u32 volatile* pointer, u32 value, u32 expected) {
53 return __sync_bool_compare_and_swap (pointer, value, expected);
54}
55
56bool AtomicCompareAndSwap(u64 volatile* pointer, u64 value, u64 expected) {
57 return __sync_bool_compare_and_swap (pointer, value, expected);
58}
59
60bool AtomicCompareAndSwap(u64 volatile* pointer, u128 value, u128 expected) {
61 unsigned __int128 value_a;
62 unsigned __int128 expected_a;
63 std::memcpy(&value_a, value.data(), sizeof(u128));
64 std::memcpy(&expected_a, expected.data(), sizeof(u128));
65 return __sync_bool_compare_and_swap ((unsigned __int128*)pointer, value_a, expected_a);
66}
67
68#endif
69
70} // namespace Common
diff --git a/src/common/atomic_ops.h b/src/common/atomic_ops.h
new file mode 100644
index 000000000..22cb3a402
--- /dev/null
+++ b/src/common/atomic_ops.h
@@ -0,0 +1,17 @@
1// Copyright 2020 yuzu Emulator Project
2// Licensed under GPLv2 or any later version
3// Refer to the license.txt file included.
4
5#pragma once
6
7#include "common/common_types.h"
8
9namespace Common {
10
11bool AtomicCompareAndSwap(u8 volatile * pointer, u8 value, u8 expected);
12bool AtomicCompareAndSwap(u16 volatile* pointer, u16 value, u16 expected);
13bool AtomicCompareAndSwap(u32 volatile* pointer, u32 value, u32 expected);
14bool AtomicCompareAndSwap(u64 volatile* pointer, u64 value, u64 expected);
15bool AtomicCompareAndSwap(u64 volatile* pointer, u128 value, u128 expected);
16
17} // namespace Common
diff --git a/src/core/arm/dynarmic/arm_dynarmic_64.cpp b/src/core/arm/dynarmic/arm_dynarmic_64.cpp
index 5e316ffd4..a22c22bf0 100644
--- a/src/core/arm/dynarmic/arm_dynarmic_64.cpp
+++ b/src/core/arm/dynarmic/arm_dynarmic_64.cpp
@@ -66,6 +66,22 @@ public:
66 memory.Write64(vaddr + 8, value[1]); 66 memory.Write64(vaddr + 8, value[1]);
67 } 67 }
68 68
    // Dynarmic exclusive-store callbacks. Each forwards to the core Memory
    // subsystem, which performs the store only if the location still holds
    // `expected`; the bool result tells the JIT whether the STREX-style write
    // succeeded or the exclusive loop must retry.
    bool MemoryWriteExclusive8(u64 vaddr, std::uint8_t value, std::uint8_t expected) override {
        return parent.system.Memory().WriteExclusive8(vaddr, value, expected);
    }
    bool MemoryWriteExclusive16(u64 vaddr, std::uint16_t value, std::uint16_t expected) override {
        return parent.system.Memory().WriteExclusive16(vaddr, value, expected);
    }
    bool MemoryWriteExclusive32(u64 vaddr, std::uint32_t value, std::uint32_t expected) override {
        return parent.system.Memory().WriteExclusive32(vaddr, value, expected);
    }
    bool MemoryWriteExclusive64(u64 vaddr, std::uint64_t value, std::uint64_t expected) override {
        return parent.system.Memory().WriteExclusive64(vaddr, value, expected);
    }
    // Vector is dynarmic's 128-bit value type (two 64-bit lanes).
    bool MemoryWriteExclusive128(u64 vaddr, Vector value, Vector expected) override {
        return parent.system.Memory().WriteExclusive128(vaddr, value, expected);
    }
84
69 void InterpreterFallback(u64 pc, std::size_t num_instructions) override { 85 void InterpreterFallback(u64 pc, std::size_t num_instructions) override {
70 LOG_INFO(Core_ARM, "Unicorn fallback @ 0x{:X} for {} instructions (instr = {:08X})", pc, 86 LOG_INFO(Core_ARM, "Unicorn fallback @ 0x{:X} for {} instructions (instr = {:08X})", pc,
71 num_instructions, MemoryReadCode(pc)); 87 num_instructions, MemoryReadCode(pc));
@@ -284,9 +300,29 @@ DynarmicExclusiveMonitor::DynarmicExclusiveMonitor(Memory::Memory& memory, std::
284 300
285DynarmicExclusiveMonitor::~DynarmicExclusiveMonitor() = default; 301DynarmicExclusiveMonitor::~DynarmicExclusiveMonitor() = default;
286 302
287void DynarmicExclusiveMonitor::SetExclusive(std::size_t core_index, VAddr addr) { 303void DynarmicExclusiveMonitor::SetExclusive8(std::size_t core_index, VAddr addr) {
288 // Size doesn't actually matter. 304 monitor.Mark<u8>(core_index, addr, 1, [&]() -> u8 { return memory.Read8(addr); });
289 monitor.Mark(core_index, addr, 16); 305}
306
307void DynarmicExclusiveMonitor::SetExclusive16(std::size_t core_index, VAddr addr) {
308 monitor.Mark<u16>(core_index, addr, 2, [&]() -> u16 { return memory.Read16(addr); });
309}
310
311void DynarmicExclusiveMonitor::SetExclusive32(std::size_t core_index, VAddr addr) {
312 monitor.Mark<u32>(core_index, addr, 4, [&]() -> u32 { return memory.Read32(addr); });
313}
314
315void DynarmicExclusiveMonitor::SetExclusive64(std::size_t core_index, VAddr addr) {
316 monitor.Mark<u64>(core_index, addr, 8, [&]() -> u64 { return memory.Read64(addr); });
317}
318
319void DynarmicExclusiveMonitor::SetExclusive128(std::size_t core_index, VAddr addr) {
320 monitor.Mark<u128>(core_index, addr, 16, [&]() -> u128 {
321 u128 result;
322 result[0] = memory.Read64(addr);
323 result[1] = memory.Read64(addr + 8);
324 return result;
325 });
290} 326}
291 327
292void DynarmicExclusiveMonitor::ClearExclusive() { 328void DynarmicExclusiveMonitor::ClearExclusive() {
@@ -294,28 +330,32 @@ void DynarmicExclusiveMonitor::ClearExclusive() {
294} 330}
295 331
296bool DynarmicExclusiveMonitor::ExclusiveWrite8(std::size_t core_index, VAddr vaddr, u8 value) { 332bool DynarmicExclusiveMonitor::ExclusiveWrite8(std::size_t core_index, VAddr vaddr, u8 value) {
297 return monitor.DoExclusiveOperation(core_index, vaddr, 1, [&] { memory.Write8(vaddr, value); }); 333 return monitor.DoExclusiveOperation<u8>(core_index, vaddr, 1, [&](u8 expected) -> bool {
334 return memory.WriteExclusive8(vaddr, value, expected);
335 });
298} 336}
299 337
300bool DynarmicExclusiveMonitor::ExclusiveWrite16(std::size_t core_index, VAddr vaddr, u16 value) { 338bool DynarmicExclusiveMonitor::ExclusiveWrite16(std::size_t core_index, VAddr vaddr, u16 value) {
301 return monitor.DoExclusiveOperation(core_index, vaddr, 2, 339 return monitor.DoExclusiveOperation<u16>(core_index, vaddr, 2, [&](u16 expected) -> bool {
302 [&] { memory.Write16(vaddr, value); }); 340 return memory.WriteExclusive16(vaddr, value, expected);
341 });
303} 342}
304 343
305bool DynarmicExclusiveMonitor::ExclusiveWrite32(std::size_t core_index, VAddr vaddr, u32 value) { 344bool DynarmicExclusiveMonitor::ExclusiveWrite32(std::size_t core_index, VAddr vaddr, u32 value) {
306 return monitor.DoExclusiveOperation(core_index, vaddr, 4, 345 return monitor.DoExclusiveOperation<u32>(core_index, vaddr, 4, [&](u32 expected) -> bool {
307 [&] { memory.Write32(vaddr, value); }); 346 return memory.WriteExclusive32(vaddr, value, expected);
347 });
308} 348}
309 349
310bool DynarmicExclusiveMonitor::ExclusiveWrite64(std::size_t core_index, VAddr vaddr, u64 value) { 350bool DynarmicExclusiveMonitor::ExclusiveWrite64(std::size_t core_index, VAddr vaddr, u64 value) {
311 return monitor.DoExclusiveOperation(core_index, vaddr, 8, 351 return monitor.DoExclusiveOperation<u64>(core_index, vaddr, 8, [&](u64 expected) -> bool {
312 [&] { memory.Write64(vaddr, value); }); 352 return memory.WriteExclusive64(vaddr, value, expected);
353 });
313} 354}
314 355
315bool DynarmicExclusiveMonitor::ExclusiveWrite128(std::size_t core_index, VAddr vaddr, u128 value) { 356bool DynarmicExclusiveMonitor::ExclusiveWrite128(std::size_t core_index, VAddr vaddr, u128 value) {
316 return monitor.DoExclusiveOperation(core_index, vaddr, 16, [&] { 357 return monitor.DoExclusiveOperation<u128>(core_index, vaddr, 16, [&](u128 expected) -> bool {
317 memory.Write64(vaddr + 0, value[0]); 358 return memory.WriteExclusive128(vaddr, value, expected);
318 memory.Write64(vaddr + 8, value[1]);
319 }); 359 });
320} 360}
321 361
diff --git a/src/core/arm/dynarmic/arm_dynarmic_64.h b/src/core/arm/dynarmic/arm_dynarmic_64.h
index 9e94b58c2..3ead59f16 100644
--- a/src/core/arm/dynarmic/arm_dynarmic_64.h
+++ b/src/core/arm/dynarmic/arm_dynarmic_64.h
@@ -82,7 +82,11 @@ public:
82 explicit DynarmicExclusiveMonitor(Memory::Memory& memory, std::size_t core_count); 82 explicit DynarmicExclusiveMonitor(Memory::Memory& memory, std::size_t core_count);
83 ~DynarmicExclusiveMonitor() override; 83 ~DynarmicExclusiveMonitor() override;
84 84
85 void SetExclusive(std::size_t core_index, VAddr addr) override; 85 void SetExclusive8(std::size_t core_index, VAddr addr) override;
86 void SetExclusive16(std::size_t core_index, VAddr addr) override;
87 void SetExclusive32(std::size_t core_index, VAddr addr) override;
88 void SetExclusive64(std::size_t core_index, VAddr addr) override;
89 void SetExclusive128(std::size_t core_index, VAddr addr) override;
86 void ClearExclusive() override; 90 void ClearExclusive() override;
87 91
88 bool ExclusiveWrite8(std::size_t core_index, VAddr vaddr, u8 value) override; 92 bool ExclusiveWrite8(std::size_t core_index, VAddr vaddr, u8 value) override;
diff --git a/src/core/arm/exclusive_monitor.h b/src/core/arm/exclusive_monitor.h
index ccd73b80f..2ee312eee 100644
--- a/src/core/arm/exclusive_monitor.h
+++ b/src/core/arm/exclusive_monitor.h
@@ -18,7 +18,11 @@ class ExclusiveMonitor {
18public: 18public:
19 virtual ~ExclusiveMonitor(); 19 virtual ~ExclusiveMonitor();
20 20
21 virtual void SetExclusive(std::size_t core_index, VAddr addr) = 0; 21 virtual void SetExclusive8(std::size_t core_index, VAddr addr) = 0;
22 virtual void SetExclusive16(std::size_t core_index, VAddr addr) = 0;
23 virtual void SetExclusive32(std::size_t core_index, VAddr addr) = 0;
24 virtual void SetExclusive64(std::size_t core_index, VAddr addr) = 0;
25 virtual void SetExclusive128(std::size_t core_index, VAddr addr) = 0;
22 virtual void ClearExclusive() = 0; 26 virtual void ClearExclusive() = 0;
23 27
24 virtual bool ExclusiveWrite8(std::size_t core_index, VAddr vaddr, u8 value) = 0; 28 virtual bool ExclusiveWrite8(std::size_t core_index, VAddr vaddr, u8 value) = 0;
diff --git a/src/core/hle/kernel/address_arbiter.cpp b/src/core/hle/kernel/address_arbiter.cpp
index ebabde921..07acabc1d 100644
--- a/src/core/hle/kernel/address_arbiter.cpp
+++ b/src/core/hle/kernel/address_arbiter.cpp
@@ -90,7 +90,7 @@ ResultCode AddressArbiter::IncrementAndSignalToAddressIfEqual(VAddr address, s32
90 auto& monitor = system.Monitor(); 90 auto& monitor = system.Monitor();
91 u32 current_value; 91 u32 current_value;
92 do { 92 do {
93 monitor.SetExclusive(current_core, address); 93 monitor.SetExclusive32(current_core, address);
94 current_value = memory.Read32(address); 94 current_value = memory.Read32(address);
95 95
96 if (current_value != value) { 96 if (current_value != value) {
@@ -120,7 +120,7 @@ ResultCode AddressArbiter::ModifyByWaitingCountAndSignalToAddressIfEqual(VAddr a
120 auto& monitor = system.Monitor(); 120 auto& monitor = system.Monitor();
121 s32 updated_value; 121 s32 updated_value;
122 do { 122 do {
123 monitor.SetExclusive(current_core, address); 123 monitor.SetExclusive32(current_core, address);
124 updated_value = memory.Read32(address); 124 updated_value = memory.Read32(address);
125 125
126 if (updated_value != value) { 126 if (updated_value != value) {
@@ -191,7 +191,7 @@ ResultCode AddressArbiter::WaitForAddressIfLessThan(VAddr address, s32 value, s6
191 const std::size_t current_core = system.CurrentCoreIndex(); 191 const std::size_t current_core = system.CurrentCoreIndex();
192 auto& monitor = system.Monitor(); 192 auto& monitor = system.Monitor();
193 do { 193 do {
194 monitor.SetExclusive(current_core, address); 194 monitor.SetExclusive32(current_core, address);
195 current_value = static_cast<s32>(memory.Read32(address)); 195 current_value = static_cast<s32>(memory.Read32(address));
196 if (should_decrement) { 196 if (should_decrement) {
197 decrement_value = current_value - 1; 197 decrement_value = current_value - 1;
diff --git a/src/core/hle/kernel/mutex.cpp b/src/core/hle/kernel/mutex.cpp
index ebe3f6050..16c95782a 100644
--- a/src/core/hle/kernel/mutex.cpp
+++ b/src/core/hle/kernel/mutex.cpp
@@ -10,6 +10,7 @@
10#include "common/logging/log.h" 10#include "common/logging/log.h"
11#include "core/core.h" 11#include "core/core.h"
12#include "core/arm/exclusive_monitor.h" 12#include "core/arm/exclusive_monitor.h"
13#include "core/core.h"
13#include "core/hle/kernel/errors.h" 14#include "core/hle/kernel/errors.h"
14#include "core/hle/kernel/handle_table.h" 15#include "core/hle/kernel/handle_table.h"
15#include "core/hle/kernel/kernel.h" 16#include "core/hle/kernel/kernel.h"
@@ -138,7 +139,7 @@ std::pair<ResultCode, std::shared_ptr<Thread>> Mutex::Unlock(std::shared_ptr<Thr
138 const std::size_t current_core = system.CurrentCoreIndex(); 139 const std::size_t current_core = system.CurrentCoreIndex();
139 if (new_owner == nullptr) { 140 if (new_owner == nullptr) {
140 do { 141 do {
141 monitor.SetExclusive(current_core, address); 142 monitor.SetExclusive32(current_core, address);
142 } while (!monitor.ExclusiveWrite32(current_core, address, 0)); 143 } while (!monitor.ExclusiveWrite32(current_core, address, 0));
143 return {RESULT_SUCCESS, nullptr}; 144 return {RESULT_SUCCESS, nullptr};
144 } 145 }
@@ -154,7 +155,7 @@ std::pair<ResultCode, std::shared_ptr<Thread>> Mutex::Unlock(std::shared_ptr<Thr
154 new_owner->ResumeFromWait(); 155 new_owner->ResumeFromWait();
155 156
156 do { 157 do {
157 monitor.SetExclusive(current_core, address); 158 monitor.SetExclusive32(current_core, address);
158 } while (!monitor.ExclusiveWrite32(current_core, address, mutex_value)); 159 } while (!monitor.ExclusiveWrite32(current_core, address, mutex_value));
159 return {RESULT_SUCCESS, new_owner}; 160 return {RESULT_SUCCESS, new_owner};
160} 161}
diff --git a/src/core/hle/kernel/svc.cpp b/src/core/hle/kernel/svc.cpp
index da2f90a1d..371beed0d 100644
--- a/src/core/hle/kernel/svc.cpp
+++ b/src/core/hle/kernel/svc.cpp
@@ -1641,7 +1641,7 @@ static void SignalProcessWideKey(Core::System& system, VAddr condition_variable_
1641 u32 update_val = 0; 1641 u32 update_val = 0;
1642 const VAddr mutex_address = thread->GetMutexWaitAddress(); 1642 const VAddr mutex_address = thread->GetMutexWaitAddress();
1643 do { 1643 do {
1644 monitor.SetExclusive(current_core, mutex_address); 1644 monitor.SetExclusive32(current_core, mutex_address);
1645 1645
1646 // If the mutex is not yet acquired, acquire it. 1646 // If the mutex is not yet acquired, acquire it.
1647 mutex_val = memory.Read32(mutex_address); 1647 mutex_val = memory.Read32(mutex_address);
diff --git a/src/core/hle/kernel/thread.cpp b/src/core/hle/kernel/thread.cpp
index b99e3b7a5..51cc5dcca 100644
--- a/src/core/hle/kernel/thread.cpp
+++ b/src/core/hle/kernel/thread.cpp
@@ -236,7 +236,7 @@ ResultVal<std::shared_ptr<Thread>> Thread::Create(Core::System& system, ThreadTy
236 ResetThreadContext64(thread->context_64, stack_top, entry_point, arg); 236 ResetThreadContext64(thread->context_64, stack_top, entry_point, arg);
237 } 237 }
238 thread->host_context = 238 thread->host_context =
239 std::make_shared<Common::Fiber>(std::move(thread_start_func), thread_start_parameter); 239 std::make_shared<Common::Fiber>(std::move(thread_start_func), thread_start_parameter);
240 240
241 return MakeResult<std::shared_ptr<Thread>>(std::move(thread)); 241 return MakeResult<std::shared_ptr<Thread>>(std::move(thread));
242} 242}
@@ -412,12 +412,12 @@ ResultCode Thread::SetActivity(ThreadActivity value) {
412 } 412 }
413 413
414 if (value == ThreadActivity::Paused) { 414 if (value == ThreadActivity::Paused) {
415 if (pausing_state & static_cast<u32>(ThreadSchedFlags::ThreadPauseFlag) != 0) { 415 if ((pausing_state & static_cast<u32>(ThreadSchedFlags::ThreadPauseFlag)) != 0) {
416 return ERR_INVALID_STATE; 416 return ERR_INVALID_STATE;
417 } 417 }
418 AddSchedulingFlag(ThreadSchedFlags::ThreadPauseFlag); 418 AddSchedulingFlag(ThreadSchedFlags::ThreadPauseFlag);
419 } else { 419 } else {
420 if (pausing_state & static_cast<u32>(ThreadSchedFlags::ThreadPauseFlag) == 0) { 420 if ((pausing_state & static_cast<u32>(ThreadSchedFlags::ThreadPauseFlag)) == 0) {
421 return ERR_INVALID_STATE; 421 return ERR_INVALID_STATE;
422 } 422 }
423 RemoveSchedulingFlag(ThreadSchedFlags::ThreadPauseFlag); 423 RemoveSchedulingFlag(ThreadSchedFlags::ThreadPauseFlag);
diff --git a/src/core/memory.cpp b/src/core/memory.cpp
index 66634596d..4cb5d05e5 100644
--- a/src/core/memory.cpp
+++ b/src/core/memory.cpp
@@ -8,6 +8,7 @@
8#include <utility> 8#include <utility>
9 9
10#include "common/assert.h" 10#include "common/assert.h"
11#include "common/atomic_ops.h"
11#include "common/common_types.h" 12#include "common/common_types.h"
12#include "common/logging/log.h" 13#include "common/logging/log.h"
13#include "common/page_table.h" 14#include "common/page_table.h"
@@ -176,6 +177,22 @@ struct Memory::Impl {
176 } 177 }
177 } 178 }
178 179
    // Width-specific conditional-store entry points backing Memory's public
    // WriteExclusiveN API. The 16/32/64-bit variants instantiate the template
    // with the little-endian wrapper types so the compare-and-swap operates on
    // guest (LE) byte order.
    bool WriteExclusive8(const VAddr addr, const u8 data, const u8 expected) {
        return WriteExclusive<u8>(addr, data, expected);
    }

    bool WriteExclusive16(const VAddr addr, const u16 data, const u16 expected) {
        return WriteExclusive<u16_le>(addr, data, expected);
    }

    bool WriteExclusive32(const VAddr addr, const u32 data, const u32 expected) {
        return WriteExclusive<u32_le>(addr, data, expected);
    }

    bool WriteExclusive64(const VAddr addr, const u64 data, const u64 expected) {
        return WriteExclusive<u64_le>(addr, data, expected);
    }
195
179 std::string ReadCString(VAddr vaddr, std::size_t max_length) { 196 std::string ReadCString(VAddr vaddr, std::size_t max_length) {
180 std::string string; 197 std::string string;
181 string.reserve(max_length); 198 string.reserve(max_length);
@@ -679,6 +696,67 @@ struct Memory::Impl {
679 } 696 }
680 } 697 }
681 698
699 template <typename T>
700 bool WriteExclusive(const VAddr vaddr, const T data, const T expected) {
701 u8* page_pointer = current_page_table->pointers[vaddr >> PAGE_BITS];
702 if (page_pointer != nullptr) {
703 // NOTE: Avoid adding any extra logic to this fast-path block
704 T volatile* pointer = reinterpret_cast<T volatile*>(&page_pointer[vaddr]);
705 return Common::AtomicCompareAndSwap(pointer, data, expected);
706 }
707
708 const Common::PageType type = current_page_table->attributes[vaddr >> PAGE_BITS];
709 switch (type) {
710 case Common::PageType::Unmapped:
711 LOG_ERROR(HW_Memory, "Unmapped Write{} 0x{:08X} @ 0x{:016X}", sizeof(data) * 8,
712 static_cast<u32>(data), vaddr);
713 return true;
714 case Common::PageType::Memory:
715 ASSERT_MSG(false, "Mapped memory page without a pointer @ {:016X}", vaddr);
716 break;
717 case Common::PageType::RasterizerCachedMemory: {
718 u8* host_ptr{GetPointerFromVMA(vaddr)};
719 system.GPU().InvalidateRegion(ToCacheAddr(host_ptr), sizeof(T));
720 T volatile* pointer = reinterpret_cast<T volatile*>(&host_ptr);
721 return Common::AtomicCompareAndSwap(pointer, data, expected);
722 break;
723 }
724 default:
725 UNREACHABLE();
726 }
727 return true;
728 }
729
730 bool WriteExclusive128(const VAddr vaddr, const u128 data, const u128 expected) {
731 u8* const page_pointer = current_page_table->pointers[vaddr >> PAGE_BITS];
732 if (page_pointer != nullptr) {
733 // NOTE: Avoid adding any extra logic to this fast-path block
734 u64 volatile* pointer = reinterpret_cast<u64 volatile*>(&page_pointer[vaddr]);
735 return Common::AtomicCompareAndSwap(pointer, data, expected);
736 }
737
738 const Common::PageType type = current_page_table->attributes[vaddr >> PAGE_BITS];
739 switch (type) {
740 case Common::PageType::Unmapped:
741 LOG_ERROR(HW_Memory, "Unmapped Write{} 0x{:08X} @ 0x{:016X}{:016X}", sizeof(data) * 8,
742 static_cast<u64>(data[1]), static_cast<u64>(data[0]), vaddr);
743 return true;
744 case Common::PageType::Memory:
745 ASSERT_MSG(false, "Mapped memory page without a pointer @ {:016X}", vaddr);
746 break;
747 case Common::PageType::RasterizerCachedMemory: {
748 u8* host_ptr{GetPointerFromVMA(vaddr)};
749 system.GPU().InvalidateRegion(ToCacheAddr(host_ptr), sizeof(u128));
750 u64 volatile* pointer = reinterpret_cast<u64 volatile*>(&host_ptr);
751 return Common::AtomicCompareAndSwap(pointer, data, expected);
752 break;
753 }
754 default:
755 UNREACHABLE();
756 }
757 return true;
758 }
759
682 Common::PageTable* current_page_table = nullptr; 760 Common::PageTable* current_page_table = nullptr;
683 Core::System& system; 761 Core::System& system;
684}; 762};
@@ -761,6 +839,26 @@ void Memory::Write64(VAddr addr, u64 data) {
761 impl->Write64(addr, data); 839 impl->Write64(addr, data);
762} 840}
763 841
// Public WriteExclusiveN API: thin pimpl forwarders. The actual conditional
// store (and its return-value contract — true on success or unmapped page,
// false when the compare failed and the caller should retry) lives in
// Memory::Impl.
bool Memory::WriteExclusive8(VAddr addr, u8 data, u8 expected) {
    return impl->WriteExclusive8(addr, data, expected);
}

bool Memory::WriteExclusive16(VAddr addr, u16 data, u16 expected) {
    return impl->WriteExclusive16(addr, data, expected);
}

bool Memory::WriteExclusive32(VAddr addr, u32 data, u32 expected) {
    return impl->WriteExclusive32(addr, data, expected);
}

bool Memory::WriteExclusive64(VAddr addr, u64 data, u64 expected) {
    return impl->WriteExclusive64(addr, data, expected);
}

bool Memory::WriteExclusive128(VAddr addr, u128 data, u128 expected) {
    return impl->WriteExclusive128(addr, data, expected);
}
861
764std::string Memory::ReadCString(VAddr vaddr, std::size_t max_length) { 862std::string Memory::ReadCString(VAddr vaddr, std::size_t max_length) {
765 return impl->ReadCString(vaddr, max_length); 863 return impl->ReadCString(vaddr, max_length);
766} 864}
diff --git a/src/core/memory.h b/src/core/memory.h
index 93f0c1d6c..4a1cc63f4 100644
--- a/src/core/memory.h
+++ b/src/core/memory.h
@@ -245,6 +245,71 @@ public:
245 void Write64(VAddr addr, u64 data); 245 void Write64(VAddr addr, u64 data);
246 246
247 /** 247 /**
248 * Writes a 8-bit unsigned integer to the given virtual address in
249 * the current process' address space if and only if the address contains
250 * the expected value. This operation is atomic.
251 *
252 * @param addr The virtual address to write the 8-bit unsigned integer to.
253 * @param data The 8-bit unsigned integer to write to the given virtual address.
254 * @param expected The 8-bit unsigned integer to check against the given virtual address.
255 *
256 * @post The memory range [addr, sizeof(data)) contains the given data value.
257 */
258 bool WriteExclusive8(VAddr addr, u8 data, u8 expected);
259
260 /**
261 * Writes a 16-bit unsigned integer to the given virtual address in
262 * the current process' address space if and only if the address contains
263 * the expected value. This operation is atomic.
264 *
265 * @param addr The virtual address to write the 16-bit unsigned integer to.
266 * @param data The 16-bit unsigned integer to write to the given virtual address.
267 * @param expected The 16-bit unsigned integer to check against the given virtual address.
268 *
269 * @post The memory range [addr, sizeof(data)) contains the given data value.
270 */
271 bool WriteExclusive16(VAddr addr, u16 data, u16 expected);
272
273 /**
274 * Writes a 32-bit unsigned integer to the given virtual address in
275 * the current process' address space if and only if the address contains
276 * the expected value. This operation is atomic.
277 *
278 * @param addr The virtual address to write the 32-bit unsigned integer to.
279 * @param data The 32-bit unsigned integer to write to the given virtual address.
280 * @param expected The 32-bit unsigned integer to check against the given virtual address.
281 *
282 * @post The memory range [addr, sizeof(data)) contains the given data value.
283 */
284 bool WriteExclusive32(VAddr addr, u32 data, u32 expected);
285
286 /**
287 * Writes a 64-bit unsigned integer to the given virtual address in
288 * the current process' address space if and only if the address contains
289 * the expected value. This operation is atomic.
290 *
291 * @param addr The virtual address to write the 64-bit unsigned integer to.
292 * @param data The 64-bit unsigned integer to write to the given virtual address.
293 * @param expected The 64-bit unsigned integer to check against the given virtual address.
294 *
295 * @post The memory range [addr, sizeof(data)) contains the given data value.
296 */
297 bool WriteExclusive64(VAddr addr, u64 data, u64 expected);
298
299 /**
300 * Writes a 128-bit unsigned integer to the given virtual address in
301 * the current process' address space if and only if the address contains
302 * the expected value. This operation is atomic.
303 *
304 * @param addr The virtual address to write the 128-bit unsigned integer to.
305 * @param data The 128-bit unsigned integer to write to the given virtual address.
306 * @param expected The 128-bit unsigned integer to check against the given virtual address.
307 *
308 * @post The memory range [addr, sizeof(data)) contains the given data value.
309 */
310 bool WriteExclusive128(VAddr addr, u128 data, u128 expected);
311
312 /**
248 * Reads a null-terminated string from the given virtual address. 313 * Reads a null-terminated string from the given virtual address.
249 * This function will continually read characters until either: 314 * This function will continually read characters until either:
250 * 315 *