| author | 2021-01-02 02:24:49 +0100 |
| committer | 2021-01-02 04:00:27 +0100 |
| commit | 53d92318b82cd4a9e08f814fcb8aab624d795c6c (patch) |
| tree | 4f5236ffebdcf947297d8ace42151b36810f3144 |
| parent | X86/NativeClock: Improve performance of clock calculations on hot path. (diff) |
X86/NativeClock: Reimplement RDTSC access to be lock-free.
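Instead of serializing every read of the timestamp counter behind a `SpinLock`, the clock state (`last_measure` and `accumulated_ticks`) is packed into a 16-byte `TimePoint` union and republished with a single 128-bit compare-and-swap; a thread that loses the race simply retries against the newer snapshot. The sketch below shows the same retry pattern in isolation, using `std::atomic` rather than the `Common::AtomicCompareAndSwap` helpers added by this commit — the names and the `std::atomic` choice are illustrative, and a lock-free 16-byte atomic needs CMPXCHG16B (e.g. `-mcx16` with GCC/Clang):

```cpp
#include <atomic>
#include <cstdint>
#include <x86intrin.h> // __rdtsc on GCC/Clang; MSVC provides it via <intrin.h>

// Both counters live in one 16-byte object so a single 128-bit CAS publishes
// a consistent {last_measure, accumulated_ticks} pair.
struct alignas(16) TimePoint {
    std::uint64_t last_measure = 0;
    std::uint64_t accumulated_ticks = 0;
};

std::atomic<TimePoint> g_time_point{TimePoint{}};

std::uint64_t GetTicksLockFree() {
    TimePoint expected = g_time_point.load();
    TimePoint desired{};
    do {
        const std::uint64_t now = __rdtsc();
        // Never let the clock run backwards: ignore a TSC value older than the snapshot.
        const std::uint64_t diff = now > expected.last_measure ? now - expected.last_measure : 0;
        desired.last_measure = now > expected.last_measure ? now : expected.last_measure;
        desired.accumulated_ticks = expected.accumulated_ticks + diff;
        // On failure, compare_exchange_weak reloads `expected` and the loop retries.
    } while (!g_time_point.compare_exchange_weak(expected, desired));
    return desired.accumulated_ticks;
}
```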
| -rw-r--r-- | src/common/CMakeLists.txt | 1 |
| -rw-r--r-- | src/common/atomic_ops.cpp | 75 |
| -rw-r--r-- | src/common/atomic_ops.h | 71 |
| -rw-r--r-- | src/common/x64/native_clock.cpp | 41 |
| -rw-r--r-- | src/common/x64/native_clock.h | 22 |

5 files changed, 107 insertions(+), 103 deletions(-)
diff --git a/src/common/CMakeLists.txt b/src/common/CMakeLists.txt
index 2c2bd2ee8..abe62543e 100644
--- a/src/common/CMakeLists.txt
+++ b/src/common/CMakeLists.txt
@@ -98,7 +98,6 @@ add_library(common STATIC
     algorithm.h
     alignment.h
     assert.h
-    atomic_ops.cpp
     atomic_ops.h
     detached_tasks.cpp
     detached_tasks.h
diff --git a/src/common/atomic_ops.cpp b/src/common/atomic_ops.cpp
index 1612d0e67..000000000
--- a/src/common/atomic_ops.cpp
+++ /dev/null
@@ -1,75 +0,0 @@
-// Copyright 2020 yuzu Emulator Project
-// Licensed under GPLv2 or any later version
-// Refer to the license.txt file included.
-
-#include <cstring>
-
-#include "common/atomic_ops.h"
-
-#if _MSC_VER
-#include <intrin.h>
-#endif
-
-namespace Common {
-
-#if _MSC_VER
-
-bool AtomicCompareAndSwap(volatile u8* pointer, u8 value, u8 expected) {
-    const u8 result =
-        _InterlockedCompareExchange8(reinterpret_cast<volatile char*>(pointer), value, expected);
-    return result == expected;
-}
-
-bool AtomicCompareAndSwap(volatile u16* pointer, u16 value, u16 expected) {
-    const u16 result =
-        _InterlockedCompareExchange16(reinterpret_cast<volatile short*>(pointer), value, expected);
-    return result == expected;
-}
-
-bool AtomicCompareAndSwap(volatile u32* pointer, u32 value, u32 expected) {
-    const u32 result =
-        _InterlockedCompareExchange(reinterpret_cast<volatile long*>(pointer), value, expected);
-    return result == expected;
-}
-
-bool AtomicCompareAndSwap(volatile u64* pointer, u64 value, u64 expected) {
-    const u64 result = _InterlockedCompareExchange64(reinterpret_cast<volatile __int64*>(pointer),
-                                                     value, expected);
-    return result == expected;
-}
-
-bool AtomicCompareAndSwap(volatile u64* pointer, u128 value, u128 expected) {
-    return _InterlockedCompareExchange128(reinterpret_cast<volatile __int64*>(pointer), value[1],
-                                          value[0],
-                                          reinterpret_cast<__int64*>(expected.data())) != 0;
-}
-
-#else
-
-bool AtomicCompareAndSwap(volatile u8* pointer, u8 value, u8 expected) {
-    return __sync_bool_compare_and_swap(pointer, expected, value);
-}
-
-bool AtomicCompareAndSwap(volatile u16* pointer, u16 value, u16 expected) {
-    return __sync_bool_compare_and_swap(pointer, expected, value);
-}
-
-bool AtomicCompareAndSwap(volatile u32* pointer, u32 value, u32 expected) {
-    return __sync_bool_compare_and_swap(pointer, expected, value);
-}
-
-bool AtomicCompareAndSwap(volatile u64* pointer, u64 value, u64 expected) {
-    return __sync_bool_compare_and_swap(pointer, expected, value);
-}
-
-bool AtomicCompareAndSwap(volatile u64* pointer, u128 value, u128 expected) {
-    unsigned __int128 value_a;
-    unsigned __int128 expected_a;
-    std::memcpy(&value_a, value.data(), sizeof(u128));
-    std::memcpy(&expected_a, expected.data(), sizeof(u128));
-    return __sync_bool_compare_and_swap((unsigned __int128*)pointer, expected_a, value_a);
-}
-
-#endif
-
-} // namespace Common
diff --git a/src/common/atomic_ops.h b/src/common/atomic_ops.h
index b46888589..2b1f515e8 100644
--- a/src/common/atomic_ops.h
+++ b/src/common/atomic_ops.h
@@ -4,14 +4,75 @@
 
 #pragma once
 
+#include <cstring>
+#include <memory>
+
 #include "common/common_types.h"
 
+#if _MSC_VER
+#include <intrin.h>
+#endif
+
 namespace Common {
 
-[[nodiscard]] bool AtomicCompareAndSwap(volatile u8* pointer, u8 value, u8 expected);
-[[nodiscard]] bool AtomicCompareAndSwap(volatile u16* pointer, u16 value, u16 expected);
-[[nodiscard]] bool AtomicCompareAndSwap(volatile u32* pointer, u32 value, u32 expected);
-[[nodiscard]] bool AtomicCompareAndSwap(volatile u64* pointer, u64 value, u64 expected);
-[[nodiscard]] bool AtomicCompareAndSwap(volatile u64* pointer, u128 value, u128 expected);
+#if _MSC_VER
+
+[[nodiscard]] inline bool AtomicCompareAndSwap(volatile u8* pointer, u8 value, u8 expected) {
+    const u8 result =
+        _InterlockedCompareExchange8(reinterpret_cast<volatile char*>(pointer), value, expected);
+    return result == expected;
+}
+
+[[nodiscard]] inline bool AtomicCompareAndSwap(volatile u16* pointer, u16 value, u16 expected) {
+    const u16 result =
+        _InterlockedCompareExchange16(reinterpret_cast<volatile short*>(pointer), value, expected);
+    return result == expected;
+}
+
+[[nodiscard]] inline bool AtomicCompareAndSwap(volatile u32* pointer, u32 value, u32 expected) {
+    const u32 result =
+        _InterlockedCompareExchange(reinterpret_cast<volatile long*>(pointer), value, expected);
+    return result == expected;
+}
+
+[[nodiscard]] inline bool AtomicCompareAndSwap(volatile u64* pointer, u64 value, u64 expected) {
+    const u64 result = _InterlockedCompareExchange64(reinterpret_cast<volatile __int64*>(pointer),
+                                                     value, expected);
+    return result == expected;
+}
+
+[[nodiscard]] inline bool AtomicCompareAndSwap(volatile u64* pointer, u128 value, u128 expected) {
+    return _InterlockedCompareExchange128(reinterpret_cast<volatile __int64*>(pointer), value[1],
+                                          value[0],
+                                          reinterpret_cast<__int64*>(expected.data())) != 0;
+}
+
+#else
+
+[[nodiscard]] inline bool AtomicCompareAndSwap(volatile u8* pointer, u8 value, u8 expected) {
+    return __sync_bool_compare_and_swap(pointer, expected, value);
+}
+
+[[nodiscard]] inline bool AtomicCompareAndSwap(volatile u16* pointer, u16 value, u16 expected) {
+    return __sync_bool_compare_and_swap(pointer, expected, value);
+}
+
+[[nodiscard]] inline bool AtomicCompareAndSwap(volatile u32* pointer, u32 value, u32 expected) {
+    return __sync_bool_compare_and_swap(pointer, expected, value);
+}
+
+[[nodiscard]] inline bool AtomicCompareAndSwap(volatile u64* pointer, u64 value, u64 expected) {
+    return __sync_bool_compare_and_swap(pointer, expected, value);
+}
+
+[[nodiscard]] inline bool AtomicCompareAndSwap(volatile u64* pointer, u128 value, u128 expected) {
+    unsigned __int128 value_a;
+    unsigned __int128 expected_a;
+    std::memcpy(&value_a, value.data(), sizeof(u128));
+    std::memcpy(&expected_a, expected.data(), sizeof(u128));
+    return __sync_bool_compare_and_swap((unsigned __int128*)pointer, expected_a, value_a);
+}
+
+#endif
 
 } // namespace Common
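The compare-and-swap helpers move from atomic_ops.cpp into the header as `inline` functions, presumably so the hot-path caller in `NativeClock::GetRTSC` can inline them instead of paying for a call. A hypothetical caller of the `u128` overload might look like the sketch below; `PublishPair` and `storage` are illustrative names, and `u128` is assumed to be the `std::array<u64, 2>` alias from common/common_types.h. Note the 16-byte alignment on the destination, which CMPXCHG16B requires.

```cpp
#include "common/atomic_ops.h"
#include "common/common_types.h" // u64, u128 (std::array<u64, 2>)

// Hypothetical caller of the u128 overload above; not part of the diff.
alignas(16) static volatile u64 storage[2] = {0, 0};

bool PublishPair(u64 first, u64 second) {
    const u128 expected{storage[0], storage[1]}; // snapshot of what we think is stored
    const u128 desired{first, second};           // value we want to publish
    // Writes `desired` only if `storage` still equals `expected`;
    // a false return means another thread got there first and we should retry.
    return Common::AtomicCompareAndSwap(storage, desired, expected);
}
```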
diff --git a/src/common/x64/native_clock.cpp b/src/common/x64/native_clock.cpp
index e246432d0..a65f6b832 100644
--- a/src/common/x64/native_clock.cpp
+++ b/src/common/x64/native_clock.cpp
@@ -17,6 +17,7 @@
 #include <x86intrin.h>
 #endif
 
+#include "common/atomic_ops.h"
 #include "common/uint128.h"
 #include "common/x64/native_clock.h"
 
@@ -102,8 +103,8 @@ NativeClock::NativeClock(u64 emulated_cpu_frequency_, u64 emulated_clock_frequen
     : WallClock(emulated_cpu_frequency_, emulated_clock_frequency_, true), rtsc_frequency{
                                                                                rtsc_frequency_} {
     _mm_mfence();
-    last_measure = __rdtsc();
-    accumulated_ticks = 0U;
+    time_point.inner.last_measure = __rdtsc();
+    time_point.inner.accumulated_ticks = 0U;
     ns_rtsc_factor = GetFixedPoint64Factor(1000000000, rtsc_frequency);
     us_rtsc_factor = GetFixedPoint64Factor(1000000, rtsc_frequency);
     ms_rtsc_factor = GetFixedPoint64Factor(1000, rtsc_frequency);
@@ -112,23 +113,35 @@ NativeClock::NativeClock(u64 emulated_cpu_frequency_, u64 emulated_clock_frequen
 }
 
 u64 NativeClock::GetRTSC() {
-    std::scoped_lock scope{rtsc_serialize};
-    _mm_mfence();
-    const u64 current_measure = __rdtsc();
-    u64 diff = current_measure - last_measure;
-    diff = diff & ~static_cast<u64>(static_cast<s64>(diff) >> 63); // max(diff, 0)
-    if (current_measure > last_measure) {
-        last_measure = current_measure;
-    }
-    accumulated_ticks += diff;
+    TimePoint new_time_point{};
+    TimePoint current_time_point{};
+    do {
+        current_time_point.pack = time_point.pack;
+        _mm_mfence();
+        const u64 current_measure = __rdtsc();
+        u64 diff = current_measure - current_time_point.inner.last_measure;
+        diff = diff & ~static_cast<u64>(static_cast<s64>(diff) >> 63); // max(diff, 0)
+        new_time_point.inner.last_measure = current_measure > current_time_point.inner.last_measure
+                                                ? current_measure
+                                                : current_time_point.inner.last_measure;
+        new_time_point.inner.accumulated_ticks = current_time_point.inner.accumulated_ticks + diff;
+    } while (!Common::AtomicCompareAndSwap(time_point.pack.data(), new_time_point.pack,
+                                           current_time_point.pack));
     /// The clock cannot be more precise than the guest timer, remove the lower bits
-    return accumulated_ticks & inaccuracy_mask;
+    return new_time_point.inner.accumulated_ticks & inaccuracy_mask;
 }
 
 void NativeClock::Pause(bool is_paused) {
     if (!is_paused) {
-        _mm_mfence();
-        last_measure = __rdtsc();
+        TimePoint current_time_point{};
+        TimePoint new_time_point{};
+        do {
+            current_time_point.pack = time_point.pack;
+            new_time_point.pack = current_time_point.pack;
+            _mm_mfence();
+            new_time_point.inner.last_measure = __rdtsc();
+        } while (!Common::AtomicCompareAndSwap(time_point.pack.data(), new_time_point.pack,
+                                               current_time_point.pack));
     }
 }
 
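Worth noting in the new `GetRTSC` loop: the branchless `diff & ~static_cast<u64>(static_cast<s64>(diff) >> 63)` clamp carried over from the old code zeroes the delta whenever the freshly read TSC is behind the snapshotted `last_measure` (the unsigned subtraction would otherwise wrap), so `accumulated_ticks` can never run backwards. A small standalone illustration of that trick, with hypothetical names:

```cpp
#include <cassert>
#include <cstdint>

// The max(diff, 0) trick used in GetRTSC, shown in isolation. If the
// subtraction wrapped, the sign bit of the s64 view is set; the arithmetic
// right shift (implementation-defined pre-C++20, arithmetic on mainstream
// compilers) smears it into an all-ones mask and the AND clears diff.
std::uint64_t ClampNegativeToZero(std::uint64_t diff) {
    const std::uint64_t sign_mask =
        static_cast<std::uint64_t>(static_cast<std::int64_t>(diff) >> 63); // 0 or ~0
    return diff & ~sign_mask;
}

int main() {
    assert(ClampNegativeToZero(42) == 42); // genuine forward progress is kept
    assert(ClampNegativeToZero(0) == 0);
    // 100 - 200 as u64 wraps to a huge value; treated as negative and clamped to 0.
    assert(ClampNegativeToZero(std::uint64_t{100} - std::uint64_t{200}) == 0);
    return 0;
}
```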
diff --git a/src/common/x64/native_clock.h b/src/common/x64/native_clock.h
index a7b1ee9e0..7cbd400d2 100644
--- a/src/common/x64/native_clock.h
+++ b/src/common/x64/native_clock.h
@@ -6,7 +6,6 @@
 
 #include <optional>
 
-#include "common/spin_lock.h"
 #include "common/wall_clock.h"
 
 namespace Common {
@@ -32,22 +31,29 @@ public:
 private:
     u64 GetRTSC();
 
+    union alignas(16) TimePoint {
+        TimePoint() : pack{} {}
+        u128 pack{};
+        struct Inner {
+            u64 last_measure{};
+            u64 accumulated_ticks{};
+        } inner;
+    };
+
     /// value used to reduce the native clocks accuracy as some apss rely on
     /// undefined behavior where the level of accuracy in the clock shouldn't
     /// be higher.
     static constexpr u64 inaccuracy_mask = ~(UINT64_C(0x400) - 1);
 
-    SpinLock rtsc_serialize{};
-    u64 last_measure{};
-    u64 accumulated_ticks{};
-    u64 rtsc_frequency;
-
+    TimePoint time_point;
     // factors
+    u64 clock_rtsc_factor{};
+    u64 cpu_rtsc_factor{};
     u64 ns_rtsc_factor{};
     u64 us_rtsc_factor{};
     u64 ms_rtsc_factor{};
-    u64 clock_rtsc_factor{};
-    u64 cpu_rtsc_factor{};
+
+    u64 rtsc_frequency;
 };
 } // namespace X64
 
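The `TimePoint` union overlays the `u128 pack` used for the 128-bit CAS with the named `last_measure`/`accumulated_ticks` fields, and the `alignas(16)` is load-bearing: `time_point.pack.data()` is handed to `AtomicCompareAndSwap`, and CMPXCHG16B requires a 16-byte-aligned operand. A few compile-time checks one could add to document those layout assumptions (a sketch, assuming `u128` is the `std::array<u64, 2>` alias from common/common_types.h):

```cpp
#include <array>
#include <cstddef> // offsetof
#include <cstdint>

// Standalone restatement of the TimePoint layout with compile-time checks.
using u64 = std::uint64_t;
using u128 = std::array<u64, 2>;

union alignas(16) TimePoint {
    TimePoint() : pack{} {}
    u128 pack;
    struct Inner {
        u64 last_measure;
        u64 accumulated_ticks;
    } inner;
};

// One CMPXCHG16B must cover both counters, and it needs 16-byte alignment.
static_assert(sizeof(TimePoint) == 16);
static_assert(alignof(TimePoint) == 16);
// pack[0] aliases last_measure and pack[1] aliases accumulated_ticks.
static_assert(offsetof(TimePoint::Inner, last_measure) == 0);
static_assert(offsetof(TimePoint::Inner, accumulated_ticks) == 8);
```

Strictly speaking, reading `inner` after the CAS wrote `pack` relies on union type punning, which GCC, Clang, and MSVC all tolerate even though ISO C++ does not guarantee it.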