| author | 2022-03-11 16:29:53 -0800 |
|---|---|
| committer | 2022-03-14 18:14:54 -0700 |
| commit | 15d9b0418f22637ec848d30dee55d168ee821f6a (patch) |
| tree | b0d81616aba414b5e8fa2f5add577bbf1af26832 /src/core |
| parent | core: hle: kernel: Update init_slab_heap, use device memory, and add KThreadL... (diff) |
| download | yuzu-15d9b0418f22637ec848d30dee55d168ee821f6a.tar.gz, yuzu-15d9b0418f22637ec848d30dee55d168ee821f6a.tar.xz, yuzu-15d9b0418f22637ec848d30dee55d168ee821f6a.zip |
core: hle: kernel: k_slab_heap: Refresh to use guest allocations.
Diffstat (limited to 'src/core')
| Mode | Path | Lines changed |
|---|---|---|
| -rw-r--r-- | src/core/hle/kernel/k_slab_heap.h | 230 |
| -rw-r--r-- | src/core/hle/kernel/slab_helpers.h | 2 |
2 files changed, 107 insertions, 125 deletions
```diff
diff --git a/src/core/hle/kernel/k_slab_heap.h b/src/core/hle/kernel/k_slab_heap.h
index 05c0bec9c..5690cc757 100644
--- a/src/core/hle/kernel/k_slab_heap.h
+++ b/src/core/hle/kernel/k_slab_heap.h
@@ -16,39 +16,34 @@ class KernelCore;
 
 namespace impl {
 
-class KSlabHeapImpl final {
-public:
+class KSlabHeapImpl {
     YUZU_NON_COPYABLE(KSlabHeapImpl);
     YUZU_NON_MOVEABLE(KSlabHeapImpl);
 
+public:
     struct Node {
         Node* next{};
     };
 
+public:
     constexpr KSlabHeapImpl() = default;
-    constexpr ~KSlabHeapImpl() = default;
 
-    void Initialize(std::size_t size) {
-        ASSERT(head == nullptr);
-        obj_size = size;
-    }
-
-    constexpr std::size_t GetObjectSize() const {
-        return obj_size;
+    void Initialize() {
+        ASSERT(m_head == nullptr);
     }
 
     Node* GetHead() const {
-        return head;
+        return m_head;
     }
 
     void* Allocate() {
-        Node* ret = head.load();
+        Node* ret = m_head.load();
 
         do {
             if (ret == nullptr) {
                 break;
             }
-        } while (!head.compare_exchange_weak(ret, ret->next));
+        } while (!m_head.compare_exchange_weak(ret, ret->next));
 
         return ret;
     }
```
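The allocation strategy in this first hunk is easiest to see in isolation. The sketch below is a minimal, stand-alone C++ rendition of the same lock-free intrusive free-list technique that `KSlabHeapImpl` uses; the `FreeList` name and structure are illustrative, not part of the yuzu sources.

```cpp
#include <atomic>

// Minimal sketch (not yuzu code): an intrusive lock-free free list in the
// style of KSlabHeapImpl. Freed blocks store the "next" pointer inside
// themselves, and push/pop use compare-exchange loops instead of a mutex.
class FreeList {
public:
    struct Node {
        Node* next{};
    };

    // Pop the current head, or return nullptr when the list is empty.
    void* Allocate() {
        Node* ret = m_head.load();
        do {
            if (ret == nullptr) {
                break;
            }
        } while (!m_head.compare_exchange_weak(ret, ret->next));
        return ret;
    }

    // Push a block back; its first pointer-sized bytes are reused as a Node.
    void Free(void* obj) {
        Node* node = static_cast<Node*>(obj);
        Node* cur_head = m_head.load();
        do {
            node->next = cur_head;
        } while (!m_head.compare_exchange_weak(cur_head, node));
    }

private:
    std::atomic<Node*> m_head{};
};
```

Seeding the list by walking a backing buffer from the end and calling `Free` on each slot, as `KSlabHeapBase::Initialize` does in the second hunk below, is what turns this list into a fixed-size slab allocator.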
```diff
@@ -56,170 +51,157 @@ public:
     void Free(void* obj) {
         Node* node = static_cast<Node*>(obj);
 
-        Node* cur_head = head.load();
+        Node* cur_head = m_head.load();
         do {
             node->next = cur_head;
-        } while (!head.compare_exchange_weak(cur_head, node));
+        } while (!m_head.compare_exchange_weak(cur_head, node));
     }
 
 private:
-    std::atomic<Node*> head{};
-    std::size_t obj_size{};
+    std::atomic<Node*> m_head{};
 };
 
 } // namespace impl
 
-class KSlabHeapBase {
-public:
+template <bool SupportDynamicExpansion>
+class KSlabHeapBase : protected impl::KSlabHeapImpl {
     YUZU_NON_COPYABLE(KSlabHeapBase);
     YUZU_NON_MOVEABLE(KSlabHeapBase);
 
-    constexpr KSlabHeapBase() = default;
-    constexpr ~KSlabHeapBase() = default;
+private:
+    size_t m_obj_size{};
+    uintptr_t m_peak{};
+    uintptr_t m_start{};
+    uintptr_t m_end{};
 
-    constexpr bool Contains(uintptr_t addr) const {
-        return start <= addr && addr < end;
-    }
+private:
+    void UpdatePeakImpl(uintptr_t obj) {
+        static_assert(std::atomic_ref<uintptr_t>::is_always_lock_free);
+        std::atomic_ref<uintptr_t> peak_ref(m_peak);
 
-    constexpr std::size_t GetSlabHeapSize() const {
-        return (end - start) / GetObjectSize();
+        const uintptr_t alloc_peak = obj + this->GetObjectSize();
+        uintptr_t cur_peak = m_peak;
+        do {
+            if (alloc_peak <= cur_peak) {
+                break;
+            }
+        } while (!peak_ref.compare_exchange_strong(cur_peak, alloc_peak));
     }
 
-    constexpr std::size_t GetObjectSize() const {
-        return impl.GetObjectSize();
-    }
+public:
+    constexpr KSlabHeapBase() = default;
 
-    constexpr uintptr_t GetSlabHeapAddress() const {
-        return start;
+    bool Contains(uintptr_t address) const {
+        return m_start <= address && address < m_end;
     }
 
-    std::size_t GetObjectIndexImpl(const void* obj) const {
-        return (reinterpret_cast<uintptr_t>(obj) - start) / GetObjectSize();
+    void Initialize(size_t obj_size, void* memory, size_t memory_size) {
+        // Ensure we don't initialize a slab using null memory.
+        ASSERT(memory != nullptr);
+
+        // Set our object size.
+        m_obj_size = obj_size;
+
+        // Initialize the base allocator.
+        KSlabHeapImpl::Initialize();
+
+        // Set our tracking variables.
+        const size_t num_obj = (memory_size / obj_size);
+        m_start = reinterpret_cast<uintptr_t>(memory);
+        m_end = m_start + num_obj * obj_size;
+        m_peak = m_start;
+
+        // Free the objects.
+        u8* cur = reinterpret_cast<u8*>(m_end);
+
+        for (size_t i = 0; i < num_obj; i++) {
+            cur -= obj_size;
+            KSlabHeapImpl::Free(cur);
+        }
     }
 
-    std::size_t GetPeakIndex() const {
-        return GetObjectIndexImpl(reinterpret_cast<const void*>(peak));
+    size_t GetSlabHeapSize() const {
+        return (m_end - m_start) / this->GetObjectSize();
     }
 
-    void* AllocateImpl() {
-        return impl.Allocate();
+    size_t GetObjectSize() const {
+        return m_obj_size;
     }
 
-    void FreeImpl(void* obj) {
-        // Don't allow freeing an object that wasn't allocated from this heap
-        ASSERT(Contains(reinterpret_cast<uintptr_t>(obj)));
+    void* Allocate() {
+        void* obj = KSlabHeapImpl::Allocate();
 
-        impl.Free(obj);
+        return obj;
     }
 
-    void InitializeImpl(std::size_t obj_size, void* memory, std::size_t memory_size) {
-        // Ensure we don't initialize a slab using null memory
-        ASSERT(memory != nullptr);
-
-        // Initialize the base allocator
-        impl.Initialize(obj_size);
+    void Free(void* obj) {
+        // Don't allow freeing an object that wasn't allocated from this heap.
+        const bool contained = this->Contains(reinterpret_cast<uintptr_t>(obj));
+        ASSERT(contained);
+        KSlabHeapImpl::Free(obj);
+    }
 
-        // Set our tracking variables
-        const std::size_t num_obj = (memory_size / obj_size);
-        start = reinterpret_cast<uintptr_t>(memory);
-        end = start + num_obj * obj_size;
-        peak = start;
+    size_t GetObjectIndex(const void* obj) const {
+        if constexpr (SupportDynamicExpansion) {
+            if (!this->Contains(reinterpret_cast<uintptr_t>(obj))) {
+                return std::numeric_limits<size_t>::max();
+            }
+        }
 
-        // Free the objects
-        u8* cur = reinterpret_cast<u8*>(end);
+        return (reinterpret_cast<uintptr_t>(obj) - m_start) / this->GetObjectSize();
+    }
 
-        for (std::size_t i{}; i < num_obj; i++) {
-            cur -= obj_size;
-            impl.Free(cur);
-        }
+    size_t GetPeakIndex() const {
+        return this->GetObjectIndex(reinterpret_cast<const void*>(m_peak));
     }
 
-private:
-    using Impl = impl::KSlabHeapImpl;
+    uintptr_t GetSlabHeapAddress() const {
+        return m_start;
+    }
 
-    Impl impl;
-    uintptr_t peak{};
-    uintptr_t start{};
-    uintptr_t end{};
+    size_t GetNumRemaining() const {
+        // Only calculate the number of remaining objects under debug configuration.
+        return 0;
+    }
 };
 
 template <typename T>
-class KSlabHeap final : public KSlabHeapBase {
-public:
-    enum class AllocationType {
-        Host,
-        Guest,
-    };
+class KSlabHeap final : public KSlabHeapBase<false> {
+private:
+    using BaseHeap = KSlabHeapBase<false>;
 
-    explicit constexpr KSlabHeap(AllocationType allocation_type_ = AllocationType::Host)
-        : KSlabHeapBase(), allocation_type{allocation_type_} {}
+public:
+    constexpr KSlabHeap() = default;
 
-    void Initialize(void* memory, std::size_t memory_size) {
-        if (allocation_type == AllocationType::Guest) {
-            InitializeImpl(sizeof(T), memory, memory_size);
-        }
+    void Initialize(void* memory, size_t memory_size) {
+        BaseHeap::Initialize(sizeof(T), memory, memory_size);
     }
 
     T* Allocate() {
-        switch (allocation_type) {
-        case AllocationType::Host:
-            // Fallback for cases where we do not yet support allocating guest memory from the slab
-            // heap, such as for kernel memory regions.
-            return new T;
-
-        case AllocationType::Guest:
-            T* obj = static_cast<T*>(AllocateImpl());
-            if (obj != nullptr) {
-                new (obj) T();
-            }
-            return obj;
-        }
+        T* obj = static_cast<T*>(BaseHeap::Allocate());
 
-        UNREACHABLE_MSG("Invalid AllocationType {}", allocation_type);
-        return nullptr;
+        if (obj != nullptr) [[likely]] {
+            std::construct_at(obj);
+        }
+        return obj;
     }
 
-    T* AllocateWithKernel(KernelCore& kernel) {
-        switch (allocation_type) {
-        case AllocationType::Host:
-            // Fallback for cases where we do not yet support allocating guest memory from the slab
-            // heap, such as for kernel memory regions.
-            return new T(kernel);
+    T* Allocate(KernelCore& kernel) {
+        T* obj = static_cast<T*>(BaseHeap::Allocate());
 
-        case AllocationType::Guest:
-            T* obj = static_cast<T*>(AllocateImpl());
-            if (obj != nullptr) {
-                new (obj) T(kernel);
-            }
-            return obj;
+        if (obj != nullptr) [[likely]] {
+            std::construct_at(obj, kernel);
         }
-
-        UNREACHABLE_MSG("Invalid AllocationType {}", allocation_type);
-        return nullptr;
+        return obj;
     }
 
     void Free(T* obj) {
-        switch (allocation_type) {
-        case AllocationType::Host:
-            // Fallback for cases where we do not yet support allocating guest memory from the slab
-            // heap, such as for kernel memory regions.
-            delete obj;
-            return;
-
-        case AllocationType::Guest:
-            FreeImpl(obj);
-            return;
-        }
-
-        UNREACHABLE_MSG("Invalid AllocationType {}", allocation_type);
+        BaseHeap::Free(obj);
     }
 
-    constexpr std::size_t GetObjectIndex(const T* obj) const {
-        return GetObjectIndexImpl(obj);
+    size_t GetObjectIndex(const T* obj) const {
+        return BaseHeap::GetObjectIndex(obj);
     }
-
-private:
-    const AllocationType allocation_type;
 };
 
 } // namespace Kernel
```
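One detail worth calling out in the new `KSlabHeapBase` is the peak tracking in `UpdatePeakImpl`: the member stays a plain `uintptr_t`, and a `std::atomic_ref` (C++20) is constructed over it only when a racy maximum update is needed. Below is a hedged, stand-alone sketch of that pattern; the `UpdatePeak` free function is illustrative, not the yuzu helper.

```cpp
#include <atomic>
#include <cstdint>

// Illustrative only: raise `peak` to at least `candidate` with a CAS loop
// over std::atomic_ref, mirroring the shape of KSlabHeapBase::UpdatePeakImpl.
inline void UpdatePeak(std::uintptr_t& peak, std::uintptr_t candidate) {
    static_assert(std::atomic_ref<std::uintptr_t>::is_always_lock_free);
    std::atomic_ref<std::uintptr_t> peak_ref(peak);

    std::uintptr_t cur_peak = peak_ref.load();
    do {
        if (candidate <= cur_peak) {
            break;  // Another thread already recorded a higher peak.
        }
    } while (!peak_ref.compare_exchange_strong(cur_peak, candidate));
}
```

An allocator would call this with the end address of each newly allocated object; note that in the diff above, `Allocate` returns the block without invoking `UpdatePeakImpl`. The companion change in slab_helpers.h below only updates the call site for the renamed allocator entry point.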
```diff
diff --git a/src/core/hle/kernel/slab_helpers.h b/src/core/hle/kernel/slab_helpers.h
index f1c11256e..dc1e48fc9 100644
--- a/src/core/hle/kernel/slab_helpers.h
+++ b/src/core/hle/kernel/slab_helpers.h
@@ -59,7 +59,7 @@ class KAutoObjectWithSlabHeapAndContainer : public Base {
 
 private:
     static Derived* Allocate(KernelCore& kernel) {
-        return kernel.SlabHeap<Derived>().AllocateWithKernel(kernel);
+        return kernel.SlabHeap<Derived>().Allocate(kernel);
     }
 
     static void Free(KernelCore& kernel, Derived* obj) {
```
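This second file is just the call-site rename that goes with folding `AllocateWithKernel` into an `Allocate(KernelCore&)` overload; the object is now constructed in place with `std::construct_at`, forwarding the `kernel` reference as a constructor argument. A minimal, self-contained sketch of that in-place construction pattern follows; the `Widget` type and local buffer are made up for illustration.

```cpp
#include <cstddef>
#include <memory>

struct Widget {
    explicit Widget(int id_) : id{id_} {}
    int id;
};

int main() {
    // Caller-provided storage standing in for a slab entry; alignment must
    // match the type being constructed.
    alignas(Widget) std::byte storage[sizeof(Widget)];

    // Construct in place and forward a constructor argument, the same shape
    // as std::construct_at(obj, kernel) in KSlabHeap<T>::Allocate(KernelCore&).
    Widget* w = std::construct_at(reinterpret_cast<Widget*>(storage), 42);

    // Destroy explicitly before the storage is handed back to the slab.
    std::destroy_at(w);
}
```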