| author | 2021-05-27 17:15:23 -0700 |
|---|---|
| committer | 2021-05-28 17:42:41 -0700 |
| commit | ee099b26976dd7bfd1ac7907363f4f7608fe75c0 (patch) |
| tree | 17663da7860f35ae4ac2dad054cb59b75e0363e1 /src |
| parent | Merge pull request #6356 from ogniK5377/ApplyNpadSystemCommonPolicy (diff) |
| download | yuzu-ee099b26976dd7bfd1ac7907363f4f7608fe75c0.tar.gz, yuzu-ee099b26976dd7bfd1ac7907363f4f7608fe75c0.tar.xz, yuzu-ee099b26976dd7bfd1ac7907363f4f7608fe75c0.zip |
hle: kernel: KSlabHeap: Allow host or guest allocations.
- Use host allocations for kernel memory, as this is not properly emulated yet.
- Use guest allocations for TLS, as this needs to be backed by DeviceMemory.
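For orientation, a minimal usage sketch (not part of the patch) of the two allocation modes this commit introduces. `ExampleObject`, `guest_backing`, and `backing_size` are hypothetical names for illustration; the `KSlabHeap` interface used is the one added in `k_slab_heap.h` below.

```cpp
// Hypothetical usage sketch of the new host/guest allocation modes.
// ExampleObject, guest_backing and backing_size are illustrative only.
#include "core/hle/kernel/k_slab_heap.h"

namespace Kernel {

struct ExampleObject {
    u32 value{};
};

void SlabHeapUsageSketch(void* guest_backing, std::size_t backing_size) {
    // Host mode: objects come straight from the host allocator (new/delete),
    // used while the corresponding kernel memory region is not emulated yet.
    KSlabHeap<ExampleObject> host_heap(KSlabHeap<ExampleObject>::AllocationType::Host);
    ExampleObject* a = host_heap.Allocate();
    host_heap.Free(a);

    // Guest mode: objects are carved out of emulated memory (e.g. a
    // DeviceMemory-backed region), as required for TLS.
    KSlabHeap<ExampleObject> guest_heap(KSlabHeap<ExampleObject>::AllocationType::Guest);
    guest_heap.Initialize(guest_backing, backing_size);

    ExampleObject* b = guest_heap.Allocate();
    if (b != nullptr) {
        guest_heap.Free(b);
    }
}

} // namespace Kernel
```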
Diffstat (limited to 'src')
| -rw-r--r-- | src/core/hle/kernel/k_slab_heap.h | 199 |
|---|---|---|
| -rw-r--r-- | src/core/hle/kernel/kernel.cpp | 3 |

2 files changed, 191 insertions(+), 11 deletions(-)
```diff
diff --git a/src/core/hle/kernel/k_slab_heap.h b/src/core/hle/kernel/k_slab_heap.h
index 81d472a3e..0ad74b0a0 100644
--- a/src/core/hle/kernel/k_slab_heap.h
+++ b/src/core/hle/kernel/k_slab_heap.h
@@ -4,34 +4,213 @@
 
 #pragma once
 
+#include <atomic>
+
+#include "common/assert.h"
+#include "common/common_types.h"
+
 namespace Kernel {
 
 class KernelCore;
 
-/// This is a placeholder class to manage slab heaps for kernel objects. For now, we just allocate
-/// these with new/delete, but this can be re-implemented later to allocate these in emulated
-/// memory.
+namespace impl {
+
+class KSlabHeapImpl final : NonCopyable {
+public:
+    struct Node {
+        Node* next{};
+    };
+
+    constexpr KSlabHeapImpl() = default;
+
+    void Initialize(std::size_t size) {
+        ASSERT(head == nullptr);
+        obj_size = size;
+    }
+
+    constexpr std::size_t GetObjectSize() const {
+        return obj_size;
+    }
+
+    Node* GetHead() const {
+        return head;
+    }
+
+    void* Allocate() {
+        Node* ret = head.load();
+
+        do {
+            if (ret == nullptr) {
+                break;
+            }
+        } while (!head.compare_exchange_weak(ret, ret->next));
+
+        return ret;
+    }
+
+    void Free(void* obj) {
+        Node* node = static_cast<Node*>(obj);
+
+        Node* cur_head = head.load();
+        do {
+            node->next = cur_head;
+        } while (!head.compare_exchange_weak(cur_head, node));
+    }
+
+private:
+    std::atomic<Node*> head{};
+    std::size_t obj_size{};
+};
+
+} // namespace impl
+
+class KSlabHeapBase : NonCopyable {
+public:
+    constexpr KSlabHeapBase() = default;
+
+    constexpr bool Contains(uintptr_t addr) const {
+        return start <= addr && addr < end;
+    }
+
+    constexpr std::size_t GetSlabHeapSize() const {
+        return (end - start) / GetObjectSize();
+    }
+
+    constexpr std::size_t GetObjectSize() const {
+        return impl.GetObjectSize();
+    }
+
+    constexpr uintptr_t GetSlabHeapAddress() const {
+        return start;
+    }
+
+    std::size_t GetObjectIndexImpl(const void* obj) const {
+        return (reinterpret_cast<uintptr_t>(obj) - start) / GetObjectSize();
+    }
+
+    std::size_t GetPeakIndex() const {
+        return GetObjectIndexImpl(reinterpret_cast<const void*>(peak));
+    }
+
+    void* AllocateImpl() {
+        return impl.Allocate();
+    }
+
+    void FreeImpl(void* obj) {
+        // Don't allow freeing an object that wasn't allocated from this heap
+        ASSERT(Contains(reinterpret_cast<uintptr_t>(obj)));
+
+        impl.Free(obj);
+    }
+
+    void InitializeImpl(std::size_t obj_size, void* memory, std::size_t memory_size) {
+        // Ensure we don't initialize a slab using null memory
+        ASSERT(memory != nullptr);
+
+        // Initialize the base allocator
+        impl.Initialize(obj_size);
+
+        // Set our tracking variables
+        const std::size_t num_obj = (memory_size / obj_size);
+        start = reinterpret_cast<uintptr_t>(memory);
+        end = start + num_obj * obj_size;
+        peak = start;
+
+        // Free the objects
+        u8* cur = reinterpret_cast<u8*>(end);
+
+        for (std::size_t i{}; i < num_obj; i++) {
+            cur -= obj_size;
+            impl.Free(cur);
+        }
+    }
+
+private:
+    using Impl = impl::KSlabHeapImpl;
+
+    Impl impl;
+    uintptr_t peak{};
+    uintptr_t start{};
+    uintptr_t end{};
+};
 
 template <typename T>
-class KSlabHeap final : NonCopyable {
+class KSlabHeap final : public KSlabHeapBase {
 public:
-    KSlabHeap() = default;
+    enum class AllocationType {
+        Host,
+        Guest,
+    };
 
-    void Initialize([[maybe_unused]] void* memory, [[maybe_unused]] std::size_t memory_size) {
-        // Placeholder that should initialize the backing slab heap implementation.
+    explicit constexpr KSlabHeap(AllocationType allocation_type_ = AllocationType::Host)
+        : KSlabHeapBase(), allocation_type{allocation_type_} {}
+
+    void Initialize(void* memory, std::size_t memory_size) {
+        if (allocation_type == AllocationType::Guest) {
+            InitializeImpl(sizeof(T), memory, memory_size);
+        }
     }
 
     T* Allocate() {
-        return new T();
+        switch (allocation_type) {
+        case AllocationType::Host:
+            // Fallback for cases where we do not yet support allocating guest memory from the slab
+            // heap, such as for kernel memory regions.
+            return new T;
+
+        case AllocationType::Guest:
+            T* obj = static_cast<T*>(AllocateImpl());
+            if (obj != nullptr) {
+                new (obj) T();
+            }
+            return obj;
+        }
+
+        UNREACHABLE_MSG("Invalid AllocationType {}", allocation_type);
+        return nullptr;
     }
 
     T* AllocateWithKernel(KernelCore& kernel) {
-        return new T(kernel);
+        switch (allocation_type) {
+        case AllocationType::Host:
+            // Fallback for cases where we do not yet support allocating guest memory from the slab
+            // heap, such as for kernel memory regions.
+            return new T(kernel);
+
+        case AllocationType::Guest:
+            T* obj = static_cast<T*>(AllocateImpl());
+            if (obj != nullptr) {
+                new (obj) T(kernel);
+            }
+            return obj;
+        }
+
+        UNREACHABLE_MSG("Invalid AllocationType {}", allocation_type);
+        return nullptr;
     }
 
     void Free(T* obj) {
-        delete obj;
+        switch (allocation_type) {
+        case AllocationType::Host:
+            // Fallback for cases where we do not yet support allocating guest memory from the slab
+            // heap, such as for kernel memory regions.
+            delete obj;
+            return;
+
+        case AllocationType::Guest:
+            FreeImpl(obj);
+            return;
+        }
+
+        UNREACHABLE_MSG("Invalid AllocationType {}", allocation_type);
     }
+
+    constexpr std::size_t GetObjectIndex(const T* obj) const {
+        return GetObjectIndexImpl(obj);
+    }
+
+private:
+    const AllocationType allocation_type;
 };
 
 } // namespace Kernel
```
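For readers skimming the diff: `KSlabHeapImpl` above is a lock-free intrusive free list (a Treiber stack). Free slots store the `next` pointer in-place and are popped and pushed with `compare_exchange_weak`. Below is a standalone sketch of that technique, detached from the yuzu types; the class and member names are illustrative only.

```cpp
// Standalone sketch (not part of the patch) of the lock-free free list used by
// KSlabHeapImpl: a Treiber stack whose nodes live inside the free slabs themselves.
#include <atomic>

struct FreeListNode {
    FreeListNode* next{};
};

class LockFreeFreeList {
public:
    // Pop one free slot, or return nullptr if the list is empty.
    void* Pop() {
        FreeListNode* ret = head.load();
        do {
            if (ret == nullptr) {
                break; // list is empty
            }
            // On CAS failure, ret is reloaded with the current head and we retry.
        } while (!head.compare_exchange_weak(ret, ret->next));
        return ret;
    }

    // Push a slot back by threading the node through the freed memory itself.
    void Push(void* slab) {
        FreeListNode* node = static_cast<FreeListNode*>(slab);
        FreeListNode* cur_head = head.load();
        do {
            node->next = cur_head;
            // On CAS failure, cur_head is reloaded and node->next is rewritten.
        } while (!head.compare_exchange_weak(cur_head, node));
    }

private:
    std::atomic<FreeListNode*> head{};
};
```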
```diff
diff --git a/src/core/hle/kernel/kernel.cpp b/src/core/hle/kernel/kernel.cpp
index 8b55df82e..b7a6c9abf 100644
--- a/src/core/hle/kernel/kernel.cpp
+++ b/src/core/hle/kernel/kernel.cpp
@@ -620,7 +620,8 @@ struct KernelCore::Impl {
 
     void InitializePageSlab() {
         // Allocate slab heaps
-        user_slab_heap_pages = std::make_unique<KSlabHeap<Page>>();
+        user_slab_heap_pages =
+            std::make_unique<KSlabHeap<Page>>(KSlabHeap<Page>::AllocationType::Guest);
 
         // TODO(ameerj): This should be derived, not hardcoded within the kernel
         constexpr u64 user_slab_heap_size{0x3de000};
```
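As a quick sanity check on the hardcoded size, and assuming the usual 0x1000-byte `Page` (an assumption; the `Page` definition is not shown in this hunk), the 0x3de000-byte region yields 990 page-sized slab slots:

```cpp
// Sizing sanity check (assumption: Page is 0x1000 bytes; not shown in this hunk).
#include <cstdint>

constexpr std::uint64_t user_slab_heap_size{0x3de000};
constexpr std::uint64_t assumed_page_size{0x1000};

// 0x3de000 / 0x1000 == 0x3de == 990 page-sized slab slots, with no remainder.
static_assert(user_slab_heap_size / assumed_page_size == 990);
static_assert(user_slab_heap_size % assumed_page_size == 0);
```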