Diffstat (limited to 'src/core/hle/kernel')
-rw-r--r--  src/core/hle/kernel/init/init_slab_setup.cpp |  10
-rw-r--r--  src/core/hle/kernel/k_slab_heap.h            | 154
2 files changed, 20 insertions(+), 144 deletions(-)
diff --git a/src/core/hle/kernel/init/init_slab_setup.cpp b/src/core/hle/kernel/init/init_slab_setup.cpp
index 69ae405e6..10edede17 100644
--- a/src/core/hle/kernel/init/init_slab_setup.cpp
+++ b/src/core/hle/kernel/init/init_slab_setup.cpp
@@ -70,14 +70,22 @@ constexpr size_t SlabCountExtraKThread = 160;
 template <typename T>
 VAddr InitializeSlabHeap(Core::System& system, KMemoryLayout& memory_layout, VAddr address,
                          size_t num_objects) {
+    // TODO(bunnei): This is just a placeholder. We should initialize the appropriate KSlabHeap for
+    // kernel object type T with the backing kernel memory pointer once we emulate kernel memory.
+
     const size_t size = Common::AlignUp(sizeof(T) * num_objects, alignof(void*));
     VAddr start = Common::AlignUp(address, alignof(T));
 
+    // This is intentionally empty. Once KSlabHeap is fully implemented, we can replace this with
+    // the pointer to emulated memory to pass along. Until then, KSlabHeap will just allocate/free
+    // host memory.
+    void* backing_kernel_memory{};
+
     if (size > 0) {
         const KMemoryRegion* region = memory_layout.FindVirtual(start + size - 1);
         ASSERT(region != nullptr);
         ASSERT(region->IsDerivedFrom(KMemoryRegionType_KernelSlab));
-        T::InitializeSlabHeap(system.Kernel(), system.Memory().GetKernelBuffer(start, size), size);
+        T::InitializeSlabHeap(system.Kernel(), backing_kernel_memory, size);
     }
 
     return start + size;
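
The return-value contract above is what drives slab layout: each call reserves an aligned region for T and returns the first address past it, so the caller can thread the cursor through successive calls and pack the heaps back to back. A minimal sketch of that chaining, with illustrative object types and counts (not taken from this change):

    // Hypothetical call sequence; the real setup iterates the kernel's slab
    // type list. Each call consumes part of the slab region and returns the
    // cursor where the next heap should start.
    VAddr cursor = slab_region_start;
    cursor = InitializeSlabHeap<KThread>(system, memory_layout, cursor, 800);
    cursor = InitializeSlabHeap<KEvent>(system, memory_layout, cursor, 700);
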
diff --git a/src/core/hle/kernel/k_slab_heap.h b/src/core/hle/kernel/k_slab_heap.h
index 5ce9a1d7c..81d472a3e 100644
--- a/src/core/hle/kernel/k_slab_heap.h
+++ b/src/core/hle/kernel/k_slab_heap.h
@@ -4,165 +4,33 @@
 
 #pragma once
 
-#include <atomic>
-
-#include "common/assert.h"
-#include "common/common_types.h"
-
 namespace Kernel {
 
-namespace impl {
-
-class KSlabHeapImpl final : NonCopyable {
-public:
-    struct Node {
-        Node* next{};
-    };
-
-    constexpr KSlabHeapImpl() = default;
-
-    void Initialize(std::size_t size) {
-        ASSERT(head == nullptr);
-        obj_size = size;
-    }
-
-    constexpr std::size_t GetObjectSize() const {
-        return obj_size;
-    }
-
-    Node* GetHead() const {
-        return head;
-    }
-
-    void* Allocate() {
-        Node* ret = head.load();
-
-        do {
-            if (ret == nullptr) {
-                break;
-            }
-        } while (!head.compare_exchange_weak(ret, ret->next));
-
-        return ret;
-    }
-
-    void Free(void* obj) {
-        Node* node = static_cast<Node*>(obj);
-
-        Node* cur_head = head.load();
-        do {
-            node->next = cur_head;
-        } while (!head.compare_exchange_weak(cur_head, node));
-    }
-
-private:
-    std::atomic<Node*> head{};
-    std::size_t obj_size{};
-};
-
-} // namespace impl
-
-class KSlabHeapBase : NonCopyable {
-public:
-    constexpr KSlabHeapBase() = default;
-
-    constexpr bool Contains(uintptr_t addr) const {
-        return start <= addr && addr < end;
-    }
-
-    constexpr std::size_t GetSlabHeapSize() const {
-        return (end - start) / GetObjectSize();
-    }
-
-    constexpr std::size_t GetObjectSize() const {
-        return impl.GetObjectSize();
-    }
-
-    constexpr uintptr_t GetSlabHeapAddress() const {
-        return start;
-    }
-
-    std::size_t GetObjectIndexImpl(const void* obj) const {
-        return (reinterpret_cast<uintptr_t>(obj) - start) / GetObjectSize();
-    }
-
-    std::size_t GetPeakIndex() const {
-        return GetObjectIndexImpl(reinterpret_cast<const void*>(peak));
-    }
-
-    void* AllocateImpl() {
-        return impl.Allocate();
-    }
-
-    void FreeImpl(void* obj) {
-        // Don't allow freeing an object that wasn't allocated from this heap
-        ASSERT(Contains(reinterpret_cast<uintptr_t>(obj)));
-
-        impl.Free(obj);
-    }
-
-    void InitializeImpl(std::size_t obj_size, void* memory, std::size_t memory_size) {
-        // Ensure we don't initialize a slab using null memory
-        ASSERT(memory != nullptr);
-
-        // Initialize the base allocator
-        impl.Initialize(obj_size);
-
-        // Set our tracking variables
-        const std::size_t num_obj = (memory_size / obj_size);
-        start = reinterpret_cast<uintptr_t>(memory);
-        end = start + num_obj * obj_size;
-        peak = start;
-
-        // Free the objects
-        u8* cur = reinterpret_cast<u8*>(end);
-
-        for (std::size_t i{}; i < num_obj; i++) {
-            cur -= obj_size;
-            impl.Free(cur);
-        }
-    }
-
-private:
-    using Impl = impl::KSlabHeapImpl;
-
-    Impl impl;
-    uintptr_t peak{};
-    uintptr_t start{};
-    uintptr_t end{};
-};
+class KernelCore;
+
+/// This is a placeholder class to manage slab heaps for kernel objects. For now, we just allocate
+/// these with new/delete, but this can be re-implemented later to allocate these in emulated
+/// memory.
 
 template <typename T>
-class KSlabHeap final : public KSlabHeapBase {
+class KSlabHeap final : NonCopyable {
 public:
-    constexpr KSlabHeap() : KSlabHeapBase() {}
+    KSlabHeap() = default;
 
-    void Initialize(void* memory, std::size_t memory_size) {
-        InitializeImpl(sizeof(T), memory, memory_size);
+    void Initialize([[maybe_unused]] void* memory, [[maybe_unused]] std::size_t memory_size) {
+        // Placeholder that should initialize the backing slab heap implementation.
     }
 
     T* Allocate() {
-        T* obj = static_cast<T*>(AllocateImpl());
-        if (obj != nullptr) {
-            new (obj) T();
-        }
-        return obj;
+        return new T();
     }
 
     T* AllocateWithKernel(KernelCore& kernel) {
-        T* obj = static_cast<T*>(AllocateImpl());
-        if (obj != nullptr) {
-            new (obj) T(kernel);
-        }
-        return obj;
+        return new T(kernel);
     }
 
     void Free(T* obj) {
-        FreeImpl(obj);
-    }
-
-    constexpr std::size_t GetObjectIndex(const T* obj) const {
-        return GetObjectIndexImpl(obj);
+        delete obj;
     }
 };
 
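
For reference, the KSlabHeapImpl removed above was a lock-free free list (a Treiber stack): Free() pushes a node onto an atomic head with a CAS retry loop, and Allocate() pops the head the same way, returning nullptr when the list is exhausted. Below is a self-contained sketch of that technique against plain host memory; the names (FreeList, Push, Pop) are ours, not the kernel's:

    #include <atomic>
    #include <cstddef>
    #include <cstdio>
    #include <vector>

    struct Node {
        Node* next{};
    };

    class FreeList {
    public:
        // Push a free block (mirrors the removed KSlabHeapImpl::Free).
        void Push(void* obj) {
            Node* const node = static_cast<Node*>(obj);
            Node* cur_head = head.load();
            do {
                node->next = cur_head;
            } while (!head.compare_exchange_weak(cur_head, node));
        }

        // Pop a block, or nullptr if empty (mirrors the removed Allocate).
        void* Pop() {
            Node* ret = head.load();
            do {
                if (ret == nullptr) {
                    break; // List exhausted; caller sees a failed allocation.
                }
            } while (!head.compare_exchange_weak(ret, ret->next));
            return ret;
        }

    private:
        std::atomic<Node*> head{};
    };

    int main() {
        // Seed the list back to front over a flat buffer, as the removed
        // InitializeImpl did over the slab's backing memory.
        constexpr std::size_t obj_size = 64;
        std::vector<unsigned char> backing(4 * obj_size);
        FreeList list;
        for (std::size_t i = 4; i-- > 0;) {
            list.Push(backing.data() + i * obj_size);
        }

        void* a = list.Pop(); // Lowest address comes off first.
        std::printf("got %p\n", a);
        list.Push(a); // Return the block to the list.
    }

As with the removed implementation, this minimal pop is ABA-prone under concurrent reuse; production lock-free allocators usually version-tag the head pointer to guard against that.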