author    bunnei 2022-09-05 17:53:44 -0700
committer bunnei 2022-10-18 19:13:34 -0700
commit 9ec5f75f43c2ecbfdf52b45f78029b1fd1080658 (patch)
tree   1c5426aa3ea86a7ec247be630520cbc9bc9838df /src
parent core: hle: kernel: Add KDynamicPageManager. (diff)
core: hle: kernel: Add KDynamicSlabHeap.
Diffstat (limited to 'src')
-rw-r--r-- src/core/CMakeLists.txt                      1
-rw-r--r-- src/core/hle/kernel/k_dynamic_slab_heap.h  122
2 files changed, 123 insertions, 0 deletions
diff --git a/src/core/CMakeLists.txt b/src/core/CMakeLists.txt
index 2bb4dea6a..296571762 100644
--- a/src/core/CMakeLists.txt
+++ b/src/core/CMakeLists.txt
@@ -191,6 +191,7 @@ add_library(core STATIC
     hle/kernel/k_condition_variable.cpp
     hle/kernel/k_condition_variable.h
     hle/kernel/k_dynamic_page_manager.h
+    hle/kernel/k_dynamic_slab_heap.h
     hle/kernel/k_event.cpp
     hle/kernel/k_event.h
     hle/kernel/k_handle_table.cpp
diff --git a/src/core/hle/kernel/k_dynamic_slab_heap.h b/src/core/hle/kernel/k_dynamic_slab_heap.h
new file mode 100644
index 000000000..3a0ddd050
--- /dev/null
+++ b/src/core/hle/kernel/k_dynamic_slab_heap.h
@@ -0,0 +1,122 @@
// SPDX-FileCopyrightText: Copyright 2022 yuzu Emulator Project
// SPDX-License-Identifier: GPL-2.0-or-later

#pragma once

#include <atomic>

#include "common/common_funcs.h"
#include "core/hle/kernel/k_dynamic_page_manager.h"
#include "core/hle/kernel/k_slab_heap.h"

namespace Kernel {

template <typename T, bool ClearNode = false>
class KDynamicSlabHeap : protected impl::KSlabHeapImpl {
    YUZU_NON_COPYABLE(KDynamicSlabHeap);
    YUZU_NON_MOVEABLE(KDynamicSlabHeap);

public:
    constexpr KDynamicSlabHeap() = default;

    constexpr VAddr GetAddress() const {
        return m_address;
    }
    constexpr size_t GetSize() const {
        return m_size;
    }
    constexpr size_t GetUsed() const {
        return m_used.load();
    }
    constexpr size_t GetPeak() const {
        return m_peak.load();
    }
    constexpr size_t GetCount() const {
        return m_count.load();
    }

    constexpr bool IsInRange(VAddr addr) const {
        return this->GetAddress() <= addr && addr <= this->GetAddress() + this->GetSize() - 1;
    }

    void Initialize(KDynamicPageManager* page_allocator, size_t num_objects) {
        ASSERT(page_allocator != nullptr);

        // Initialize members.
        m_address = page_allocator->GetAddress();
        m_size = page_allocator->GetSize();

        // Initialize the base allocator.
        KSlabHeapImpl::Initialize();

        // Allocate until we have the correct number of objects.
        while (m_count.load() < num_objects) {
            auto* allocated = reinterpret_cast<T*>(page_allocator->Allocate());
            ASSERT(allocated != nullptr);

            for (size_t i = 0; i < sizeof(PageBuffer) / sizeof(T); i++) {
                KSlabHeapImpl::Free(allocated + i);
            }

            m_count += sizeof(PageBuffer) / sizeof(T);
        }
    }

    T* Allocate(KDynamicPageManager* page_allocator) {
        T* allocated = static_cast<T*>(KSlabHeapImpl::Allocate());

        // If we successfully allocated and we should clear the node, do so.
        if constexpr (ClearNode) {
            if (allocated != nullptr) [[likely]] {
                reinterpret_cast<KSlabHeapImpl::Node*>(allocated)->next = nullptr;
            }
        }

        // If we fail to allocate, try to get a new page from our next allocator.
        if (allocated == nullptr) [[unlikely]] {
            if (page_allocator != nullptr) {
                allocated = reinterpret_cast<T*>(page_allocator->Allocate());
                if (allocated != nullptr) {
                    // If we succeeded in getting a page, free the rest to our slab.
                    for (size_t i = 1; i < sizeof(PageBuffer) / sizeof(T); i++) {
                        KSlabHeapImpl::Free(allocated + i);
                    }
                    m_count += sizeof(PageBuffer) / sizeof(T);
                }
            }
        }

        if (allocated != nullptr) [[likely]] {
            // Construct the object.
            std::construct_at(allocated);

            // Update our tracking.
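            // The peak is advanced with a CAS loop so that concurrent allocators
            // never overwrite a larger maximum; relaxed ordering suffices because
            // these counters are statistics only.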
            const size_t used = ++m_used;
            size_t peak = m_peak.load();
            while (peak < used) {
                if (m_peak.compare_exchange_weak(peak, used, std::memory_order_relaxed)) {
                    break;
                }
            }
        }

        return allocated;
    }

    void Free(T* t) {
        KSlabHeapImpl::Free(t);
        --m_used;
    }

private:
    using PageBuffer = KDynamicPageManager::PageBuffer;

private:
    std::atomic<size_t> m_used{};
    std::atomic<size_t> m_peak{};
    std::atomic<size_t> m_count{};
    VAddr m_address{};
    size_t m_size{};
};

} // namespace Kernel
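
For orientation, here is a minimal usage sketch (not part of this commit): the heap is seeded from a KDynamicPageManager during Initialize, serves allocations from its intrusive free list, and falls back to the page manager for a fresh page only when that list is exhausted. The node type, object count, and page-manager setup below are illustrative assumptions.

// Hypothetical usage sketch; MyNode and the counts are assumptions for
// illustration, not part of this commit.
#include "common/common_types.h"
#include "core/hle/kernel/k_dynamic_page_manager.h"
#include "core/hle/kernel/k_dynamic_slab_heap.h"

namespace Kernel {

struct MyNode {
    MyNode* next{};
    u64 payload{};
};

void ExampleUsage(KDynamicPageManager& page_allocator) {
    // ClearNode=true zeroes each node's intrusive link on allocation.
    KDynamicSlabHeap<MyNode, true> heap;

    // Seed the heap with at least 128 objects, pulled page-by-page from the
    // page allocator.
    heap.Initialize(&page_allocator, 128);

    // Allocation falls back to the page allocator when the free list is empty.
    if (MyNode* node = heap.Allocate(&page_allocator)) {
        node->payload = 42;
        heap.Free(node);
    }
}

} // namespace Kernel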