author     bunnei    2020-04-05 15:26:25 -0400
committer  bunnei    2020-04-17 00:59:30 -0400
commit     548ef190ab95f2f4562f700f7e1c622df9bd6afe (patch)
tree       161b938fe5304c1d06fffd0facc1067906050ea7
parent     kernel: memory: Add PageHeap class, to manage a heap of pages. (diff)
kernel: memory: Add MemoryBlockManager class, to manage memory blocks.
Diffstat
-rw-r--r--  src/core/CMakeLists.txt                              |   2
-rw-r--r--  src/core/hle/kernel/memory/memory_block_manager.cpp  | 190
-rw-r--r--  src/core/hle/kernel/memory/memory_block_manager.h    |  64
3 files changed, 256 insertions(+), 0 deletions(-)
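A minimal usage sketch of the new class (not part of the commit) follows. It assumes the rest of the yuzu kernel memory types from this series (PageSize from memory_types.h, the MemoryState/MemoryPermission/MemoryAttribute enums from memory_block.h), and in particular the MemoryState::Normal and MemoryPermission::ReadAndWrite enumerators, which do not appear in this diff; the address range and page counts are purely illustrative.

// Sketch only: construct a manager over an arbitrary 512 MiB address space,
// reassign the first 16 pages, then walk and query the resulting blocks.
using namespace Kernel::Memory;

const VAddr space_start{0x8000000};
const VAddr space_end{space_start + 512 * 1024 * 1024};
MemoryBlockManager manager{space_start, space_end};

// The whole space starts as one Free block; this Update splits it into a
// 16-page Normal/ReadAndWrite block followed by the remaining Free block.
manager.Update(space_start, 16, MemoryState::Normal, MemoryPermission::ReadAndWrite);

// Visit every block overlapping the first 32 pages of the space.
manager.IterateForRange(space_start, space_start + 32 * PageSize,
                        [](const MemoryInfo& info) {
                            // info.addr, info.size and info.state describe one block.
                        });

// Look for 16 free pages inside the space, page-aligned, with one guard page.
// FindFreeArea returns a default-constructed (zero) VAddr when no gap fits.
const std::size_t region_pages{(space_end - space_start) / PageSize};
const VAddr free_area{manager.FindFreeArea(space_start, region_pages, 16, PageSize, 0, 1)};

Adjacent blocks that end up with identical properties after an Update are folded back together by MergeAdjacent, so the block list stays as small as possible.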
diff --git a/src/core/CMakeLists.txt b/src/core/CMakeLists.txt
index 2a2239951..9fc5bd84b 100644
--- a/src/core/CMakeLists.txt
+++ b/src/core/CMakeLists.txt
@@ -157,6 +157,8 @@ add_library(core STATIC
     hle/kernel/memory/address_space_info.cpp
     hle/kernel/memory/address_space_info.h
     hle/kernel/memory/memory_block.h
+    hle/kernel/memory/memory_block_manager.cpp
+    hle/kernel/memory/memory_block_manager.h
     hle/kernel/memory/memory_types.h
     hle/kernel/memory/page_linked_list.h
     hle/kernel/memory/page_heap.cpp
diff --git a/src/core/hle/kernel/memory/memory_block_manager.cpp b/src/core/hle/kernel/memory/memory_block_manager.cpp
new file mode 100644
index 000000000..da009566f
--- /dev/null
+++ b/src/core/hle/kernel/memory/memory_block_manager.cpp
@@ -0,0 +1,190 @@
+// Copyright 2020 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#include "core/hle/kernel/memory/memory_block_manager.h"
+#include "core/hle/kernel/memory/memory_types.h"
+
+namespace Kernel::Memory {
+
+MemoryBlockManager::MemoryBlockManager(VAddr start_addr, VAddr end_addr)
+    : start_addr{start_addr}, end_addr{end_addr} {
+    const u64 num_pages{(end_addr - start_addr) / PageSize};
+    memory_block_tree.emplace_back(start_addr, num_pages, MemoryState::Free, MemoryPermission::None,
+                                   MemoryAttribute::None);
+}
+
+MemoryBlockManager::iterator MemoryBlockManager::FindIterator(VAddr addr) {
+    iterator node{memory_block_tree.begin()};
+    while (node != end()) {
+        const VAddr end_addr{node->GetNumPages() * PageSize + node->GetAddress()};
+        if (node->GetAddress() <= addr && end_addr - 1 >= addr) {
+            return node;
+        }
+        node = std::next(node);
+    }
+    return end();
+}
+
+VAddr MemoryBlockManager::FindFreeArea(VAddr region_start, std::size_t region_num_pages,
+                                       std::size_t num_pages, std::size_t align, std::size_t offset,
+                                       std::size_t guard_pages) {
+    if (num_pages == 0) {
+        return {};
+    }
+
+    const VAddr region_end{region_start + region_num_pages * PageSize};
+    const VAddr region_last{region_end - 1};
+    for (const_iterator it{FindIterator(region_start)}; it != memory_block_tree.cend(); it++) {
+        const MemoryInfo info{it->GetMemoryInfo()};
+        if (region_last < info.GetAddress()) {
+            break;
+        }
+
+        if (info.state != MemoryState::Free) {
+            continue;
+        }
+
+        VAddr area{(info.GetAddress() <= region_start) ? region_start : info.GetAddress()};
+        area += guard_pages * PageSize;
+
+        const VAddr offset_area{Common::AlignDown(area, align) + offset};
+        area = (area <= offset_area) ? offset_area : offset_area + align;
+
+        const VAddr area_end{area + num_pages * PageSize + guard_pages * PageSize};
+        const VAddr area_last{area_end - 1};
+
+        if (info.GetAddress() <= area && area < area_last && area_last <= region_last &&
+            area_last <= info.GetLastAddress()) {
+            return area;
+        }
+    }
+
+    return {};
+}
+
+void MemoryBlockManager::Update(VAddr addr, std::size_t num_pages, MemoryState prev_state,
+                                MemoryPermission prev_perm, MemoryAttribute prev_attribute,
+                                MemoryState state, MemoryPermission perm,
+                                MemoryAttribute attribute) {
+    const std::size_t prev_count{memory_block_tree.size()};
+    const VAddr end_addr{addr + num_pages * PageSize};
+    iterator node{memory_block_tree.begin()};
+
+    prev_attribute |= MemoryAttribute::IpcAndDeviceMapped;
+
+    while (node != memory_block_tree.end()) {
+        MemoryBlock* block{&(*node)};
+        iterator next_node{std::next(node)};
+        const VAddr cur_addr{block->GetAddress()};
+        const VAddr cur_end_addr{block->GetNumPages() * PageSize + cur_addr};
+
+        if (addr < cur_end_addr && cur_addr < end_addr) {
+            if (!block->HasProperties(prev_state, prev_perm, prev_attribute)) {
+                node = next_node;
+                continue;
+            }
+
+            iterator new_node{node};
+            if (addr > cur_addr) {
+                memory_block_tree.insert(node, block->Split(addr));
+            }
+
+            if (end_addr < cur_end_addr) {
+                new_node = memory_block_tree.insert(node, block->Split(end_addr));
+            }
+
+            new_node->Update(state, perm, attribute);
+
+            MergeAdjacent(new_node, next_node);
+        }
+
+        if (cur_end_addr - 1 >= end_addr - 1) {
+            break;
+        }
+
+        node = next_node;
+    }
+}
+
+void MemoryBlockManager::Update(VAddr addr, std::size_t num_pages, MemoryState state,
+                                MemoryPermission perm, MemoryAttribute attribute) {
+    const std::size_t prev_count{memory_block_tree.size()};
+    const VAddr end_addr{addr + num_pages * PageSize};
+    iterator node{memory_block_tree.begin()};
+
+    while (node != memory_block_tree.end()) {
+        MemoryBlock* block{&(*node)};
+        iterator next_node{std::next(node)};
+        const VAddr cur_addr{block->GetAddress()};
+        const VAddr cur_end_addr{block->GetNumPages() * PageSize + cur_addr};
+
+        if (addr < cur_end_addr && cur_addr < end_addr) {
+            iterator new_node{node};
+
+            if (addr > cur_addr) {
+                memory_block_tree.insert(node, block->Split(addr));
+            }
+
+            if (end_addr < cur_end_addr) {
+                new_node = memory_block_tree.insert(node, block->Split(end_addr));
+            }
+
+            new_node->Update(state, perm, attribute);
+
+            MergeAdjacent(new_node, next_node);
+        }
+
+        if (cur_end_addr - 1 >= end_addr - 1) {
+            break;
+        }
+
+        node = next_node;
+    }
+}
+
+void MemoryBlockManager::IterateForRange(VAddr start, VAddr end, IterateFunc&& func) {
+    const_iterator it{FindIterator(start)};
+    MemoryInfo info{};
+    do {
+        info = it->GetMemoryInfo();
+        func(info);
+        it = std::next(it);
+    } while (info.addr + info.size - 1 < end - 1 && it != cend());
+}
+
+void MemoryBlockManager::MergeAdjacent(iterator it, iterator& next_it) {
+    MemoryBlock* block{&(*it)};
+
+    auto EraseIt = [&](const iterator it_to_erase) {
+        if (next_it == it_to_erase) {
+            next_it = std::next(next_it);
+        }
+        memory_block_tree.erase(it_to_erase);
+    };
+
+    if (it != memory_block_tree.begin()) {
+        MemoryBlock* prev{&(*std::prev(it))};
+
+        if (block->HasSameProperties(*prev)) {
+            const iterator prev_it{std::prev(it)};
+
+            prev->Add(block->GetNumPages());
+            EraseIt(it);
+
+            it = prev_it;
+            block = prev;
+        }
+    }
+
+    if (it != cend()) {
+        const MemoryBlock* const next{&(*std::next(it))};
+
+        if (block->HasSameProperties(*next)) {
+            block->Add(next->GetNumPages());
+            EraseIt(std::next(it));
+        }
+    }
+}
+
+} // namespace Kernel::Memory
diff --git a/src/core/hle/kernel/memory/memory_block_manager.h b/src/core/hle/kernel/memory/memory_block_manager.h
new file mode 100644
index 000000000..0f2270f0f
--- /dev/null
+++ b/src/core/hle/kernel/memory/memory_block_manager.h
@@ -0,0 +1,64 @@
+// Copyright 2020 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#pragma once
+
+#include <functional>
+#include <list>
+#include <memory>
+
+#include "common/common_types.h"
+#include "core/hle/kernel/memory/memory_block.h"
+
+namespace Kernel::Memory {
+
+class MemoryBlockManager final {
+public:
+    using MemoryBlockTree = std::list<MemoryBlock>;
+    using iterator = MemoryBlockTree::iterator;
+    using const_iterator = MemoryBlockTree::const_iterator;
+
+public:
+    MemoryBlockManager(VAddr start_addr, VAddr end_addr);
+
+    iterator end() {
+        return memory_block_tree.end();
+    }
+    const_iterator end() const {
+        return memory_block_tree.end();
+    }
+    const_iterator cend() const {
+        return memory_block_tree.cend();
+    }
+
+    iterator FindIterator(VAddr addr);
+
+    VAddr FindFreeArea(VAddr region_start, std::size_t region_num_pages, std::size_t num_pages,
+                       std::size_t align, std::size_t offset, std::size_t guard_pages);
+
+    void Update(VAddr addr, std::size_t num_pages, MemoryState prev_state,
+                MemoryPermission prev_perm, MemoryAttribute prev_attribute, MemoryState state,
+                MemoryPermission perm, MemoryAttribute attribute);
+
+    void Update(VAddr addr, std::size_t num_pages, MemoryState state,
+                MemoryPermission perm = MemoryPermission::None,
+                MemoryAttribute attribute = MemoryAttribute::None);
+
+    using IterateFunc = std::function<void(const MemoryInfo&)>;
+    void IterateForRange(VAddr start, VAddr end, IterateFunc&& func);
+
+    MemoryBlock& FindBlock(VAddr addr) {
+        return *FindIterator(addr);
+    }
+
+private:
+    void MergeAdjacent(iterator it, iterator& next_it);
+
+    const VAddr start_addr;
+    const VAddr end_addr;
+
+    MemoryBlockTree memory_block_tree;
+};
+
+} // namespace Kernel::Memory