summaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
authorGravatar bunnei2022-10-29 14:16:54 -0700
committerGravatar bunnei2022-11-03 21:17:06 -0700
commit6b6c02f5415b6c63f71bf114fd4ee8e336dd2895 (patch)
treea402c936ff1cb60a2277e7b4ef9f2d96df4ae111 /src
parentcore: hle: kernel: k_memory_block: Refresh. (diff)
downloadyuzu-6b6c02f5415b6c63f71bf114fd4ee8e336dd2895.tar.gz
yuzu-6b6c02f5415b6c63f71bf114fd4ee8e336dd2895.tar.xz
yuzu-6b6c02f5415b6c63f71bf114fd4ee8e336dd2895.zip
core: hle: kernel: k_page_bitmap: Refresh.
Diffstat (limited to 'src')
-rw-r--r--src/core/hle/kernel/k_page_bitmap.h243
1 file changed, 155 insertions, 88 deletions
diff --git a/src/core/hle/kernel/k_page_bitmap.h b/src/core/hle/kernel/k_page_bitmap.h
index c97b3dc0b..0ff987732 100644
--- a/src/core/hle/kernel/k_page_bitmap.h
+++ b/src/core/hle/kernel/k_page_bitmap.h
@@ -16,107 +16,126 @@
16namespace Kernel { 16namespace Kernel {
17 17
18class KPageBitmap { 18class KPageBitmap {
19private: 19public:
20 class RandomBitGenerator { 20 class RandomBitGenerator {
21 private: 21 public:
22 Common::TinyMT rng{}; 22 RandomBitGenerator() {
23 u32 entropy{}; 23 m_rng.Initialize(static_cast<u32>(KSystemControl::GenerateRandomU64()));
24 u32 bits_available{}; 24 }
25
26 u64 SelectRandomBit(u64 bitmap) {
27 u64 selected = 0;
28
29 for (size_t cur_num_bits = Common::BitSize<decltype(bitmap)>() / 2; cur_num_bits != 0;
30 cur_num_bits /= 2) {
31 const u64 high = (bitmap >> cur_num_bits);
32 const u64 low = (bitmap & (~(UINT64_C(0xFFFFFFFFFFFFFFFF) << cur_num_bits)));
33
34 // Choose high if we have high and (don't have low or select high randomly).
35 if (high && (low == 0 || this->GenerateRandomBit())) {
36 bitmap = high;
37 selected += cur_num_bits;
38 } else {
39 bitmap = low;
40 selected += 0;
41 }
42 }
43
44 return selected;
45 }
46
47 u64 GenerateRandom(u64 max) {
48 // Determine the number of bits we need.
49 const u64 bits_needed = 1 + (Common::BitSize<decltype(max)>() - std::countl_zero(max));
50
51 // Generate a random value of the desired bitwidth.
52 const u64 rnd = this->GenerateRandomBits(static_cast<u32>(bits_needed));
53
54 // Adjust the value to be in range.
55 return rnd - ((rnd / max) * max);
56 }
25 57
26 private: 58 private:
27 void RefreshEntropy() { 59 void RefreshEntropy() {
28 entropy = rng.GenerateRandomU32(); 60 m_entropy = m_rng.GenerateRandomU32();
29 bits_available = static_cast<u32>(Common::BitSize<decltype(entropy)>()); 61 m_bits_available = static_cast<u32>(Common::BitSize<decltype(m_entropy)>());
30 } 62 }
31 63
32 bool GenerateRandomBit() { 64 bool GenerateRandomBit() {
33 if (bits_available == 0) { 65 if (m_bits_available == 0) {
34 this->RefreshEntropy(); 66 this->RefreshEntropy();
35 } 67 }
36 68
37 const bool rnd_bit = (entropy & 1) != 0; 69 const bool rnd_bit = (m_entropy & 1) != 0;
38 entropy >>= 1; 70 m_entropy >>= 1;
39 --bits_available; 71 --m_bits_available;
40 return rnd_bit; 72 return rnd_bit;
41 } 73 }
42 74
43 public: 75 u64 GenerateRandomBits(u32 num_bits) {
44 RandomBitGenerator() { 76 u64 result = 0;
45 rng.Initialize(static_cast<u32>(KSystemControl::GenerateRandomU64()));
46 }
47 77
48 std::size_t SelectRandomBit(u64 bitmap) { 78 // Iteratively add random bits to our result.
49 u64 selected = 0; 79 while (num_bits > 0) {
80 // Ensure we have random bits to take from.
81 if (m_bits_available == 0) {
82 this->RefreshEntropy();
83 }
50 84
51 u64 cur_num_bits = Common::BitSize<decltype(bitmap)>() / 2; 85 // Determine how many bits to take this round.
52 u64 cur_mask = (1ULL << cur_num_bits) - 1; 86 const auto cur_bits = std::min(num_bits, m_bits_available);
53 87
54 while (cur_num_bits) { 88 // Generate mask for our current bits.
55 const u64 low = (bitmap >> 0) & cur_mask; 89 const u64 mask = (static_cast<u64>(1) << cur_bits) - 1;
56 const u64 high = (bitmap >> cur_num_bits) & cur_mask;
57 90
58 bool choose_low; 91 // Add bits to output from our entropy.
59 if (high == 0) { 92 result <<= cur_bits;
60 // If only low val is set, choose low. 93 result |= (m_entropy & mask);
61 choose_low = true;
62 } else if (low == 0) {
63 // If only high val is set, choose high.
64 choose_low = false;
65 } else {
66 // If both are set, choose random.
67 choose_low = this->GenerateRandomBit();
68 }
69 94
70 // If we chose low, proceed with low. 95 // Remove bits from our entropy.
71 if (choose_low) { 96 m_entropy >>= cur_bits;
72 bitmap = low; 97 m_bits_available -= cur_bits;
73 selected += 0;
74 } else {
75 bitmap = high;
76 selected += cur_num_bits;
77 }
78 98
79 // Proceed. 99 // Advance.
80 cur_num_bits /= 2; 100 num_bits -= cur_bits;
81 cur_mask >>= cur_num_bits;
82 } 101 }
83 102
84 return selected; 103 return result;
85 } 104 }
105
106 private:
107 Common::TinyMT m_rng;
108 u32 m_entropy{};
109 u32 m_bits_available{};
86 }; 110 };
87 111
88public: 112public:
89 static constexpr std::size_t MaxDepth = 4; 113 static constexpr size_t MaxDepth = 4;
90
91private:
92 std::array<u64*, MaxDepth> bit_storages{};
93 RandomBitGenerator rng{};
94 std::size_t num_bits{};
95 std::size_t used_depths{};
96 114
97public: 115public:
98 KPageBitmap() = default; 116 KPageBitmap() = default;
99 117
100 constexpr std::size_t GetNumBits() const { 118 constexpr size_t GetNumBits() const {
101 return num_bits; 119 return m_num_bits;
102 } 120 }
103 constexpr s32 GetHighestDepthIndex() const { 121 constexpr s32 GetHighestDepthIndex() const {
104 return static_cast<s32>(used_depths) - 1; 122 return static_cast<s32>(m_used_depths) - 1;
105 } 123 }
106 124
107 u64* Initialize(u64* storage, std::size_t size) { 125 u64* Initialize(u64* storage, size_t size) {
108 // Initially, everything is un-set. 126 // Initially, everything is un-set.
109 num_bits = 0; 127 m_num_bits = 0;
110 128
111 // Calculate the needed bitmap depth. 129 // Calculate the needed bitmap depth.
112 used_depths = static_cast<std::size_t>(GetRequiredDepth(size)); 130 m_used_depths = static_cast<size_t>(GetRequiredDepth(size));
113 ASSERT(used_depths <= MaxDepth); 131 ASSERT(m_used_depths <= MaxDepth);
114 132
115 // Set the bitmap pointers. 133 // Set the bitmap pointers.
116 for (s32 depth = this->GetHighestDepthIndex(); depth >= 0; depth--) { 134 for (s32 depth = this->GetHighestDepthIndex(); depth >= 0; depth--) {
117 bit_storages[depth] = storage; 135 m_bit_storages[depth] = storage;
118 size = Common::AlignUp(size, Common::BitSize<u64>()) / Common::BitSize<u64>(); 136 size = Common::AlignUp(size, Common::BitSize<u64>()) / Common::BitSize<u64>();
119 storage += size; 137 storage += size;
138 m_end_storages[depth] = storage;
120 } 139 }
121 140
122 return storage; 141 return storage;
@@ -128,19 +147,19 @@ public:
128 147
129 if (random) { 148 if (random) {
130 do { 149 do {
131 const u64 v = bit_storages[depth][offset]; 150 const u64 v = m_bit_storages[depth][offset];
132 if (v == 0) { 151 if (v == 0) {
133 // If depth is bigger than zero, then a previous level indicated a block was 152 // If depth is bigger than zero, then a previous level indicated a block was
134 // free. 153 // free.
135 ASSERT(depth == 0); 154 ASSERT(depth == 0);
136 return -1; 155 return -1;
137 } 156 }
138 offset = offset * Common::BitSize<u64>() + rng.SelectRandomBit(v); 157 offset = offset * Common::BitSize<u64>() + m_rng.SelectRandomBit(v);
139 ++depth; 158 ++depth;
140 } while (depth < static_cast<s32>(used_depths)); 159 } while (depth < static_cast<s32>(m_used_depths));
141 } else { 160 } else {
142 do { 161 do {
143 const u64 v = bit_storages[depth][offset]; 162 const u64 v = m_bit_storages[depth][offset];
144 if (v == 0) { 163 if (v == 0) {
145 // If depth is bigger than zero, then a previous level indicated a block was 164 // If depth is bigger than zero, then a previous level indicated a block was
146 // free. 165 // free.
@@ -149,28 +168,69 @@ public:
149 } 168 }
150 offset = offset * Common::BitSize<u64>() + std::countr_zero(v); 169 offset = offset * Common::BitSize<u64>() + std::countr_zero(v);
151 ++depth; 170 ++depth;
152 } while (depth < static_cast<s32>(used_depths)); 171 } while (depth < static_cast<s32>(m_used_depths));
153 } 172 }
154 173
155 return static_cast<s64>(offset); 174 return static_cast<s64>(offset);
156 } 175 }
157 176
158 void SetBit(std::size_t offset) { 177 s64 FindFreeRange(size_t count) {
178 // Check that it is possible to find a range.
179 const u64* const storage_start = m_bit_storages[m_used_depths - 1];
180 const u64* const storage_end = m_end_storages[m_used_depths - 1];
181
182 // If we don't have a storage to iterate (or want more blocks than fit in a single storage),
183 // we can't find a free range.
184 if (!(storage_start < storage_end && count <= Common::BitSize<u64>())) {
185 return -1;
186 }
187
188 // Walk the storages to select a random free range.
189 const size_t options_per_storage = std::max<size_t>(Common::BitSize<u64>() / count, 1);
190 const size_t num_entries = std::max<size_t>(storage_end - storage_start, 1);
191
192 const u64 free_mask = (static_cast<u64>(1) << count) - 1;
193
194 size_t num_valid_options = 0;
195 s64 chosen_offset = -1;
196 for (size_t storage_index = 0; storage_index < num_entries; ++storage_index) {
197 u64 storage = storage_start[storage_index];
198 for (size_t option = 0; option < options_per_storage; ++option) {
199 if ((storage & free_mask) == free_mask) {
200 // We've found a new valid option.
201 ++num_valid_options;
202
203 // Select the Kth valid option with probability 1/K. This leads to an overall
204 // uniform distribution.
205 if (num_valid_options == 1 || m_rng.GenerateRandom(num_valid_options) == 0) {
206 // Select this option (either it is the first one, or it replaces the previous choice).
207 chosen_offset = storage_index * Common::BitSize<u64>() + option * count;
208 }
209 }
210 storage >>= count;
211 }
212 }
213
214 // Return the random offset we chose.
215 return chosen_offset;
216 }
217
218 void SetBit(size_t offset) {
159 this->SetBit(this->GetHighestDepthIndex(), offset); 219 this->SetBit(this->GetHighestDepthIndex(), offset);
160 num_bits++; 220 m_num_bits++;
161 } 221 }
162 222
163 void ClearBit(std::size_t offset) { 223 void ClearBit(size_t offset) {
164 this->ClearBit(this->GetHighestDepthIndex(), offset); 224 this->ClearBit(this->GetHighestDepthIndex(), offset);
165 num_bits--; 225 m_num_bits--;
166 } 226 }
167 227
168 bool ClearRange(std::size_t offset, std::size_t count) { 228 bool ClearRange(size_t offset, size_t count) {
169 s32 depth = this->GetHighestDepthIndex(); 229 s32 depth = this->GetHighestDepthIndex();
170 u64* bits = bit_storages[depth]; 230 u64* bits = m_bit_storages[depth];
171 std::size_t bit_ind = offset / Common::BitSize<u64>(); 231 size_t bit_ind = offset / Common::BitSize<u64>();
172 if (count < Common::BitSize<u64>()) { 232 if (count < Common::BitSize<u64>()) [[likely]] {
173 const std::size_t shift = offset % Common::BitSize<u64>(); 233 const size_t shift = offset % Common::BitSize<u64>();
174 ASSERT(shift + count <= Common::BitSize<u64>()); 234 ASSERT(shift + count <= Common::BitSize<u64>());
175 // Check that all the bits are set. 235 // Check that all the bits are set.
176 const u64 mask = ((u64(1) << count) - 1) << shift; 236 const u64 mask = ((u64(1) << count) - 1) << shift;
@@ -189,8 +249,8 @@ public:
189 ASSERT(offset % Common::BitSize<u64>() == 0); 249 ASSERT(offset % Common::BitSize<u64>() == 0);
190 ASSERT(count % Common::BitSize<u64>() == 0); 250 ASSERT(count % Common::BitSize<u64>() == 0);
191 // Check that all the bits are set. 251 // Check that all the bits are set.
192 std::size_t remaining = count; 252 size_t remaining = count;
193 std::size_t i = 0; 253 size_t i = 0;
194 do { 254 do {
195 if (bits[bit_ind + i++] != ~u64(0)) { 255 if (bits[bit_ind + i++] != ~u64(0)) {
196 return false; 256 return false;
@@ -209,18 +269,18 @@ public:
209 } while (remaining > 0); 269 } while (remaining > 0);
210 } 270 }
211 271
212 num_bits -= count; 272 m_num_bits -= count;
213 return true; 273 return true;
214 } 274 }
215 275
216private: 276private:
217 void SetBit(s32 depth, std::size_t offset) { 277 void SetBit(s32 depth, size_t offset) {
218 while (depth >= 0) { 278 while (depth >= 0) {
219 std::size_t ind = offset / Common::BitSize<u64>(); 279 size_t ind = offset / Common::BitSize<u64>();
220 std::size_t which = offset % Common::BitSize<u64>(); 280 size_t which = offset % Common::BitSize<u64>();
221 const u64 mask = u64(1) << which; 281 const u64 mask = u64(1) << which;
222 282
223 u64* bit = std::addressof(bit_storages[depth][ind]); 283 u64* bit = std::addressof(m_bit_storages[depth][ind]);
224 u64 v = *bit; 284 u64 v = *bit;
225 ASSERT((v & mask) == 0); 285 ASSERT((v & mask) == 0);
226 *bit = v | mask; 286 *bit = v | mask;
@@ -232,13 +292,13 @@ private:
232 } 292 }
233 } 293 }
234 294
235 void ClearBit(s32 depth, std::size_t offset) { 295 void ClearBit(s32 depth, size_t offset) {
236 while (depth >= 0) { 296 while (depth >= 0) {
237 std::size_t ind = offset / Common::BitSize<u64>(); 297 size_t ind = offset / Common::BitSize<u64>();
238 std::size_t which = offset % Common::BitSize<u64>(); 298 size_t which = offset % Common::BitSize<u64>();
239 const u64 mask = u64(1) << which; 299 const u64 mask = u64(1) << which;
240 300
241 u64* bit = std::addressof(bit_storages[depth][ind]); 301 u64* bit = std::addressof(m_bit_storages[depth][ind]);
242 u64 v = *bit; 302 u64 v = *bit;
243 ASSERT((v & mask) != 0); 303 ASSERT((v & mask) != 0);
244 v &= ~mask; 304 v &= ~mask;
@@ -252,7 +312,7 @@ private:
252 } 312 }
253 313
254private: 314private:
255 static constexpr s32 GetRequiredDepth(std::size_t region_size) { 315 static constexpr s32 GetRequiredDepth(size_t region_size) {
256 s32 depth = 0; 316 s32 depth = 0;
257 while (true) { 317 while (true) {
258 region_size /= Common::BitSize<u64>(); 318 region_size /= Common::BitSize<u64>();
@@ -264,8 +324,8 @@ private:
264 } 324 }
265 325
266public: 326public:
267 static constexpr std::size_t CalculateManagementOverheadSize(std::size_t region_size) { 327 static constexpr size_t CalculateManagementOverheadSize(size_t region_size) {
268 std::size_t overhead_bits = 0; 328 size_t overhead_bits = 0;
269 for (s32 depth = GetRequiredDepth(region_size) - 1; depth >= 0; depth--) { 329 for (s32 depth = GetRequiredDepth(region_size) - 1; depth >= 0; depth--) {
270 region_size = 330 region_size =
271 Common::AlignUp(region_size, Common::BitSize<u64>()) / Common::BitSize<u64>(); 331 Common::AlignUp(region_size, Common::BitSize<u64>()) / Common::BitSize<u64>();
@@ -273,6 +333,13 @@ public:
273 } 333 }
274 return overhead_bits * sizeof(u64); 334 return overhead_bits * sizeof(u64);
275 } 335 }
336
337private:
338 std::array<u64*, MaxDepth> m_bit_storages{};
339 std::array<u64*, MaxDepth> m_end_storages{};
340 RandomBitGenerator m_rng;
341 size_t m_num_bits{};
342 size_t m_used_depths{};
276}; 343};
277 344
278} // namespace Kernel 345} // namespace Kernel