| author | 2021-02-11 18:39:06 -0800 |
|---|---|
| committer | 2021-02-18 16:16:24 -0800 |
| commit | a02566136c225e0824a3f217766efcb3b3f55ec2 (patch) |
| tree | 8b0680b665f2e1f66fe7523cbc32daed39154495 /src/core |
| parent | hle: kernel: system_control: Add function GenerateRandomU64. (diff) |
hle: kernel: Add KPageBitmap class.
Diffstat (limited to 'src/core')
| mode | path | lines |
|---|---|---|
| -rw-r--r-- | src/core/CMakeLists.txt | 1 |
| -rw-r--r-- | src/core/hle/kernel/k_page_bitmap.h | 279 |
2 files changed, 280 insertions, 0 deletions
```diff
diff --git a/src/core/CMakeLists.txt b/src/core/CMakeLists.txt
index a6b5fef56..6604bc2c5 100644
--- a/src/core/CMakeLists.txt
+++ b/src/core/CMakeLists.txt
@@ -164,6 +164,7 @@ add_library(core STATIC
     hle/kernel/k_light_condition_variable.h
     hle/kernel/k_light_lock.cpp
     hle/kernel/k_light_lock.h
+    hle/kernel/k_page_bitmap.h
     hle/kernel/k_priority_queue.h
     hle/kernel/k_readable_event.cpp
     hle/kernel/k_readable_event.h
```
```diff
diff --git a/src/core/hle/kernel/k_page_bitmap.h b/src/core/hle/kernel/k_page_bitmap.h
new file mode 100644
index 000000000..da2d20032
--- /dev/null
+++ b/src/core/hle/kernel/k_page_bitmap.h
@@ -0,0 +1,279 @@
```
```cpp
// Copyright 2021 yuzu Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.

#pragma once

#include <array>
#include <bit>

#include "common/alignment.h"
#include "common/assert.h"
#include "common/bit_util.h"
#include "common/common_types.h"
#include "common/tiny_mt.h"
#include "core/hle/kernel/memory/system_control.h"

namespace Kernel {

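// KPageBitmap tracks block availability with a hierarchy of up to MaxDepth bitmaps.
// The deepest level holds one bit per block; each shallower level holds one bit per
// 64-bit word of the level below it, set whenever that word is non-zero. This lets
// FindFreeBlock locate a set bit (first-fit or randomized) with one word read per level.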
class KPageBitmap {
private:
    class RandomBitGenerator {
    private:
        Common::TinyMT rng{};
        u32 entropy{};
        u32 bits_available{};

    private:
        void RefreshEntropy() {
            entropy = rng.GenerateRandomU32();
            bits_available = static_cast<u32>(Common::BitSize<decltype(entropy)>());
        }

        bool GenerateRandomBit() {
            if (bits_available == 0) {
                this->RefreshEntropy();
            }

            const bool rnd_bit = (entropy & 1) != 0;
            entropy >>= 1;
            --bits_available;
            return rnd_bit;
        }

    public:
        RandomBitGenerator() {
            rng.Initialize(static_cast<u32>(Memory::SystemControl::GenerateRandomU64()));
        }

        std::size_t SelectRandomBit(u64 bitmap) {
            u64 selected = 0;

            u64 cur_num_bits = Common::BitSize<decltype(bitmap)>() / 2;
            u64 cur_mask = (1ULL << cur_num_bits) - 1;

            while (cur_num_bits) {
                const u64 low = (bitmap >> 0) & cur_mask;
                const u64 high = (bitmap >> cur_num_bits) & cur_mask;

                bool choose_low;
                if (high == 0) {
                    // If only low val is set, choose low.
                    choose_low = true;
                } else if (low == 0) {
                    // If only high val is set, choose high.
                    choose_low = false;
                } else {
                    // If both are set, choose random.
                    choose_low = this->GenerateRandomBit();
                }

                // If we chose low, proceed with low.
                if (choose_low) {
                    bitmap = low;
                    selected += 0;
                } else {
                    bitmap = high;
                    selected += cur_num_bits;
                }

                // Proceed.
                cur_num_bits /= 2;
                cur_mask >>= cur_num_bits;
            }

            return selected;
        }
    };

public:
    static constexpr std::size_t MaxDepth = 4;

private:
    std::array<u64*, MaxDepth> bit_storages{};
    RandomBitGenerator rng{};
    std::size_t num_bits{};
    std::size_t used_depths{};

public:
    KPageBitmap() = default;

    constexpr std::size_t GetNumBits() const {
        return num_bits;
    }
    constexpr s32 GetHighestDepthIndex() const {
        return static_cast<s32>(used_depths) - 1;
    }

    u64* Initialize(u64* storage, std::size_t size) {
        // Initially, everything is un-set.
        num_bits = 0;

        // Calculate the needed bitmap depth.
        used_depths = static_cast<std::size_t>(GetRequiredDepth(size));
        ASSERT(used_depths <= MaxDepth);

        // Set the bitmap pointers.
        for (s32 depth = this->GetHighestDepthIndex(); depth >= 0; depth--) {
            bit_storages[depth] = storage;
            size = Common::AlignUp(size, Common::BitSize<u64>()) / Common::BitSize<u64>();
            storage += size;
        }

        return storage;
    }

    s64 FindFreeBlock(bool random) {
        uintptr_t offset = 0;
        s32 depth = 0;

        if (random) {
            do {
                const u64 v = bit_storages[depth][offset];
                if (v == 0) {
                    // If depth is bigger than zero, then a previous level indicated a block was
                    // free.
                    ASSERT(depth == 0);
                    return -1;
                }
                offset = offset * Common::BitSize<u64>() + rng.SelectRandomBit(v);
                ++depth;
            } while (depth < static_cast<s32>(used_depths));
        } else {
            do {
                const u64 v = bit_storages[depth][offset];
                if (v == 0) {
                    // If depth is bigger than zero, then a previous level indicated a block was
                    // free.
                    ASSERT(depth == 0);
                    return -1;
                }
                offset = offset * Common::BitSize<u64>() + std::countr_zero(v);
                ++depth;
            } while (depth < static_cast<s32>(used_depths));
        }

        return static_cast<s64>(offset);
    }

    void SetBit(std::size_t offset) {
        this->SetBit(this->GetHighestDepthIndex(), offset);
        num_bits++;
    }

    void ClearBit(std::size_t offset) {
        this->ClearBit(this->GetHighestDepthIndex(), offset);
        num_bits--;
    }

    bool ClearRange(std::size_t offset, std::size_t count) {
        s32 depth = this->GetHighestDepthIndex();
        u64* bits = bit_storages[depth];
        std::size_t bit_ind = offset / Common::BitSize<u64>();
        if (count < Common::BitSize<u64>()) {
            const std::size_t shift = offset % Common::BitSize<u64>();
            ASSERT(shift + count <= Common::BitSize<u64>());
            // Check that all the bits are set.
            const u64 mask = ((u64(1) << count) - 1) << shift;
            u64 v = bits[bit_ind];
            if ((v & mask) != mask) {
                return false;
            }

            // Clear the bits.
            v &= ~mask;
            bits[bit_ind] = v;
            if (v == 0) {
                this->ClearBit(depth - 1, bit_ind);
            }
        } else {
            ASSERT(offset % Common::BitSize<u64>() == 0);
            ASSERT(count % Common::BitSize<u64>() == 0);
            // Check that all the bits are set.
            std::size_t remaining = count;
            std::size_t i = 0;
            do {
                if (bits[bit_ind + i++] != ~u64(0)) {
                    return false;
                }
                remaining -= Common::BitSize<u64>();
            } while (remaining > 0);

            // Clear the bits.
            remaining = count;
            i = 0;
            do {
                bits[bit_ind + i] = 0;
                this->ClearBit(depth - 1, bit_ind + i);
                i++;
                remaining -= Common::BitSize<u64>();
            } while (remaining > 0);
        }

        num_bits -= count;
        return true;
    }

private:
    void SetBit(s32 depth, std::size_t offset) {
        while (depth >= 0) {
            std::size_t ind = offset / Common::BitSize<u64>();
            std::size_t which = offset % Common::BitSize<u64>();
            const u64 mask = u64(1) << which;

            u64* bit = std::addressof(bit_storages[depth][ind]);
            u64 v = *bit;
            ASSERT((v & mask) == 0);
            *bit = v | mask;
            if (v) {
                break;
            }
            offset = ind;
            depth--;
        }
    }

    void ClearBit(s32 depth, std::size_t offset) {
        while (depth >= 0) {
            std::size_t ind = offset / Common::BitSize<u64>();
            std::size_t which = offset % Common::BitSize<u64>();
            const u64 mask = u64(1) << which;

            u64* bit = std::addressof(bit_storages[depth][ind]);
            u64 v = *bit;
            ASSERT((v & mask) != 0);
            v &= ~mask;
            *bit = v;
            if (v) {
                break;
            }
            offset = ind;
            depth--;
        }
    }

private:
    static constexpr s32 GetRequiredDepth(std::size_t region_size) {
        s32 depth = 0;
        while (true) {
            region_size /= Common::BitSize<u64>();
            depth++;
            if (region_size == 0) {
                return depth;
            }
        }
    }

public:
    static constexpr std::size_t CalculateManagementOverheadSize(std::size_t region_size) {
        std::size_t overhead_bits = 0;
        for (s32 depth = GetRequiredDepth(region_size) - 1; depth >= 0; depth--) {
            region_size =
                Common::AlignUp(region_size, Common::BitSize<u64>()) / Common::BitSize<u64>();
            overhead_bits += region_size;
        }
        return overhead_bits * sizeof(u64);
    }
};

} // namespace Kernel
```
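For context, here is a minimal usage sketch. It is not part of this commit: the block count, the storage buffer, the function name `PageBitmapSketch`, and the "set bit == available block" convention are assumptions chosen for illustration; the class's real consumer in the kernel code is not shown in this diff.

```cpp
// Hypothetical example only; region size and free/used convention are assumed.
#include <cstddef>
#include <vector>

#include "core/hle/kernel/k_page_bitmap.h"

void PageBitmapSketch() {
    constexpr std::size_t num_blocks = 16384;

    // Three bitmap levels are needed for 16384 bits:
    // 16384 / 64 = 256 words, 256 / 64 = 4 words, 4 aligned up to 64 -> 1 word,
    // so CalculateManagementOverheadSize returns (256 + 4 + 1) * 8 = 2088 bytes.
    std::vector<u64> storage(
        Kernel::KPageBitmap::CalculateManagementOverheadSize(num_blocks) / sizeof(u64));

    Kernel::KPageBitmap bitmap;
    bitmap.Initialize(storage.data(), num_blocks);

    // Mark every block as available.
    for (std::size_t i = 0; i < num_blocks; ++i) {
        bitmap.SetBit(i);
    }

    // First-fit lookup; passing true instead picks a random set bit at each level.
    const s64 block = bitmap.FindFreeBlock(false);
    if (block >= 0) {
        // Claim the block by clearing its bit.
        bitmap.ClearBit(static_cast<std::size_t>(block));
    }

    // Claim a word-aligned run of 64 blocks in one call; this succeeds (and
    // returns true) only if all 64 bits in the run are currently set.
    const bool claimed = bitmap.ClearRange(64, 64);
    (void)claimed;
}
```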