author    ReinUsesLisp 2020-01-06 17:14:41 -0300
committer ReinUsesLisp 2020-01-06 18:13:41 -0300
commit ceb851b5905544a08a35cdb7b96b5062ccc16a1f (patch)
tree   f3635f9292587e419a8c93247b2b805349184bad /src
parent vk_buffer_cache: Temporarily remove buffer cache (diff)
vk_memory_manager: Misc changes
* Allocate memory in discrete exponentially increasing chunks until the
  128 MiB threshold. Allocations larger than that increase linearly in
  256 MiB steps (depending on the required size). This allows using small
  allocations for small resources.

* Move memory maps to a RAII abstraction. To optimize for debugging tools
  (like RenderDoc), users will map/unmap on usage. If this ever becomes a
  noticeable overhead (from my profiling it doesn't), we can transparently
  move to persistent memory maps without harming the API, getting optimal
  performance for both gameplay and debugging.

* Improve messages on exceptional situations.

* Fix typos: "requeriments" -> "requirements".

* Small style changes.
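The chunk policy described in the first bullet is small enough to restate standalone. The sketch below mirrors the patch's GetAllocationChunkSize; the local AlignUp stand-in (for Common::AlignUp) and the main() driver are illustrative additions, not part of the patch:

#include <algorithm>
#include <cstdint>
#include <cstdio>
#include <iterator>

using u64 = std::uint64_t;

// Stand-in for Common::AlignUp from yuzu's common library.
constexpr u64 AlignUp(u64 value, u64 align) {
    return (value + align - 1) / align * align;
}

// Mirrors the GetAllocationChunkSize added by this patch.
u64 GetAllocationChunkSize(u64 required_size) {
    // Discrete exponential steps up to the 128 MiB threshold...
    static constexpr u64 sizes[] = {16ULL << 20, 32ULL << 20, 64ULL << 20, 128ULL << 20};
    const auto it = std::lower_bound(std::begin(sizes), std::end(sizes), required_size);
    // ...then the next 256 MiB multiple for anything larger.
    return it != std::end(sizes) ? *it : AlignUp(required_size, 256ULL << 20);
}

int main() {
    // 1 MiB request -> 16 MiB chunk, 100 MiB -> 128 MiB, 300 MiB -> 512 MiB.
    for (const u64 request : {1ULL << 20, 100ULL << 20, 300ULL << 20}) {
        std::printf("%llu MiB -> %llu MiB\n",
                    static_cast<unsigned long long>(request >> 20),
                    static_cast<unsigned long long>(GetAllocationChunkSize(request) >> 20));
    }
}

Small resources now share small 16-64 MiB chunks instead of each forcing a fixed 64 MiB allocation, and oversized requests no longer trip the old ASSERT(reqs.size < ALLOC_CHUNK_SIZE) removed in the diff below.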
Diffstat (limited to 'src')
-rw-r--r-- src/video_core/renderer_vulkan/vk_memory_manager.cpp | 158
-rw-r--r-- src/video_core/renderer_vulkan/vk_memory_manager.h   |  72
2 files changed, 142 insertions(+), 88 deletions(-)
diff --git a/src/video_core/renderer_vulkan/vk_memory_manager.cpp b/src/video_core/renderer_vulkan/vk_memory_manager.cpp
index 0451babbf..9cc9979d0 100644
--- a/src/video_core/renderer_vulkan/vk_memory_manager.cpp
+++ b/src/video_core/renderer_vulkan/vk_memory_manager.cpp
@@ -6,6 +6,7 @@
 #include <optional>
 #include <tuple>
 #include <vector>
+
 #include "common/alignment.h"
 #include "common/assert.h"
 #include "common/common_types.h"
@@ -16,34 +17,32 @@
 
 namespace Vulkan {
 
-// TODO(Rodrigo): Fine tune this number
-constexpr u64 ALLOC_CHUNK_SIZE = 64 * 1024 * 1024;
+namespace {
+
+u64 GetAllocationChunkSize(u64 required_size) {
+    static constexpr u64 sizes[] = {16ULL << 20, 32ULL << 20, 64ULL << 20, 128ULL << 20};
+    auto it = std::lower_bound(std::begin(sizes), std::end(sizes), required_size);
+    return it != std::end(sizes) ? *it : Common::AlignUp(required_size, 256ULL << 20);
+}
+
+} // Anonymous namespace
 
 class VKMemoryAllocation final {
 public:
     explicit VKMemoryAllocation(const VKDevice& device, vk::DeviceMemory memory,
-                                vk::MemoryPropertyFlags properties, u64 alloc_size, u32 type)
-        : device{device}, memory{memory}, properties{properties}, alloc_size{alloc_size},
-          shifted_type{ShiftType(type)}, is_mappable{properties &
-                                                     vk::MemoryPropertyFlagBits::eHostVisible} {
-        if (is_mappable) {
-            const auto dev = device.GetLogical();
-            const auto& dld = device.GetDispatchLoader();
-            base_address = static_cast<u8*>(dev.mapMemory(memory, 0, alloc_size, {}, dld));
-        }
-    }
+                                vk::MemoryPropertyFlags properties, u64 allocation_size, u32 type)
        : device{device}, memory{memory}, properties{properties}, allocation_size{allocation_size},
+          shifted_type{ShiftType(type)} {}
 
     ~VKMemoryAllocation() {
         const auto dev = device.GetLogical();
         const auto& dld = device.GetDispatchLoader();
-        if (is_mappable)
-            dev.unmapMemory(memory, dld);
         dev.free(memory, nullptr, dld);
     }
 
     VKMemoryCommit Commit(vk::DeviceSize commit_size, vk::DeviceSize alignment) {
-        auto found = TryFindFreeSection(free_iterator, alloc_size, static_cast<u64>(commit_size),
-                                        static_cast<u64>(alignment));
+        auto found = TryFindFreeSection(free_iterator, allocation_size,
+                                        static_cast<u64>(commit_size), static_cast<u64>(alignment));
         if (!found) {
             found = TryFindFreeSection(0, free_iterator, static_cast<u64>(commit_size),
                                        static_cast<u64>(alignment));
@@ -52,8 +51,7 @@ public:
                 return nullptr;
             }
         }
-        u8* address = is_mappable ? base_address + *found : nullptr;
-        auto commit = std::make_unique<VKMemoryCommitImpl>(this, memory, address, *found,
+        auto commit = std::make_unique<VKMemoryCommitImpl>(device, this, memory, *found,
                                                            *found + commit_size);
         commits.push_back(commit.get());
 
@@ -65,12 +63,10 @@ public:
 
     void Free(const VKMemoryCommitImpl* commit) {
         ASSERT(commit);
-        const auto it =
-            std::find_if(commits.begin(), commits.end(),
-                         [&](const auto& stored_commit) { return stored_commit == commit; });
+
+        const auto it = std::find(std::begin(commits), std::end(commits), commit);
         if (it == commits.end()) {
-            LOG_CRITICAL(Render_Vulkan, "Freeing unallocated commit!");
-            UNREACHABLE();
+            UNREACHABLE_MSG("Freeing unallocated commit!");
             return;
         }
         commits.erase(it);
@@ -88,11 +84,11 @@ private:
     }
 
     /// A memory allocator, it may return a free region between "start" and "end" with the solicited
-    /// requeriments.
+    /// requirements.
     std::optional<u64> TryFindFreeSection(u64 start, u64 end, u64 size, u64 alignment) const {
-        u64 iterator = start;
-        while (iterator + size < end) {
-            const u64 try_left = Common::AlignUp(iterator, alignment);
+        u64 iterator = Common::AlignUp(start, alignment);
+        while (iterator + size <= end) {
+            const u64 try_left = iterator;
             const u64 try_right = try_left + size;
 
             bool overlap = false;
@@ -100,7 +96,7 @@ private:
                 const auto [commit_left, commit_right] = commit->interval;
                 if (try_left < commit_right && commit_left < try_right) {
                     // There's an overlap, continue the search where the overlapping commit ends.
-                    iterator = commit_right;
+                    iterator = Common::AlignUp(commit_right, alignment);
                     overlap = true;
                     break;
                 }
@@ -110,6 +106,7 @@ private:
                 return try_left;
             }
         }
+
         // No free regions where found, return an empty optional.
         return std::nullopt;
     }
@@ -117,12 +114,8 @@ private:
     const VKDevice& device;                   ///< Vulkan device.
     const vk::DeviceMemory memory;            ///< Vulkan memory allocation handler.
     const vk::MemoryPropertyFlags properties; ///< Vulkan properties.
-    const u64 alloc_size;                     ///< Size of this allocation.
+    const u64 allocation_size;                ///< Size of this allocation.
     const u32 shifted_type;                   ///< Stored Vulkan type of this allocation, shifted.
-    const bool is_mappable;                   ///< Whether the allocation is mappable.
-
-    /// Base address of the mapped pointer.
-    u8* base_address{};
 
     /// Hints where the next free region is likely going to be.
     u64 free_iterator{};
@@ -132,13 +125,15 @@
 };
 
 VKMemoryManager::VKMemoryManager(const VKDevice& device)
-    : device{device}, props{device.GetPhysical().getMemoryProperties(device.GetDispatchLoader())},
-      is_memory_unified{GetMemoryUnified(props)} {}
+    : device{device}, properties{device.GetPhysical().getMemoryProperties(
+                          device.GetDispatchLoader())},
+      is_memory_unified{GetMemoryUnified(properties)} {}
 
 VKMemoryManager::~VKMemoryManager() = default;
 
-VKMemoryCommit VKMemoryManager::Commit(const vk::MemoryRequirements& reqs, bool host_visible) {
-    ASSERT(reqs.size < ALLOC_CHUNK_SIZE);
+VKMemoryCommit VKMemoryManager::Commit(const vk::MemoryRequirements& requirements,
+                                       bool host_visible) {
+    const u64 chunk_size = GetAllocationChunkSize(requirements.size);
 
     // When a host visible commit is asked, search for host visible and coherent, otherwise search
     // for a fast device local type.
@@ -147,32 +142,21 @@ VKMemoryCommit VKMemoryManager::Commit(const vk::MemoryRequirements& reqs, bool
         ? vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent
         : vk::MemoryPropertyFlagBits::eDeviceLocal;
 
-    const auto TryCommit = [&]() -> VKMemoryCommit {
-        for (auto& alloc : allocs) {
-            if (!alloc->IsCompatible(wanted_properties, reqs.memoryTypeBits))
-                continue;
-
-            if (auto commit = alloc->Commit(reqs.size, reqs.alignment); commit) {
-                return commit;
-            }
-        }
-        return {};
-    };
-
-    if (auto commit = TryCommit(); commit) {
+    if (auto commit = TryAllocCommit(requirements, wanted_properties)) {
         return commit;
     }
 
     // Commit has failed, allocate more memory.
-    if (!AllocMemory(wanted_properties, reqs.memoryTypeBits, ALLOC_CHUNK_SIZE)) {
-        // TODO(Rodrigo): Try to use host memory.
-        LOG_CRITICAL(Render_Vulkan, "Ran out of memory!");
-        UNREACHABLE();
+    if (!AllocMemory(wanted_properties, requirements.memoryTypeBits, chunk_size)) {
+        // TODO(Rodrigo): Handle these situations in some way like flushing to guest memory.
+        // Allocation has failed, panic.
+        UNREACHABLE_MSG("Ran out of VRAM!");
+        return {};
     }
 
     // Commit again, this time it won't fail since there's a fresh allocation above. If it does,
     // there's a bug.
-    auto commit = TryCommit();
+    auto commit = TryAllocCommit(requirements, wanted_properties);
     ASSERT(commit);
     return commit;
 }
@@ -180,8 +164,7 @@ VKMemoryCommit VKMemoryManager::Commit(const vk::MemoryRequirements& reqs, bool
 VKMemoryCommit VKMemoryManager::Commit(vk::Buffer buffer, bool host_visible) {
     const auto dev = device.GetLogical();
     const auto& dld = device.GetDispatchLoader();
-    const auto requeriments = dev.getBufferMemoryRequirements(buffer, dld);
-    auto commit = Commit(requeriments, host_visible);
+    auto commit = Commit(dev.getBufferMemoryRequirements(buffer, dld), host_visible);
     dev.bindBufferMemory(buffer, commit->GetMemory(), commit->GetOffset(), dld);
     return commit;
 }
@@ -189,25 +172,23 @@ VKMemoryCommit VKMemoryManager::Commit(vk::Buffer buffer, bool host_visible) {
 VKMemoryCommit VKMemoryManager::Commit(vk::Image image, bool host_visible) {
     const auto dev = device.GetLogical();
     const auto& dld = device.GetDispatchLoader();
-    const auto requeriments = dev.getImageMemoryRequirements(image, dld);
-    auto commit = Commit(requeriments, host_visible);
+    auto commit = Commit(dev.getImageMemoryRequirements(image, dld), host_visible);
     dev.bindImageMemory(image, commit->GetMemory(), commit->GetOffset(), dld);
     return commit;
 }
 
 bool VKMemoryManager::AllocMemory(vk::MemoryPropertyFlags wanted_properties, u32 type_mask,
                                   u64 size) {
-    const u32 type = [&]() {
-        for (u32 type_index = 0; type_index < props.memoryTypeCount; ++type_index) {
-            const auto flags = props.memoryTypes[type_index].propertyFlags;
+    const u32 type = [&] {
+        for (u32 type_index = 0; type_index < properties.memoryTypeCount; ++type_index) {
+            const auto flags = properties.memoryTypes[type_index].propertyFlags;
             if ((type_mask & (1U << type_index)) && (flags & wanted_properties)) {
                 // The type matches in type and in the wanted properties.
                 return type_index;
             }
         }
-        LOG_CRITICAL(Render_Vulkan, "Couldn't find a compatible memory type!");
-        UNREACHABLE();
-        return 0u;
+        UNREACHABLE_MSG("Couldn't find a compatible memory type!");
+        return 0U;
     }();
 
     const auto dev = device.GetLogical();
@@ -216,19 +197,33 @@ bool VKMemoryManager::AllocMemory(vk::MemoryPropertyFlags wanted_properties, u32
     // Try to allocate found type.
     const vk::MemoryAllocateInfo memory_ai(size, type);
     vk::DeviceMemory memory;
-    if (const vk::Result res = dev.allocateMemory(&memory_ai, nullptr, &memory, dld);
+    if (const auto res = dev.allocateMemory(&memory_ai, nullptr, &memory, dld);
         res != vk::Result::eSuccess) {
         LOG_CRITICAL(Render_Vulkan, "Device allocation failed with code {}!", vk::to_string(res));
         return false;
     }
-    allocs.push_back(
+    allocations.push_back(
         std::make_unique<VKMemoryAllocation>(device, memory, wanted_properties, size, type));
     return true;
 }
 
-/*static*/ bool VKMemoryManager::GetMemoryUnified(const vk::PhysicalDeviceMemoryProperties& props) {
-    for (u32 heap_index = 0; heap_index < props.memoryHeapCount; ++heap_index) {
-        if (!(props.memoryHeaps[heap_index].flags & vk::MemoryHeapFlagBits::eDeviceLocal)) {
+VKMemoryCommit VKMemoryManager::TryAllocCommit(const vk::MemoryRequirements& requirements,
+                                               vk::MemoryPropertyFlags wanted_properties) {
+    for (auto& allocation : allocations) {
+        if (!allocation->IsCompatible(wanted_properties, requirements.memoryTypeBits)) {
+            continue;
+        }
+        if (auto commit = allocation->Commit(requirements.size, requirements.alignment)) {
+            return commit;
+        }
+    }
+    return {};
+}
+
+/*static*/ bool VKMemoryManager::GetMemoryUnified(
+    const vk::PhysicalDeviceMemoryProperties& properties) {
+    for (u32 heap_index = 0; heap_index < properties.memoryHeapCount; ++heap_index) {
+        if (!(properties.memoryHeaps[heap_index].flags & vk::MemoryHeapFlagBits::eDeviceLocal)) {
             // Memory is considered unified when heaps are device local only.
             return false;
         }
@@ -236,17 +231,28 @@ bool VKMemoryManager::AllocMemory(vk::MemoryPropertyFlags wanted_properties, u32
     return true;
 }
 
-VKMemoryCommitImpl::VKMemoryCommitImpl(VKMemoryAllocation* allocation, vk::DeviceMemory memory,
-                                       u8* data, u64 begin, u64 end)
-    : interval(std::make_pair(begin, end)), memory{memory}, allocation{allocation}, data{data} {}
+VKMemoryCommitImpl::VKMemoryCommitImpl(const VKDevice& device, VKMemoryAllocation* allocation,
+                                       vk::DeviceMemory memory, u64 begin, u64 end)
+    : device{device}, interval{begin, end}, memory{memory}, allocation{allocation} {}
 
 VKMemoryCommitImpl::~VKMemoryCommitImpl() {
     allocation->Free(this);
 }
 
-u8* VKMemoryCommitImpl::GetData() const {
-    ASSERT_MSG(data != nullptr, "Trying to access an unmapped commit.");
-    return data;
+MemoryMap VKMemoryCommitImpl::Map(u64 size, u64 offset_) const {
+    const auto dev = device.GetLogical();
+    const auto address = reinterpret_cast<u8*>(
+        dev.mapMemory(memory, interval.first + offset_, size, {}, device.GetDispatchLoader()));
+    return MemoryMap{this, address};
+}
+
+void VKMemoryCommitImpl::Unmap() const {
+    const auto dev = device.GetLogical();
+    dev.unmapMemory(memory, device.GetDispatchLoader());
+}
+
+MemoryMap VKMemoryCommitImpl::Map() const {
+    return Map(interval.second - interval.first);
 }
 
 } // namespace Vulkan
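Beyond the renames, the hunks touching TryFindFreeSection fix the first-fit search itself. A simplified standalone version of the corrected logic is sketched below; a plain interval list stands in for the VKMemoryCommitImpl pointers used above, so this is an illustration of the technique, not the patched code:

#include <cstdint>
#include <optional>
#include <utility>
#include <vector>

using u64 = std::uint64_t;

// Stand-in for Common::AlignUp.
constexpr u64 AlignUp(u64 value, u64 align) {
    return (value + align - 1) / align * align;
}

// "commits" holds [begin, end) intervals already handed out inside one allocation.
std::optional<u64> TryFindFreeSection(const std::vector<std::pair<u64, u64>>& commits, u64 start,
                                      u64 end, u64 size, u64 alignment) {
    // Align once up front; the old code aligned inside the loop *after* the
    // bounds check, so an aligned candidate could run past "end" unnoticed.
    u64 iterator = AlignUp(start, alignment);
    while (iterator + size <= end) { // <= lets a region end exactly at "end"
        const u64 try_left = iterator;
        const u64 try_right = try_left + size;

        bool overlap = false;
        for (const auto& [commit_left, commit_right] : commits) {
            if (try_left < commit_right && commit_left < try_right) {
                // Overlap: resume after this commit, keeping the alignment invariant.
                iterator = AlignUp(commit_right, alignment);
                overlap = true;
                break;
            }
        }
        if (!overlap) {
            return try_left;
        }
    }
    return std::nullopt;
}

Since commit_right is always greater than the overlapping try_left, the aligned iterator strictly increases each round and the loop terminates.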
diff --git a/src/video_core/renderer_vulkan/vk_memory_manager.h b/src/video_core/renderer_vulkan/vk_memory_manager.h
index 073597b35..cd00bb91b 100644
--- a/src/video_core/renderer_vulkan/vk_memory_manager.h
+++ b/src/video_core/renderer_vulkan/vk_memory_manager.h
@@ -12,6 +12,7 @@
 
 namespace Vulkan {
 
+class MemoryMap;
 class VKDevice;
 class VKMemoryAllocation;
 class VKMemoryCommitImpl;
@@ -21,13 +22,14 @@ using VKMemoryCommit = std::unique_ptr<VKMemoryCommitImpl>;
 class VKMemoryManager final {
 public:
     explicit VKMemoryManager(const VKDevice& device);
+    VKMemoryManager(const VKMemoryManager&) = delete;
     ~VKMemoryManager();
 
     /**
      * Commits a memory with the specified requeriments.
-     * @param reqs Requeriments returned from a Vulkan call.
+     * @param requirements Requirements returned from a Vulkan call.
      * @param host_visible Signals the allocator that it *must* use host visible and coherent
      * memory. When passing false, it will try to allocate device local memory.
      * @returns A memory commit.
      */
     VKMemoryCommit Commit(const vk::MemoryRequirements& reqs, bool host_visible);
@@ -47,25 +49,35 @@ private:
     /// Allocates a chunk of memory.
     bool AllocMemory(vk::MemoryPropertyFlags wanted_properties, u32 type_mask, u64 size);
 
+    /// Tries to allocate a memory commit.
+    VKMemoryCommit TryAllocCommit(const vk::MemoryRequirements& requirements,
+                                  vk::MemoryPropertyFlags wanted_properties);
+
     /// Returns true if the device uses an unified memory model.
-    static bool GetMemoryUnified(const vk::PhysicalDeviceMemoryProperties& props);
+    static bool GetMemoryUnified(const vk::PhysicalDeviceMemoryProperties& properties);
 
     const VKDevice& device;                              ///< Device handler.
-    const vk::PhysicalDeviceMemoryProperties props;      ///< Physical device properties.
+    const vk::PhysicalDeviceMemoryProperties properties; ///< Physical device properties.
     const bool is_memory_unified;                        ///< True if memory model is unified.
-    std::vector<std::unique_ptr<VKMemoryAllocation>> allocs; ///< Current allocations.
+    std::vector<std::unique_ptr<VKMemoryAllocation>> allocations; ///< Current allocations.
 };
 
 class VKMemoryCommitImpl final {
     friend VKMemoryAllocation;
+    friend MemoryMap;
 
 public:
-    explicit VKMemoryCommitImpl(VKMemoryAllocation* allocation, vk::DeviceMemory memory, u8* data,
-                                u64 begin, u64 end);
+    explicit VKMemoryCommitImpl(const VKDevice& device, VKMemoryAllocation* allocation,
+                                vk::DeviceMemory memory, u64 begin, u64 end);
     ~VKMemoryCommitImpl();
 
-    /// Returns the writeable memory map. The commit has to be mappable.
-    u8* GetData() const;
+    /// Maps a memory region and returns a pointer to it.
+    /// It's illegal to have more than one memory map at the same time.
+    MemoryMap Map(u64 size, u64 offset = 0) const;
+
+    /// Maps the whole commit and returns a pointer to it.
+    /// It's illegal to have more than one memory map at the same time.
+    MemoryMap Map() const;
 
     /// Returns the Vulkan memory handler.
     vk::DeviceMemory GetMemory() const {
@@ -78,10 +90,46 @@ public:
     }
 
 private:
+    /// Unmaps memory.
+    void Unmap() const;
+
+    const VKDevice& device;           ///< Vulkan device.
     std::pair<u64, u64> interval{};   ///< Interval where the commit exists.
     vk::DeviceMemory memory;          ///< Vulkan device memory handler.
     VKMemoryAllocation* allocation{}; ///< Pointer to the large memory allocation.
-    u8* data{}; ///< Pointer to the host mapped memory, it has the commit offset included.
+};
+
+/// Holds ownership of a memory map.
+class MemoryMap final {
+public:
+    explicit MemoryMap(const VKMemoryCommitImpl* commit, u8* address)
+        : commit{commit}, address{address} {}
+
+    ~MemoryMap() {
+        if (commit) {
+            commit->Unmap();
+        }
+    }
+
+    /// Prematurely releases the memory map.
+    void Release() {
+        commit->Unmap();
+        commit = nullptr;
+    }
+
+    /// Returns the address of the memory map.
+    u8* GetAddress() const {
+        return address;
+    }
+
+    /// Returns the address of the memory map;
+    operator u8*() const {
+        return address;
+    }
+
+private:
+    const VKMemoryCommitImpl* commit{}; ///< Mapped memory commit.
+    u8* address{};                      ///< Address to the mapped memory.
 };
 
 } // namespace Vulkan
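For reference, a hypothetical call site for the new RAII API; UploadToCommit and its arguments are made up for illustration, and only VKMemoryCommit, Map, GetAddress, and Release come from the header above:

#include <cstring>
#include <vector>

#include "video_core/renderer_vulkan/vk_memory_manager.h"

namespace Vulkan {

// Hypothetical helper (not part of the patch): fills a host-visible commit
// from CPU memory using the new RAII map.
void UploadToCommit(const VKMemoryCommit& commit, const std::vector<u8>& data) {
    // Map only the bytes being written; ~MemoryMap unmaps when "map" leaves
    // scope, so capture tools like RenderDoc never see a persistent map.
    const MemoryMap map = commit->Map(data.size());
    std::memcpy(map.GetAddress(), data.data(), data.size());
    // map.Release() could unmap earlier if more CPU work followed here.
}

} // namespace Vulkan

Because the destructor performs the unmap, a map cannot leak across a frame; Release() covers the rare case where the mapping must end before the scope does.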