Diffstat (limited to 'src')
-rw-r--r--  src/video_core/CMakeLists.txt                            2
-rw-r--r--  src/video_core/renderer_vulkan/vk_memory_manager.cpp   253
-rw-r--r--  src/video_core/renderer_vulkan/vk_memory_manager.h      87
3 files changed, 342 insertions, 0 deletions
diff --git a/src/video_core/CMakeLists.txt b/src/video_core/CMakeLists.txt
index 59319f206..56f789e0b 100644
--- a/src/video_core/CMakeLists.txt
+++ b/src/video_core/CMakeLists.txt
@@ -106,6 +106,8 @@ if (ENABLE_VULKAN)
         renderer_vulkan/declarations.h
         renderer_vulkan/vk_device.cpp
         renderer_vulkan/vk_device.h
+        renderer_vulkan/vk_memory_manager.cpp
+        renderer_vulkan/vk_memory_manager.h
         renderer_vulkan/vk_resource_manager.cpp
         renderer_vulkan/vk_resource_manager.h)
 
diff --git a/src/video_core/renderer_vulkan/vk_memory_manager.cpp b/src/video_core/renderer_vulkan/vk_memory_manager.cpp
new file mode 100644
index 000000000..86364ad54
--- /dev/null
+++ b/src/video_core/renderer_vulkan/vk_memory_manager.cpp
@@ -0,0 +1,253 @@
// Copyright 2018 yuzu Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.

#include <algorithm>
#include <optional>
#include <tuple>
#include <vector>
#include "common/alignment.h"
#include "common/assert.h"
#include "common/common_types.h"
#include "common/logging/log.h"
#include "video_core/renderer_vulkan/declarations.h"
#include "video_core/renderer_vulkan/vk_device.h"
#include "video_core/renderer_vulkan/vk_memory_manager.h"

namespace Vulkan {

// TODO(Rodrigo): Fine tune this number
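// Size of each chunk of device memory requested from the driver (64 MiB); commits are
// sub-allocated from these chunks.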
constexpr u64 ALLOC_CHUNK_SIZE = 64 * 1024 * 1024;

class VKMemoryAllocation final {
public:
    explicit VKMemoryAllocation(const VKDevice& device, vk::DeviceMemory memory,
                                vk::MemoryPropertyFlags properties, u64 alloc_size, u32 type)
        : device{device}, memory{memory}, properties{properties}, alloc_size{alloc_size},
          shifted_type{ShiftType(type)},
          is_mappable{properties & vk::MemoryPropertyFlagBits::eHostVisible} {
        if (is_mappable) {
            const auto dev = device.GetLogical();
            const auto& dld = device.GetDispatchLoader();
            base_address = static_cast<u8*>(dev.mapMemory(memory, 0, alloc_size, {}, dld));
        }
    }

    ~VKMemoryAllocation() {
        const auto dev = device.GetLogical();
        const auto& dld = device.GetDispatchLoader();
        if (is_mappable)
            dev.unmapMemory(memory, dld);
        dev.free(memory, nullptr, dld);
    }

    VKMemoryCommit Commit(vk::DeviceSize commit_size, vk::DeviceSize alignment) {
        auto found = TryFindFreeSection(free_iterator, alloc_size, static_cast<u64>(commit_size),
                                        static_cast<u64>(alignment));
        if (!found) {
            found = TryFindFreeSection(0, free_iterator, static_cast<u64>(commit_size),
                                       static_cast<u64>(alignment));
            if (!found) {
                // Signal out of memory; the caller will try to allocate more chunks.
                return nullptr;
            }
        }
        u8* address = is_mappable ? base_address + *found : nullptr;
        auto commit = std::make_unique<VKMemoryCommitImpl>(this, memory, address, *found,
                                                           *found + commit_size);
        commits.push_back(commit.get());

        // The space right after the last commit is very likely to be free.
        free_iterator = *found + commit_size;

        return commit;
    }

    void Free(const VKMemoryCommitImpl* commit) {
        ASSERT(commit);
        const auto it =
            std::find_if(commits.begin(), commits.end(),
                         [&](const auto& stored_commit) { return stored_commit == commit; });
        if (it == commits.end()) {
            LOG_CRITICAL(Render_Vulkan, "Freeing unallocated commit!");
            UNREACHABLE();
            return;
        }
        commits.erase(it);
    }

    /// Returns whether this allocation is compatible with the arguments.
    bool IsCompatible(vk::MemoryPropertyFlags wanted_properties, u32 type_mask) const {
        return (wanted_properties & properties) != vk::MemoryPropertyFlagBits(0) &&
               (type_mask & shifted_type) != 0;
    }

private:
    static constexpr u32 ShiftType(u32 type) {
        return 1U << type;
    }

    /// Searches for a free region between "start" and "end" that can hold "size" bytes at the
    /// requested alignment. Returns the offset of the region, or std::nullopt if none was found.
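    /// For example, with alignment = 0x100, size = 0x100 and an existing commit covering
    /// [0x0, 0x180), the candidate region [0x0, 0x100) overlaps, so the search resumes at 0x180
    /// and the function returns AlignUp(0x180, 0x100) = 0x200.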
    std::optional<u64> TryFindFreeSection(u64 start, u64 end, u64 size, u64 alignment) const {
        u64 iterator = start;
        while (iterator + size < end) {
            const u64 try_left = Common::AlignUp(iterator, alignment);
            const u64 try_right = try_left + size;

            bool overlap = false;
            for (const auto& commit : commits) {
                const auto [commit_left, commit_right] = commit->interval;
                if (try_left < commit_right && commit_left < try_right) {
                    // There's an overlap, continue the search where the overlapping commit ends.
                    iterator = commit_right;
                    overlap = true;
                    break;
                }
            }
            if (!overlap) {
                // A free address has been found.
                return try_left;
            }
        }
        // No free region was found, return an empty optional.
        return std::nullopt;
    }

    const VKDevice& device;                   ///< Vulkan device.
    const vk::DeviceMemory memory;            ///< Vulkan memory allocation handle.
    const vk::MemoryPropertyFlags properties; ///< Vulkan memory properties.
    const u64 alloc_size;                     ///< Size of this allocation.
    const u32 shifted_type;                   ///< Stored Vulkan type of this allocation, shifted.
    const bool is_mappable;                   ///< Whether the allocation is mappable.

    /// Base address of the mapped pointer.
    u8* base_address{};

    /// Hints where the next free region is likely going to be.
    u64 free_iterator{};

    /// Stores all commits done from this allocation.
    std::vector<const VKMemoryCommitImpl*> commits;
};

VKMemoryManager::VKMemoryManager(const VKDevice& device)
    : device{device}, props{device.GetPhysical().getMemoryProperties(device.GetDispatchLoader())},
      is_memory_unified{GetMemoryUnified(props)} {}

VKMemoryManager::~VKMemoryManager() = default;

VKMemoryCommit VKMemoryManager::Commit(const vk::MemoryRequirements& reqs, bool host_visible) {
    ASSERT(reqs.size < ALLOC_CHUNK_SIZE);

    // When a host visible commit is requested, search for a host visible and coherent type;
    // otherwise search for a fast device local type.
    const vk::MemoryPropertyFlags wanted_properties =
        host_visible
            ? vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent
            : vk::MemoryPropertyFlagBits::eDeviceLocal;

    const auto TryCommit = [&]() -> VKMemoryCommit {
        for (auto& alloc : allocs) {
            if (!alloc->IsCompatible(wanted_properties, reqs.memoryTypeBits))
                continue;

            if (auto commit = alloc->Commit(reqs.size, reqs.alignment); commit) {
                return commit;
            }
        }
        return {};
    };

    if (auto commit = TryCommit(); commit) {
        return commit;
    }

    // The commit failed, allocate a new chunk of memory.
    if (!AllocMemory(wanted_properties, reqs.memoryTypeBits, ALLOC_CHUNK_SIZE)) {
        // TODO(Rodrigo): Try to use host memory.
        LOG_CRITICAL(Render_Vulkan, "Ran out of memory!");
        UNREACHABLE();
    }

    // Commit again, this time it won't fail since there's a fresh allocation above. If it does,
    // there's a bug.
    auto commit = TryCommit();
    ASSERT(commit);
    return commit;
}

VKMemoryCommit VKMemoryManager::Commit(vk::Buffer buffer, bool host_visible) {
    const auto dev = device.GetLogical();
    const auto& dld = device.GetDispatchLoader();
    const auto requirements = dev.getBufferMemoryRequirements(buffer, dld);
    auto commit = Commit(requirements, host_visible);
    dev.bindBufferMemory(buffer, commit->GetMemory(), commit->GetOffset(), dld);
    return commit;
}

VKMemoryCommit VKMemoryManager::Commit(vk::Image image, bool host_visible) {
    const auto dev = device.GetLogical();
    const auto& dld = device.GetDispatchLoader();
    const auto requirements = dev.getImageMemoryRequirements(image, dld);
    auto commit = Commit(requirements, host_visible);
    dev.bindImageMemory(image, commit->GetMemory(), commit->GetOffset(), dld);
    return commit;
}

bool VKMemoryManager::AllocMemory(vk::MemoryPropertyFlags wanted_properties, u32 type_mask,
                                  u64 size) {
    const u32 type = [&]() {
        for (u32 type_index = 0; type_index < props.memoryTypeCount; ++type_index) {
            const auto flags = props.memoryTypes[type_index].propertyFlags;
            if ((type_mask & (1U << type_index)) && (flags & wanted_properties)) {
                // This type is allowed by the mask and has the wanted properties.
                return type_index;
            }
        }
        LOG_CRITICAL(Render_Vulkan, "Couldn't find a compatible memory type!");
        UNREACHABLE();
        return 0U;
    }();

    const auto dev = device.GetLogical();
    const auto& dld = device.GetDispatchLoader();

    // Try to allocate the found type.
    const vk::MemoryAllocateInfo memory_ai(size, type);
    vk::DeviceMemory memory;
    if (const vk::Result res = dev.allocateMemory(&memory_ai, nullptr, &memory, dld);
        res != vk::Result::eSuccess) {
        LOG_CRITICAL(Render_Vulkan, "Device allocation failed with code {}!", vk::to_string(res));
        return false;
    }
    allocs.push_back(
        std::make_unique<VKMemoryAllocation>(device, memory, wanted_properties, size, type));
    return true;
}

/*static*/ bool VKMemoryManager::GetMemoryUnified(const vk::PhysicalDeviceMemoryProperties& props) {
    for (u32 heap_index = 0; heap_index < props.memoryHeapCount; ++heap_index) {
        if (!(props.memoryHeaps[heap_index].flags & vk::MemoryHeapFlagBits::eDeviceLocal)) {
            // Memory is considered unified only when every heap is device local.
            return false;
        }
    }
    return true;
}

VKMemoryCommitImpl::VKMemoryCommitImpl(VKMemoryAllocation* allocation, vk::DeviceMemory memory,
                                       u8* data, u64 begin, u64 end)
    // "begin" and "end" are already absolute offsets within the allocation, so store them as-is.
    : interval(std::make_pair(begin, end)), memory{memory}, allocation{allocation}, data{data} {}

VKMemoryCommitImpl::~VKMemoryCommitImpl() {
    allocation->Free(this);
}

u8* VKMemoryCommitImpl::GetData() const {
    ASSERT_MSG(data != nullptr, "Trying to access an unmapped commit.");
    return data;
}

} // namespace Vulkan
diff --git a/src/video_core/renderer_vulkan/vk_memory_manager.h b/src/video_core/renderer_vulkan/vk_memory_manager.h
new file mode 100644
index 000000000..073597b35
--- /dev/null
+++ b/src/video_core/renderer_vulkan/vk_memory_manager.h
@@ -0,0 +1,87 @@
// Copyright 2019 yuzu Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.

#pragma once

#include <memory>
#include <utility>
#include <vector>
#include "common/common_types.h"
#include "video_core/renderer_vulkan/declarations.h"

namespace Vulkan {

class VKDevice;
class VKMemoryAllocation;
class VKMemoryCommitImpl;

using VKMemoryCommit = std::unique_ptr<VKMemoryCommitImpl>;

class VKMemoryManager final {
public:
    explicit VKMemoryManager(const VKDevice& device);
    ~VKMemoryManager();

    /**
     * Commits memory with the specified requirements.
     * @param reqs Requirements returned from a Vulkan call.
     * @param host_visible Signals the allocator that it *must* use host visible and coherent
     *                     memory. When passing false, it will try to allocate device local memory.
     * @returns A memory commit.
     */
    VKMemoryCommit Commit(const vk::MemoryRequirements& reqs, bool host_visible);

    /// Commits memory required by the buffer and binds it.
    VKMemoryCommit Commit(vk::Buffer buffer, bool host_visible);

    /// Commits memory required by the image and binds it.
    VKMemoryCommit Commit(vk::Image image, bool host_visible);

    /// Returns true if memory allocations are always done in host visible and coherent memory.
    bool IsMemoryUnified() const {
        return is_memory_unified;
    }

private:
    /// Allocates a chunk of memory.
    bool AllocMemory(vk::MemoryPropertyFlags wanted_properties, u32 type_mask, u64 size);

    /// Returns true if the device uses a unified memory model.
    static bool GetMemoryUnified(const vk::PhysicalDeviceMemoryProperties& props);

    const VKDevice& device;                                  ///< Device handle.
    const vk::PhysicalDeviceMemoryProperties props;          ///< Physical device memory properties.
    const bool is_memory_unified;                            ///< True if the memory model is unified.
    std::vector<std::unique_ptr<VKMemoryAllocation>> allocs; ///< Current allocations.
};

class VKMemoryCommitImpl final {
    friend VKMemoryAllocation;

public:
    explicit VKMemoryCommitImpl(VKMemoryAllocation* allocation, vk::DeviceMemory memory, u8* data,
                                u64 begin, u64 end);
    ~VKMemoryCommitImpl();

    /// Returns the writeable memory map. The commit has to be mappable.
    u8* GetData() const;

    /// Returns the Vulkan memory handle.
    vk::DeviceMemory GetMemory() const {
        return memory;
    }

    /// Returns the start position of the commit relative to the allocation.
    vk::DeviceSize GetOffset() const {
        return static_cast<vk::DeviceSize>(interval.first);
    }

private:
    std::pair<u64, u64> interval{};   ///< Interval where the commit exists.
    vk::DeviceMemory memory;          ///< Vulkan device memory handle.
    VKMemoryAllocation* allocation{}; ///< Pointer to the large memory allocation.
    u8* data{};                       ///< Pointer to the host mapped memory, with the commit offset included.
};

} // namespace Vulkan
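
To illustrate how the new interface is meant to be used, the following is a minimal, hypothetical sketch (not part of this change): it assumes a valid VKDevice, invents a helper called CreateZeroedStagingBuffer and a 0x1000-byte buffer size for illustration, and otherwise uses only the methods declared above plus standard Vulkan-Hpp calls.

// Hypothetical usage sketch, not included in the commit.
#include <cstring>
#include "video_core/renderer_vulkan/vk_device.h"
#include "video_core/renderer_vulkan/vk_memory_manager.h"

namespace Vulkan {

// Creates a small staging buffer, commits host visible memory to it through VKMemoryManager
// and zero-fills it through the mapped pointer.
vk::Buffer CreateZeroedStagingBuffer(const VKDevice& device, VKMemoryManager& memory_manager,
                                     VKMemoryCommit& out_commit) {
    const auto dev = device.GetLogical();
    const auto& dld = device.GetDispatchLoader();

    const vk::BufferCreateInfo buffer_ci({}, 0x1000, vk::BufferUsageFlagBits::eTransferSrc,
                                         vk::SharingMode::eExclusive, 0, nullptr);
    const vk::Buffer buffer = dev.createBuffer(buffer_ci, nullptr, dld);

    // Commit(vk::Buffer, bool) allocates (or reuses) a chunk and binds the buffer to it.
    out_commit = memory_manager.Commit(buffer, true);

    // Host visible commits expose a mapped pointer through GetData().
    std::memset(out_commit->GetData(), 0, 0x1000);

    // Destroying the commit later returns the region to its parent allocation.
    return buffer;
}

} // namespace Vulkan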