summaryrefslogtreecommitdiff
path: root/src/video_core/vulkan_common
diff options
context:
space:
mode:
author    Levi  2021-01-10 22:09:56 -0700
committer Levi  2021-01-10 22:09:56 -0700
commit7a3c884e39fccfbb498b855080bffabc9ce2e7f1 (patch)
tree5056f9406dec188439cb0deb87603498243a9412 /src/video_core/vulkan_common
parentMore forgetting... duh (diff)
parentMerge pull request #5229 from Morph1984/fullscreen-opt (diff)
downloadyuzu-7a3c884e39fccfbb498b855080bffabc9ce2e7f1.tar.gz
yuzu-7a3c884e39fccfbb498b855080bffabc9ce2e7f1.tar.xz
yuzu-7a3c884e39fccfbb498b855080bffabc9ce2e7f1.zip
Merge remote-tracking branch 'upstream/master' into int-flags
Diffstat (limited to 'src/video_core/vulkan_common')
-rw-r--r--src/video_core/vulkan_common/nsight_aftermath_tracker.cpp212
-rw-r--r--src/video_core/vulkan_common/nsight_aftermath_tracker.h82
-rw-r--r--src/video_core/vulkan_common/vulkan_debug_callback.cpp45
-rw-r--r--src/video_core/vulkan_common/vulkan_debug_callback.h11
-rw-r--r--src/video_core/vulkan_common/vulkan_device.cpp886
-rw-r--r--src/video_core/vulkan_common/vulkan_device.h307
-rw-r--r--src/video_core/vulkan_common/vulkan_instance.cpp151
-rw-r--r--src/video_core/vulkan_common/vulkan_instance.h32
-rw-r--r--src/video_core/vulkan_common/vulkan_library.cpp36
-rw-r--r--src/video_core/vulkan_common/vulkan_library.h13
-rw-r--r--src/video_core/vulkan_common/vulkan_surface.cpp81
-rw-r--r--src/video_core/vulkan_common/vulkan_surface.h18
-rw-r--r--src/video_core/vulkan_common/vulkan_wrapper.cpp900
-rw-r--r--src/video_core/vulkan_common/vulkan_wrapper.h1222
14 files changed, 3996 insertions, 0 deletions
diff --git a/src/video_core/vulkan_common/nsight_aftermath_tracker.cpp b/src/video_core/vulkan_common/nsight_aftermath_tracker.cpp
new file mode 100644
index 000000000..8d10ac29e
--- /dev/null
+++ b/src/video_core/vulkan_common/nsight_aftermath_tracker.cpp
@@ -0,0 +1,212 @@
1// Copyright 2020 yuzu Emulator Project
2// Licensed under GPLv2 or any later version
3// Refer to the license.txt file included.
4
5#ifdef HAS_NSIGHT_AFTERMATH
6
7#include <mutex>
8#include <string>
9#include <string_view>
10#include <utility>
11#include <vector>
12
13#include <fmt/format.h>
14
15#define VK_NO_PROTOTYPES
16#include <vulkan/vulkan.h>
17
18#include <GFSDK_Aftermath.h>
19#include <GFSDK_Aftermath_Defines.h>
20#include <GFSDK_Aftermath_GpuCrashDump.h>
21#include <GFSDK_Aftermath_GpuCrashDumpDecoding.h>
22
#include "common/common_paths.h"
#include "common/common_types.h"
#include "common/file_util.h"
#include "common/logging/log.h"
#include "common/scope_exit.h"

#include "video_core/vulkan_common/nsight_aftermath_tracker.h"
30
31namespace Vulkan {
32
33static constexpr char AFTERMATH_LIB_NAME[] = "GFSDK_Aftermath_Lib.x64.dll";
34
35NsightAftermathTracker::NsightAftermathTracker() {
36 if (!dl.Open(AFTERMATH_LIB_NAME)) {
37 LOG_ERROR(Render_Vulkan, "Failed to load Nsight Aftermath DLL");
38 return;
39 }
40 if (!dl.GetSymbol("GFSDK_Aftermath_DisableGpuCrashDumps",
41 &GFSDK_Aftermath_DisableGpuCrashDumps) ||
42 !dl.GetSymbol("GFSDK_Aftermath_EnableGpuCrashDumps",
43 &GFSDK_Aftermath_EnableGpuCrashDumps) ||
44 !dl.GetSymbol("GFSDK_Aftermath_GetShaderDebugInfoIdentifier",
45 &GFSDK_Aftermath_GetShaderDebugInfoIdentifier) ||
46 !dl.GetSymbol("GFSDK_Aftermath_GetShaderHashSpirv", &GFSDK_Aftermath_GetShaderHashSpirv) ||
47 !dl.GetSymbol("GFSDK_Aftermath_GpuCrashDump_CreateDecoder",
48 &GFSDK_Aftermath_GpuCrashDump_CreateDecoder) ||
49 !dl.GetSymbol("GFSDK_Aftermath_GpuCrashDump_DestroyDecoder",
50 &GFSDK_Aftermath_GpuCrashDump_DestroyDecoder) ||
51 !dl.GetSymbol("GFSDK_Aftermath_GpuCrashDump_GenerateJSON",
52 &GFSDK_Aftermath_GpuCrashDump_GenerateJSON) ||
53 !dl.GetSymbol("GFSDK_Aftermath_GpuCrashDump_GetJSON",
54 &GFSDK_Aftermath_GpuCrashDump_GetJSON)) {
55 LOG_ERROR(Render_Vulkan, "Failed to load Nsight Aftermath function pointers");
56 return false;
57 }
58 dump_dir = Common::FS::GetUserPath(Common::FS::UserPath::LogDir) + "gpucrash";
59
60 void(Common::FS::DeleteDirRecursively(dump_dir));
61 if (!Common::FS::CreateDir(dump_dir)) {
62 LOG_ERROR(Render_Vulkan, "Failed to create Nsight Aftermath dump directory");
63 return;
64 }
65 if (!GFSDK_Aftermath_SUCCEED(GFSDK_Aftermath_EnableGpuCrashDumps(
66 GFSDK_Aftermath_Version_API, GFSDK_Aftermath_GpuCrashDumpWatchedApiFlags_Vulkan,
67 GFSDK_Aftermath_GpuCrashDumpFeatureFlags_Default, GpuCrashDumpCallback,
68 ShaderDebugInfoCallback, CrashDumpDescriptionCallback, this))) {
69 LOG_ERROR(Render_Vulkan, "GFSDK_Aftermath_EnableGpuCrashDumps failed");
70 return;
71 }
72 LOG_INFO(Render_Vulkan, "Nsight Aftermath dump directory is \"{}\"", dump_dir);
73 initialized = true;
74}
75
76NsightAftermathTracker::~NsightAftermathTracker() {
77 if (initialized) {
78 (void)GFSDK_Aftermath_DisableGpuCrashDumps();
79 }
80}
81
82void NsightAftermathTracker::SaveShader(const std::vector<u32>& spirv) const {
83 if (!initialized) {
84 return;
85 }
86
87 std::vector<u32> spirv_copy = spirv;
88 GFSDK_Aftermath_SpirvCode shader;
89 shader.pData = spirv_copy.data();
90 shader.size = static_cast<u32>(spirv_copy.size() * 4);
91
92 std::scoped_lock lock{mutex};
93
94 GFSDK_Aftermath_ShaderHash hash;
95 if (!GFSDK_Aftermath_SUCCEED(
96 GFSDK_Aftermath_GetShaderHashSpirv(GFSDK_Aftermath_Version_API, &shader, &hash))) {
97 LOG_ERROR(Render_Vulkan, "Failed to hash SPIR-V module");
98 return;
99 }
100
101 Common::FS::IOFile file(fmt::format("{}/source_{:016x}.spv", dump_dir, hash.hash), "wb");
102 if (!file.IsOpen()) {
103 LOG_ERROR(Render_Vulkan, "Failed to dump SPIR-V module with hash={:016x}", hash.hash);
104 return;
105 }
106 if (file.WriteArray(spirv.data(), spirv.size()) != spirv.size()) {
107 LOG_ERROR(Render_Vulkan, "Failed to write SPIR-V module with hash={:016x}", hash.hash);
108 return;
109 }
110}
111
// Invoked by Aftermath when the driver produces a GPU crash dump. Decodes the
// dump, writes the raw .nv-gpudmp blob plus a JSON decode of it into dump_dir.
// Runs on whatever thread Aftermath calls back from, hence the lock.
void NsightAftermathTracker::OnGpuCrashDumpCallback(const void* gpu_crash_dump,
                                                    u32 gpu_crash_dump_size) {
    std::scoped_lock lock{mutex};

    LOG_CRITICAL(Render_Vulkan, "called");

    GFSDK_Aftermath_GpuCrashDump_Decoder decoder;
    if (!GFSDK_Aftermath_SUCCEED(GFSDK_Aftermath_GpuCrashDump_CreateDecoder(
            GFSDK_Aftermath_Version_API, gpu_crash_dump, gpu_crash_dump_size, &decoder))) {
        LOG_ERROR(Render_Vulkan, "Failed to create decoder");
        return;
    }
    // Ensure the decoder is destroyed on every exit path below.
    SCOPE_EXIT({ GFSDK_Aftermath_GpuCrashDump_DestroyDecoder(decoder); });

    // Two-pass JSON retrieval: first query the size, then fetch the contents.
    u32 json_size = 0;
    if (!GFSDK_Aftermath_SUCCEED(GFSDK_Aftermath_GpuCrashDump_GenerateJSON(
            decoder, GFSDK_Aftermath_GpuCrashDumpDecoderFlags_ALL_INFO,
            GFSDK_Aftermath_GpuCrashDumpFormatterFlags_NONE, nullptr, nullptr, nullptr, nullptr,
            this, &json_size))) {
        LOG_ERROR(Render_Vulkan, "Failed to generate JSON");
        return;
    }
    std::vector<char> json(json_size);
    if (!GFSDK_Aftermath_SUCCEED(
            GFSDK_Aftermath_GpuCrashDump_GetJSON(decoder, json_size, json.data()))) {
        LOG_ERROR(Render_Vulkan, "Failed to query JSON");
        return;
    }

    // First dump is "crash.nv-gpudmp"; subsequent ones get a numeric suffix.
    // dump_id is only touched under `mutex`, so the post-increment is safe.
    const std::string base_name = [this] {
        const int id = dump_id++;
        if (id == 0) {
            return fmt::format("{}/crash.nv-gpudmp", dump_dir);
        } else {
            return fmt::format("{}/crash_{}.nv-gpudmp", dump_dir, id);
        }
    }();

    std::string_view dump_view(static_cast<const char*>(gpu_crash_dump), gpu_crash_dump_size);
    if (Common::FS::WriteStringToFile(false, base_name, dump_view) != gpu_crash_dump_size) {
        LOG_ERROR(Render_Vulkan, "Failed to write dump file");
        return;
    }
    const std::string_view json_view(json.data(), json.size());
    if (Common::FS::WriteStringToFile(true, base_name + ".json", json_view) != json.size()) {
        LOG_ERROR(Render_Vulkan, "Failed to write JSON");
        return;
    }
}
161
// Invoked by Aftermath with per-shader debug information. Writes the blob to a
// .nvdbg file named after Aftermath's 128-bit identifier so tools can match it
// to a crash dump. Runs on Aftermath's callback thread, hence the lock.
void NsightAftermathTracker::OnShaderDebugInfoCallback(const void* shader_debug_info,
                                                       u32 shader_debug_info_size) {
    std::scoped_lock lock{mutex};

    GFSDK_Aftermath_ShaderDebugInfoIdentifier identifier;
    if (!GFSDK_Aftermath_SUCCEED(GFSDK_Aftermath_GetShaderDebugInfoIdentifier(
            GFSDK_Aftermath_Version_API, shader_debug_info, shader_debug_info_size, &identifier))) {
        LOG_ERROR(Render_Vulkan, "GFSDK_Aftermath_GetShaderDebugInfoIdentifier failed");
        return;
    }

    // identifier.id holds two 64-bit halves of the debug info identifier.
    const std::string path =
        fmt::format("{}/shader_{:016x}{:016x}.nvdbg", dump_dir, identifier.id[0], identifier.id[1]);
    Common::FS::IOFile file(path, "wb");
    if (!file.IsOpen()) {
        LOG_ERROR(Render_Vulkan, "Failed to create file {}", path);
        return;
    }
    if (file.WriteBytes(static_cast<const u8*>(shader_debug_info), shader_debug_info_size) !=
        shader_debug_info_size) {
        LOG_ERROR(Render_Vulkan, "Failed to write file {}", path);
        return;
    }
}
186
// Invoked by Aftermath while a crash dump is being built; attaches
// human-readable metadata (here only the application name) to the dump.
void NsightAftermathTracker::OnCrashDumpDescriptionCallback(
    PFN_GFSDK_Aftermath_AddGpuCrashDumpDescription add_description) {
    add_description(GFSDK_Aftermath_GpuCrashDumpDescriptionKey_ApplicationName, "yuzu");
}
191
192void NsightAftermathTracker::GpuCrashDumpCallback(const void* gpu_crash_dump,
193 u32 gpu_crash_dump_size, void* user_data) {
194 static_cast<NsightAftermathTracker*>(user_data)->OnGpuCrashDumpCallback(gpu_crash_dump,
195 gpu_crash_dump_size);
196}
197
198void NsightAftermathTracker::ShaderDebugInfoCallback(const void* shader_debug_info,
199 u32 shader_debug_info_size, void* user_data) {
200 static_cast<NsightAftermathTracker*>(user_data)->OnShaderDebugInfoCallback(
201 shader_debug_info, shader_debug_info_size);
202}
203
204void NsightAftermathTracker::CrashDumpDescriptionCallback(
205 PFN_GFSDK_Aftermath_AddGpuCrashDumpDescription add_description, void* user_data) {
206 static_cast<NsightAftermathTracker*>(user_data)->OnCrashDumpDescriptionCallback(
207 add_description);
208}
209
210} // namespace Vulkan
211
212#endif // HAS_NSIGHT_AFTERMATH
diff --git a/src/video_core/vulkan_common/nsight_aftermath_tracker.h b/src/video_core/vulkan_common/nsight_aftermath_tracker.h
new file mode 100644
index 000000000..cee3847fb
--- /dev/null
+++ b/src/video_core/vulkan_common/nsight_aftermath_tracker.h
@@ -0,0 +1,82 @@
1// Copyright 2020 yuzu Emulator Project
2// Licensed under GPLv2 or any later version
3// Refer to the license.txt file included.
4
5#pragma once
6
7#include <mutex>
8#include <string>
9#include <vector>
10
11#define VK_NO_PROTOTYPES
12#include <vulkan/vulkan.h>
13
14#ifdef HAS_NSIGHT_AFTERMATH
15#include <GFSDK_Aftermath_Defines.h>
16#include <GFSDK_Aftermath_GpuCrashDump.h>
17#include <GFSDK_Aftermath_GpuCrashDumpDecoding.h>
18#endif
19
20#include "common/common_types.h"
21#include "common/dynamic_library.h"
22
23namespace Vulkan {
24
/// Integrates Nvidia's Nsight Aftermath SDK to capture GPU crash dumps and
/// shader debug information into a per-run dump directory.
/// When HAS_NSIGHT_AFTERMATH is not defined, all members compile to no-op
/// inline stubs (see below), so callers never need to guard usage.
class NsightAftermathTracker {
public:
    NsightAftermathTracker();
    ~NsightAftermathTracker();

    NsightAftermathTracker(const NsightAftermathTracker&) = delete;
    NsightAftermathTracker& operator=(const NsightAftermathTracker&) = delete;

    // Delete move semantics because Aftermath initialization uses a pointer to this.
    NsightAftermathTracker(NsightAftermathTracker&&) = delete;
    NsightAftermathTracker& operator=(NsightAftermathTracker&&) = delete;

    /// Dumps a SPIR-V module to disk so it can be matched to a later crash dump.
    void SaveShader(const std::vector<u32>& spirv) const;

private:
#ifdef HAS_NSIGHT_AFTERMATH
    // Static C callbacks registered with Aftermath; `user_data` carries `this`.
    static void GpuCrashDumpCallback(const void* gpu_crash_dump, u32 gpu_crash_dump_size,
                                     void* user_data);

    static void ShaderDebugInfoCallback(const void* shader_debug_info, u32 shader_debug_info_size,
                                        void* user_data);

    static void CrashDumpDescriptionCallback(
        PFN_GFSDK_Aftermath_AddGpuCrashDumpDescription add_description, void* user_data);

    // Member handlers the static callbacks forward to.
    void OnGpuCrashDumpCallback(const void* gpu_crash_dump, u32 gpu_crash_dump_size);

    void OnShaderDebugInfoCallback(const void* shader_debug_info, u32 shader_debug_info_size);

    void OnCrashDumpDescriptionCallback(
        PFN_GFSDK_Aftermath_AddGpuCrashDumpDescription add_description);

    // Serializes callback handling and SaveShader; mutable so const SaveShader can lock it.
    mutable std::mutex mutex;

    // Directory receiving dumps; recreated on construction.
    std::string dump_dir;
    // Monotonic counter used to name successive crash dumps.
    int dump_id = 0;

    // True only when the DLL, its symbols, the dump directory and the
    // Aftermath registration all succeeded.
    bool initialized = false;

    // Dynamically loaded Aftermath library and its resolved entry points.
    Common::DynamicLibrary dl;
    PFN_GFSDK_Aftermath_DisableGpuCrashDumps GFSDK_Aftermath_DisableGpuCrashDumps;
    PFN_GFSDK_Aftermath_EnableGpuCrashDumps GFSDK_Aftermath_EnableGpuCrashDumps;
    PFN_GFSDK_Aftermath_GetShaderDebugInfoIdentifier GFSDK_Aftermath_GetShaderDebugInfoIdentifier;
    PFN_GFSDK_Aftermath_GetShaderHashSpirv GFSDK_Aftermath_GetShaderHashSpirv;
    PFN_GFSDK_Aftermath_GpuCrashDump_CreateDecoder GFSDK_Aftermath_GpuCrashDump_CreateDecoder;
    PFN_GFSDK_Aftermath_GpuCrashDump_DestroyDecoder GFSDK_Aftermath_GpuCrashDump_DestroyDecoder;
    PFN_GFSDK_Aftermath_GpuCrashDump_GenerateJSON GFSDK_Aftermath_GpuCrashDump_GenerateJSON;
    PFN_GFSDK_Aftermath_GpuCrashDump_GetJSON GFSDK_Aftermath_GpuCrashDump_GetJSON;
#endif
};
75
#ifndef HAS_NSIGHT_AFTERMATH
// No-op stubs for builds without the Aftermath SDK, so call sites need no #ifdef guards.
inline NsightAftermathTracker::NsightAftermathTracker() = default;
inline NsightAftermathTracker::~NsightAftermathTracker() = default;
inline void NsightAftermathTracker::SaveShader(const std::vector<u32>&) const {}
#endif
81
82} // namespace Vulkan
diff --git a/src/video_core/vulkan_common/vulkan_debug_callback.cpp b/src/video_core/vulkan_common/vulkan_debug_callback.cpp
new file mode 100644
index 000000000..ea7af8ad4
--- /dev/null
+++ b/src/video_core/vulkan_common/vulkan_debug_callback.cpp
@@ -0,0 +1,45 @@
1// Copyright 2020 yuzu Emulator Project
2// Licensed under GPLv2 or any later version
3// Refer to the license.txt file included.
4
5#include <string_view>
6#include "common/logging/log.h"
7#include "video_core/vulkan_common/vulkan_debug_callback.h"
8
9namespace Vulkan {
10namespace {
11VkBool32 Callback(VkDebugUtilsMessageSeverityFlagBitsEXT severity,
12 VkDebugUtilsMessageTypeFlagsEXT type,
13 const VkDebugUtilsMessengerCallbackDataEXT* data,
14 [[maybe_unused]] void* user_data) {
15 const std::string_view message{data->pMessage};
16 if (severity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT) {
17 LOG_CRITICAL(Render_Vulkan, "{}", message);
18 } else if (severity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT) {
19 LOG_WARNING(Render_Vulkan, "{}", message);
20 } else if (severity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT) {
21 LOG_INFO(Render_Vulkan, "{}", message);
22 } else if (severity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT) {
23 LOG_DEBUG(Render_Vulkan, "{}", message);
24 }
25 return VK_FALSE;
26}
27} // Anonymous namespace
28
29vk::DebugUtilsMessenger CreateDebugCallback(const vk::Instance& instance) {
30 return instance.CreateDebugUtilsMessenger(VkDebugUtilsMessengerCreateInfoEXT{
31 .sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT,
32 .pNext = nullptr,
33 .flags = 0,
34 .messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT |
35 VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT |
36 VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT |
37 VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT,
38 .messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT |
39 VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT |
40 VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT,
41 .pfnUserCallback = Callback,
42 });
43}
44
45} // namespace Vulkan
diff --git a/src/video_core/vulkan_common/vulkan_debug_callback.h b/src/video_core/vulkan_common/vulkan_debug_callback.h
new file mode 100644
index 000000000..2efcd244c
--- /dev/null
+++ b/src/video_core/vulkan_common/vulkan_debug_callback.h
@@ -0,0 +1,11 @@
1// Copyright 2020 yuzu Emulator Project
2// Licensed under GPLv2 or any later version
3// Refer to the license.txt file included.
4
5#include "video_core/vulkan_common/vulkan_wrapper.h"
6
7namespace Vulkan {
8
9vk::DebugUtilsMessenger CreateDebugCallback(const vk::Instance& instance);
10
11} // namespace Vulkan
diff --git a/src/video_core/vulkan_common/vulkan_device.cpp b/src/video_core/vulkan_common/vulkan_device.cpp
new file mode 100644
index 000000000..75173324e
--- /dev/null
+++ b/src/video_core/vulkan_common/vulkan_device.cpp
@@ -0,0 +1,886 @@
1// Copyright 2018 yuzu Emulator Project
2// Licensed under GPLv2 or any later version
3// Refer to the license.txt file included.
4
5#include <bitset>
6#include <chrono>
7#include <optional>
8#include <string_view>
9#include <thread>
10#include <unordered_set>
11#include <utility>
12#include <vector>
13
14#include "common/assert.h"
15#include "core/settings.h"
16#include "video_core/vulkan_common/nsight_aftermath_tracker.h"
17#include "video_core/vulkan_common/vulkan_device.h"
18#include "video_core/vulkan_common/vulkan_wrapper.h"
19
20namespace Vulkan {
21
22namespace {
23
namespace Alternatives {

// Fallback format tables used by GetSupportedFormat, ordered by preference and
// terminated by a zero VkFormat{} (VK_FORMAT_UNDEFINED) sentinel.
constexpr std::array Depth24UnormS8_UINT{
    VK_FORMAT_D32_SFLOAT_S8_UINT,
    VK_FORMAT_D16_UNORM_S8_UINT,
    VkFormat{},
};

constexpr std::array Depth16UnormS8_UINT{
    VK_FORMAT_D24_UNORM_S8_UINT,
    VK_FORMAT_D32_SFLOAT_S8_UINT,
    VkFormat{},
};

} // namespace Alternatives
39
// Device extensions the Vulkan backend unconditionally requests at device
// creation. NOTE(review): presumably validated in CheckSuitability — confirm,
// as that function is outside this chunk.
constexpr std::array REQUIRED_EXTENSIONS{
    VK_KHR_SWAPCHAIN_EXTENSION_NAME,
    VK_KHR_MAINTENANCE1_EXTENSION_NAME,
    VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME,
    VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME,
    VK_KHR_16BIT_STORAGE_EXTENSION_NAME,
    VK_KHR_8BIT_STORAGE_EXTENSION_NAME,
    VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME,
    VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME,
    VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME,
    VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME,
    VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME,
    VK_EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME,
    VK_EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME,
    VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME,
};
56
// Appends `data` to a Vulkan pNext chain: writes its address at the current
// tail slot, then advances the tail pointer to data's own pNext member so the
// next call links after it.
template <typename T>
void SetNext(void**& next, T& data) {
    *next = &data;
    next = &data.pNext;
}
62
63constexpr const VkFormat* GetFormatAlternatives(VkFormat format) {
64 switch (format) {
65 case VK_FORMAT_D24_UNORM_S8_UINT:
66 return Alternatives::Depth24UnormS8_UINT.data();
67 case VK_FORMAT_D16_UNORM_S8_UINT:
68 return Alternatives::Depth16UnormS8_UINT.data();
69 default:
70 return nullptr;
71 }
72}
73
74VkFormatFeatureFlags GetFormatFeatures(VkFormatProperties properties, FormatType format_type) {
75 switch (format_type) {
76 case FormatType::Linear:
77 return properties.linearTilingFeatures;
78 case FormatType::Optimal:
79 return properties.optimalTilingFeatures;
80 case FormatType::Buffer:
81 return properties.bufferFeatures;
82 default:
83 return {};
84 }
85}
86
87[[nodiscard]] bool IsRDNA(std::string_view device_name, VkDriverIdKHR driver_id) {
88 static constexpr std::array RDNA_DEVICES{
89 "5700",
90 "5600",
91 "5500",
92 "5300",
93 };
94 if (driver_id != VK_DRIVER_ID_AMD_PROPRIETARY_KHR) {
95 return false;
96 }
97 return std::any_of(RDNA_DEVICES.begin(), RDNA_DEVICES.end(), [device_name](const char* name) {
98 return device_name.find(name) != std::string_view::npos;
99 });
100}
101
102std::unordered_map<VkFormat, VkFormatProperties> GetFormatProperties(
103 vk::PhysicalDevice physical, const vk::InstanceDispatch& dld) {
104 static constexpr std::array formats{
105 VK_FORMAT_A8B8G8R8_UNORM_PACK32,
106 VK_FORMAT_A8B8G8R8_UINT_PACK32,
107 VK_FORMAT_A8B8G8R8_SNORM_PACK32,
108 VK_FORMAT_A8B8G8R8_SINT_PACK32,
109 VK_FORMAT_A8B8G8R8_SRGB_PACK32,
110 VK_FORMAT_B5G6R5_UNORM_PACK16,
111 VK_FORMAT_A2B10G10R10_UNORM_PACK32,
112 VK_FORMAT_A2B10G10R10_UINT_PACK32,
113 VK_FORMAT_A1R5G5B5_UNORM_PACK16,
114 VK_FORMAT_R32G32B32A32_SFLOAT,
115 VK_FORMAT_R32G32B32A32_SINT,
116 VK_FORMAT_R32G32B32A32_UINT,
117 VK_FORMAT_R32G32_SFLOAT,
118 VK_FORMAT_R32G32_SINT,
119 VK_FORMAT_R32G32_UINT,
120 VK_FORMAT_R16G16B16A16_SINT,
121 VK_FORMAT_R16G16B16A16_UINT,
122 VK_FORMAT_R16G16B16A16_SNORM,
123 VK_FORMAT_R16G16B16A16_UNORM,
124 VK_FORMAT_R16G16_UNORM,
125 VK_FORMAT_R16G16_SNORM,
126 VK_FORMAT_R16G16_SFLOAT,
127 VK_FORMAT_R16G16_SINT,
128 VK_FORMAT_R16_UNORM,
129 VK_FORMAT_R16_UINT,
130 VK_FORMAT_R8G8B8A8_SRGB,
131 VK_FORMAT_R8G8_UNORM,
132 VK_FORMAT_R8G8_SNORM,
133 VK_FORMAT_R8G8_SINT,
134 VK_FORMAT_R8G8_UINT,
135 VK_FORMAT_R8_UNORM,
136 VK_FORMAT_R8_SNORM,
137 VK_FORMAT_R8_SINT,
138 VK_FORMAT_R8_UINT,
139 VK_FORMAT_B10G11R11_UFLOAT_PACK32,
140 VK_FORMAT_R32_SFLOAT,
141 VK_FORMAT_R32_UINT,
142 VK_FORMAT_R32_SINT,
143 VK_FORMAT_R16_SFLOAT,
144 VK_FORMAT_R16G16B16A16_SFLOAT,
145 VK_FORMAT_B8G8R8A8_UNORM,
146 VK_FORMAT_B8G8R8A8_SRGB,
147 VK_FORMAT_R4G4B4A4_UNORM_PACK16,
148 VK_FORMAT_D32_SFLOAT,
149 VK_FORMAT_D16_UNORM,
150 VK_FORMAT_D16_UNORM_S8_UINT,
151 VK_FORMAT_D24_UNORM_S8_UINT,
152 VK_FORMAT_D32_SFLOAT_S8_UINT,
153 VK_FORMAT_BC1_RGBA_UNORM_BLOCK,
154 VK_FORMAT_BC2_UNORM_BLOCK,
155 VK_FORMAT_BC3_UNORM_BLOCK,
156 VK_FORMAT_BC4_UNORM_BLOCK,
157 VK_FORMAT_BC4_SNORM_BLOCK,
158 VK_FORMAT_BC5_UNORM_BLOCK,
159 VK_FORMAT_BC5_SNORM_BLOCK,
160 VK_FORMAT_BC7_UNORM_BLOCK,
161 VK_FORMAT_BC6H_UFLOAT_BLOCK,
162 VK_FORMAT_BC6H_SFLOAT_BLOCK,
163 VK_FORMAT_BC1_RGBA_SRGB_BLOCK,
164 VK_FORMAT_BC2_SRGB_BLOCK,
165 VK_FORMAT_BC3_SRGB_BLOCK,
166 VK_FORMAT_BC7_SRGB_BLOCK,
167 VK_FORMAT_ASTC_4x4_UNORM_BLOCK,
168 VK_FORMAT_ASTC_4x4_SRGB_BLOCK,
169 VK_FORMAT_ASTC_5x4_UNORM_BLOCK,
170 VK_FORMAT_ASTC_5x4_SRGB_BLOCK,
171 VK_FORMAT_ASTC_5x5_UNORM_BLOCK,
172 VK_FORMAT_ASTC_5x5_SRGB_BLOCK,
173 VK_FORMAT_ASTC_6x5_UNORM_BLOCK,
174 VK_FORMAT_ASTC_6x5_SRGB_BLOCK,
175 VK_FORMAT_ASTC_6x6_UNORM_BLOCK,
176 VK_FORMAT_ASTC_6x6_SRGB_BLOCK,
177 VK_FORMAT_ASTC_8x5_UNORM_BLOCK,
178 VK_FORMAT_ASTC_8x5_SRGB_BLOCK,
179 VK_FORMAT_ASTC_8x6_UNORM_BLOCK,
180 VK_FORMAT_ASTC_8x6_SRGB_BLOCK,
181 VK_FORMAT_ASTC_8x8_UNORM_BLOCK,
182 VK_FORMAT_ASTC_8x8_SRGB_BLOCK,
183 VK_FORMAT_ASTC_10x5_UNORM_BLOCK,
184 VK_FORMAT_ASTC_10x5_SRGB_BLOCK,
185 VK_FORMAT_ASTC_10x6_UNORM_BLOCK,
186 VK_FORMAT_ASTC_10x6_SRGB_BLOCK,
187 VK_FORMAT_ASTC_10x8_UNORM_BLOCK,
188 VK_FORMAT_ASTC_10x8_SRGB_BLOCK,
189 VK_FORMAT_ASTC_10x10_UNORM_BLOCK,
190 VK_FORMAT_ASTC_10x10_SRGB_BLOCK,
191 VK_FORMAT_ASTC_12x10_UNORM_BLOCK,
192 VK_FORMAT_ASTC_12x10_SRGB_BLOCK,
193 VK_FORMAT_ASTC_12x12_UNORM_BLOCK,
194 VK_FORMAT_ASTC_12x12_SRGB_BLOCK,
195 VK_FORMAT_ASTC_8x6_UNORM_BLOCK,
196 VK_FORMAT_ASTC_8x6_SRGB_BLOCK,
197 VK_FORMAT_ASTC_6x5_UNORM_BLOCK,
198 VK_FORMAT_ASTC_6x5_SRGB_BLOCK,
199 VK_FORMAT_E5B9G9R9_UFLOAT_PACK32,
200 };
201 std::unordered_map<VkFormat, VkFormatProperties> format_properties;
202 for (const auto format : formats) {
203 format_properties.emplace(format, physical.GetFormatProperties(format));
204 }
205 return format_properties;
206}
207
208} // Anonymous namespace
209
// Creates the logical Vulkan device: verifies suitability, selects queue
// families, builds the VkPhysicalDeviceFeatures2 pNext chain of requested
// feature structs (core + per-extension), creates the device, then collects
// telemetry/tooling info and applies driver-specific workarounds.
Device::Device(VkInstance instance_, vk::PhysicalDevice physical_, VkSurfaceKHR surface,
               const vk::InstanceDispatch& dld_)
    : instance{instance_}, dld{dld_}, physical{physical_}, properties{physical.GetProperties()},
      format_properties{GetFormatProperties(physical, dld)} {
    CheckSuitability();
    SetupFamilies(surface);
    SetupFeatures();

    const auto queue_cis = GetDeviceQueueCreateInfos();
    const std::vector extensions = LoadExtensions();

    VkPhysicalDeviceFeatures2 features2{
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
        .pNext = nullptr,
    };
    // `first_next` is what is ultimately passed to vkCreateDevice; `next`
    // tracks the tail of the pNext chain for SetNext.
    const void* first_next = &features2;
    void** next = &features2.pNext;

    // Core Vulkan 1.0 features requested from the driver. Two entries depend
    // on what SetupFeatures detected on this device.
    features2.features = {
        .robustBufferAccess = false,
        .fullDrawIndexUint32 = false,
        .imageCubeArray = true,
        .independentBlend = true,
        .geometryShader = true,
        .tessellationShader = true,
        .sampleRateShading = false,
        .dualSrcBlend = false,
        .logicOp = false,
        .multiDrawIndirect = false,
        .drawIndirectFirstInstance = false,
        .depthClamp = true,
        .depthBiasClamp = true,
        .fillModeNonSolid = false,
        .depthBounds = false,
        .wideLines = false,
        .largePoints = true,
        .alphaToOne = false,
        .multiViewport = true,
        .samplerAnisotropy = true,
        .textureCompressionETC2 = false,
        .textureCompressionASTC_LDR = is_optimal_astc_supported,
        .textureCompressionBC = false,
        .occlusionQueryPrecise = true,
        .pipelineStatisticsQuery = false,
        .vertexPipelineStoresAndAtomics = true,
        .fragmentStoresAndAtomics = true,
        .shaderTessellationAndGeometryPointSize = false,
        .shaderImageGatherExtended = true,
        .shaderStorageImageExtendedFormats = false,
        .shaderStorageImageMultisample = true,
        .shaderStorageImageReadWithoutFormat = is_formatless_image_load_supported,
        .shaderStorageImageWriteWithoutFormat = true,
        .shaderUniformBufferArrayDynamicIndexing = false,
        .shaderSampledImageArrayDynamicIndexing = false,
        .shaderStorageBufferArrayDynamicIndexing = false,
        .shaderStorageImageArrayDynamicIndexing = false,
        .shaderClipDistance = false,
        .shaderCullDistance = false,
        .shaderFloat64 = false,
        .shaderInt64 = false,
        .shaderInt16 = false,
        .shaderResourceResidency = false,
        .shaderResourceMinLod = false,
        .sparseBinding = false,
        .sparseResidencyBuffer = false,
        .sparseResidencyImage2D = false,
        .sparseResidencyImage3D = false,
        .sparseResidency2Samples = false,
        .sparseResidency4Samples = false,
        .sparseResidency8Samples = false,
        .sparseResidency16Samples = false,
        .sparseResidencyAliased = false,
        .variableMultisampleRate = false,
        .inheritedQueries = false,
    };
    // Always-required feature structs (matching REQUIRED_EXTENSIONS).
    VkPhysicalDeviceTimelineSemaphoreFeaturesKHR timeline_semaphore{
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR,
        .pNext = nullptr,
        .timelineSemaphore = true,
    };
    SetNext(next, timeline_semaphore);

    VkPhysicalDevice16BitStorageFeaturesKHR bit16_storage{
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR,
        .pNext = nullptr,
        .storageBuffer16BitAccess = false,
        .uniformAndStorageBuffer16BitAccess = true,
        .storagePushConstant16 = false,
        .storageInputOutput16 = false,
    };
    SetNext(next, bit16_storage);

    VkPhysicalDevice8BitStorageFeaturesKHR bit8_storage{
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR,
        .pNext = nullptr,
        .storageBuffer8BitAccess = false,
        .uniformAndStorageBuffer8BitAccess = true,
        .storagePushConstant8 = false,
    };
    SetNext(next, bit8_storage);

    // .pNext is implicitly value-initialized to nullptr here.
    VkPhysicalDeviceHostQueryResetFeaturesEXT host_query_reset{
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT,
        .hostQueryReset = true,
    };
    SetNext(next, host_query_reset);

    // Optional feature structs below are declared outside the `if` so they
    // outlive device creation once linked into the pNext chain.
    VkPhysicalDeviceFloat16Int8FeaturesKHR float16_int8;
    if (is_float16_supported) {
        float16_int8 = {
            .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR,
            .pNext = nullptr,
            .shaderFloat16 = true,
            .shaderInt8 = false,
        };
        SetNext(next, float16_int8);
    } else {
        LOG_INFO(Render_Vulkan, "Device doesn't support float16 natively");
    }

    if (!nv_viewport_swizzle) {
        LOG_INFO(Render_Vulkan, "Device doesn't support viewport swizzles");
    }

    VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR std430_layout;
    if (khr_uniform_buffer_standard_layout) {
        std430_layout = {
            .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR,
            .pNext = nullptr,
            .uniformBufferStandardLayout = true,
        };
        SetNext(next, std430_layout);
    } else {
        LOG_INFO(Render_Vulkan, "Device doesn't support packed UBOs");
    }

    VkPhysicalDeviceIndexTypeUint8FeaturesEXT index_type_uint8;
    if (ext_index_type_uint8) {
        index_type_uint8 = {
            .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT,
            .pNext = nullptr,
            .indexTypeUint8 = true,
        };
        SetNext(next, index_type_uint8);
    } else {
        LOG_INFO(Render_Vulkan, "Device doesn't support uint8 indexes");
    }

    VkPhysicalDeviceTransformFeedbackFeaturesEXT transform_feedback;
    if (ext_transform_feedback) {
        transform_feedback = {
            .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT,
            .pNext = nullptr,
            .transformFeedback = true,
            .geometryStreams = true,
        };
        SetNext(next, transform_feedback);
    } else {
        LOG_INFO(Render_Vulkan, "Device doesn't support transform feedbacks");
    }

    VkPhysicalDeviceCustomBorderColorFeaturesEXT custom_border;
    if (ext_custom_border_color) {
        custom_border = {
            .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT,
            .pNext = nullptr,
            .customBorderColors = VK_TRUE,
            .customBorderColorWithoutFormat = VK_TRUE,
        };
        SetNext(next, custom_border);
    } else {
        LOG_INFO(Render_Vulkan, "Device doesn't support custom border colors");
    }

    VkPhysicalDeviceExtendedDynamicStateFeaturesEXT dynamic_state;
    if (ext_extended_dynamic_state) {
        dynamic_state = {
            .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT,
            .pNext = nullptr,
            .extendedDynamicState = VK_TRUE,
        };
        SetNext(next, dynamic_state);
    } else {
        LOG_INFO(Render_Vulkan, "Device doesn't support extended dynamic state");
    }

    VkPhysicalDeviceRobustness2FeaturesEXT robustness2;
    if (ext_robustness2) {
        robustness2 = {
            .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT,
            .pNext = nullptr,
            .robustBufferAccess2 = false,
            .robustImageAccess2 = true,
            .nullDescriptor = true,
        };
        SetNext(next, robustness2);
    } else {
        LOG_INFO(Render_Vulkan, "Device doesn't support robustness2");
    }

    if (!ext_depth_range_unrestricted) {
        LOG_INFO(Render_Vulkan, "Device doesn't support depth range unrestricted");
    }

    // Nvidia diagnostics config is prepended to the chain (points at features2),
    // so it replaces features2 as the head passed to device creation.
    VkDeviceDiagnosticsConfigCreateInfoNV diagnostics_nv;
    if (nv_device_diagnostics_config) {
        nsight_aftermath_tracker = std::make_unique<NsightAftermathTracker>();

        diagnostics_nv = {
            .sType = VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV,
            .pNext = &features2,
            .flags = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV |
                     VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV |
                     VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_AUTOMATIC_CHECKPOINTS_BIT_NV,
        };
        first_next = &diagnostics_nv;
    }
    logical = vk::Device::Create(physical, queue_cis, extensions, first_next, dld);

    CollectTelemetryParameters();
    CollectToolingInfo();

    // Driver-specific blacklists applied after CollectTelemetryParameters has
    // filled in driver_id.
    if (ext_extended_dynamic_state && driver_id == VK_DRIVER_ID_MESA_RADV) {
        LOG_WARNING(
            Render_Vulkan,
            "Blacklisting RADV for VK_EXT_extended_dynamic state, likely due to a bug in yuzu");
        ext_extended_dynamic_state = false;
    }
    if (ext_extended_dynamic_state && IsRDNA(properties.deviceName, driver_id)) {
        // AMD's proprietary driver supports VK_EXT_extended_dynamic_state but on RDNA devices it
        // seems to cause stability issues
        LOG_WARNING(
            Render_Vulkan,
            "Blacklisting AMD proprietary on RDNA devices from VK_EXT_extended_dynamic_state");
        ext_extended_dynamic_state = false;
    }

    graphics_queue = logical.GetQueue(graphics_family);
    present_queue = logical.GetQueue(present_family);

    use_asynchronous_shaders = Settings::values.use_asynchronous_shaders.GetValue();
}
452
// Defaulted out of line — presumably so the header can hold smart-pointer
// members to forward-declared types (e.g. NsightAftermathTracker).
// NOTE(review): confirm against vulkan_device.h, which is not in this chunk.
Device::~Device() = default;
454
455VkFormat Device::GetSupportedFormat(VkFormat wanted_format, VkFormatFeatureFlags wanted_usage,
456 FormatType format_type) const {
457 if (IsFormatSupported(wanted_format, wanted_usage, format_type)) {
458 return wanted_format;
459 }
460 // The wanted format is not supported by hardware, search for alternatives
461 const VkFormat* alternatives = GetFormatAlternatives(wanted_format);
462 if (alternatives == nullptr) {
463 UNREACHABLE_MSG("Format={} with usage={} and type={} has no defined alternatives and host "
464 "hardware does not support it",
465 wanted_format, wanted_usage, format_type);
466 return wanted_format;
467 }
468
469 std::size_t i = 0;
470 for (VkFormat alternative = *alternatives; alternative; alternative = alternatives[++i]) {
471 if (!IsFormatSupported(alternative, wanted_usage, format_type)) {
472 continue;
473 }
474 LOG_WARNING(Render_Vulkan,
475 "Emulating format={} with alternative format={} with usage={} and type={}",
476 wanted_format, alternative, wanted_usage, format_type);
477 return alternative;
478 }
479
480 // No alternatives found, panic
481 UNREACHABLE_MSG("Format={} with usage={} and type={} is not supported by the host hardware and "
482 "doesn't support any of the alternatives",
483 wanted_format, wanted_usage, format_type);
484 return wanted_format;
485}
486
487void Device::ReportLoss() const {
488 LOG_CRITICAL(Render_Vulkan, "Device loss occured!");
489
490 // Wait for the log to flush and for Nsight Aftermath to dump the results
491 std::this_thread::sleep_for(std::chrono::seconds{15});
492}
493
494void Device::SaveShader(const std::vector<u32>& spirv) const {
495 if (nsight_aftermath_tracker) {
496 nsight_aftermath_tracker->SaveShader(spirv);
497 }
498}
499
500bool Device::IsOptimalAstcSupported(const VkPhysicalDeviceFeatures& features) const {
501 // Disable for now to avoid converting ASTC twice.
502 static constexpr std::array astc_formats = {
503 VK_FORMAT_ASTC_4x4_UNORM_BLOCK, VK_FORMAT_ASTC_4x4_SRGB_BLOCK,
504 VK_FORMAT_ASTC_5x4_UNORM_BLOCK, VK_FORMAT_ASTC_5x4_SRGB_BLOCK,
505 VK_FORMAT_ASTC_5x5_UNORM_BLOCK, VK_FORMAT_ASTC_5x5_SRGB_BLOCK,
506 VK_FORMAT_ASTC_6x5_UNORM_BLOCK, VK_FORMAT_ASTC_6x5_SRGB_BLOCK,
507 VK_FORMAT_ASTC_6x6_UNORM_BLOCK, VK_FORMAT_ASTC_6x6_SRGB_BLOCK,
508 VK_FORMAT_ASTC_8x5_UNORM_BLOCK, VK_FORMAT_ASTC_8x5_SRGB_BLOCK,
509 VK_FORMAT_ASTC_8x6_UNORM_BLOCK, VK_FORMAT_ASTC_8x6_SRGB_BLOCK,
510 VK_FORMAT_ASTC_8x8_UNORM_BLOCK, VK_FORMAT_ASTC_8x8_SRGB_BLOCK,
511 VK_FORMAT_ASTC_10x5_UNORM_BLOCK, VK_FORMAT_ASTC_10x5_SRGB_BLOCK,
512 VK_FORMAT_ASTC_10x6_UNORM_BLOCK, VK_FORMAT_ASTC_10x6_SRGB_BLOCK,
513 VK_FORMAT_ASTC_10x8_UNORM_BLOCK, VK_FORMAT_ASTC_10x8_SRGB_BLOCK,
514 VK_FORMAT_ASTC_10x10_UNORM_BLOCK, VK_FORMAT_ASTC_10x10_SRGB_BLOCK,
515 VK_FORMAT_ASTC_12x10_UNORM_BLOCK, VK_FORMAT_ASTC_12x10_SRGB_BLOCK,
516 VK_FORMAT_ASTC_12x12_UNORM_BLOCK, VK_FORMAT_ASTC_12x12_SRGB_BLOCK,
517 };
518 if (!features.textureCompressionASTC_LDR) {
519 return false;
520 }
521 const auto format_feature_usage{
522 VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT | VK_FORMAT_FEATURE_BLIT_SRC_BIT |
523 VK_FORMAT_FEATURE_BLIT_DST_BIT | VK_FORMAT_FEATURE_TRANSFER_SRC_BIT |
524 VK_FORMAT_FEATURE_TRANSFER_DST_BIT};
525 for (const auto format : astc_formats) {
526 const auto physical_format_properties{physical.GetFormatProperties(format)};
527 if ((physical_format_properties.optimalTilingFeatures & format_feature_usage) == 0) {
528 return false;
529 }
530 }
531 return true;
532}
533
534bool Device::TestDepthStencilBlits() const {
535 static constexpr VkFormatFeatureFlags required_features =
536 VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_BLIT_DST_BIT;
537 const auto test_features = [](VkFormatProperties props) {
538 return (props.optimalTilingFeatures & required_features) == required_features;
539 };
540 return test_features(format_properties.at(VK_FORMAT_D32_SFLOAT_S8_UINT)) &&
541 test_features(format_properties.at(VK_FORMAT_D24_UNORM_S8_UINT));
542}
543
544bool Device::IsFormatSupported(VkFormat wanted_format, VkFormatFeatureFlags wanted_usage,
545 FormatType format_type) const {
546 const auto it = format_properties.find(wanted_format);
547 if (it == format_properties.end()) {
548 UNIMPLEMENTED_MSG("Unimplemented format query={}", wanted_format);
549 return true;
550 }
551 const auto supported_usage = GetFormatFeatures(it->second, format_type);
552 return (supported_usage & wanted_usage) == wanted_usage;
553}
554
// Validates that the physical device meets every hard requirement: required device
// extensions, minimum limits and mandatory Vulkan 1.0 features.
// Throws vk::Exception (VK_ERROR_EXTENSION_NOT_PRESENT / VK_ERROR_FEATURE_NOT_PRESENT)
// on the first unmet requirement.
void Device::CheckSuitability() const {
    // Mark which entries of REQUIRED_EXTENSIONS the device actually advertises.
    std::bitset<REQUIRED_EXTENSIONS.size()> available_extensions;
    for (const VkExtensionProperties& property : physical.EnumerateDeviceExtensionProperties()) {
        for (std::size_t i = 0; i < REQUIRED_EXTENSIONS.size(); ++i) {
            if (available_extensions[i]) {
                continue;
            }
            const std::string_view name{property.extensionName};
            available_extensions[i] = name == REQUIRED_EXTENSIONS[i];
        }
    }
    // Abort on the first required extension that was not found.
    for (size_t i = 0; i < REQUIRED_EXTENSIONS.size(); ++i) {
        if (available_extensions[i]) {
            continue;
        }
        LOG_ERROR(Render_Vulkan, "Missing required extension: {}", REQUIRED_EXTENSIONS[i]);
        throw vk::Exception(VK_ERROR_EXTENSION_NOT_PRESENT);
    }
    // Minimum device limits the renderer depends on.
    struct LimitTuple {
        u32 minimum;     // smallest acceptable value
        u32 value;       // value reported by the device
        const char* name; // limit name, used for logging only
    };
    const VkPhysicalDeviceLimits& limits{properties.limits};
    const std::array limits_report{
        LimitTuple{65536, limits.maxUniformBufferRange, "maxUniformBufferRange"},
        LimitTuple{16, limits.maxViewports, "maxViewports"},
        LimitTuple{8, limits.maxColorAttachments, "maxColorAttachments"},
        LimitTuple{8, limits.maxClipDistances, "maxClipDistances"},
    };
    for (const auto& tuple : limits_report) {
        if (tuple.value < tuple.minimum) {
            LOG_ERROR(Render_Vulkan, "{} has to be {} or greater but it is {}", tuple.name,
                      tuple.minimum, tuple.value);
            throw vk::Exception(VK_ERROR_FEATURE_NOT_PRESENT);
        }
    }
    // Core Vulkan 1.0 features that must be present; each pair is (supported, name).
    const VkPhysicalDeviceFeatures features{physical.GetFeatures()};
    const std::array feature_report{
        std::make_pair(features.vertexPipelineStoresAndAtomics, "vertexPipelineStoresAndAtomics"),
        std::make_pair(features.imageCubeArray, "imageCubeArray"),
        std::make_pair(features.independentBlend, "independentBlend"),
        std::make_pair(features.depthClamp, "depthClamp"),
        std::make_pair(features.samplerAnisotropy, "samplerAnisotropy"),
        std::make_pair(features.largePoints, "largePoints"),
        std::make_pair(features.multiViewport, "multiViewport"),
        std::make_pair(features.depthBiasClamp, "depthBiasClamp"),
        std::make_pair(features.geometryShader, "geometryShader"),
        std::make_pair(features.tessellationShader, "tessellationShader"),
        std::make_pair(features.occlusionQueryPrecise, "occlusionQueryPrecise"),
        std::make_pair(features.fragmentStoresAndAtomics, "fragmentStoresAndAtomics"),
        std::make_pair(features.shaderImageGatherExtended, "shaderImageGatherExtended"),
        std::make_pair(features.shaderStorageImageMultisample, "shaderStorageImageMultisample"),
        std::make_pair(features.shaderStorageImageWriteWithoutFormat,
                       "shaderStorageImageWriteWithoutFormat"),
    };
    for (const auto& [is_supported, name] : feature_report) {
        if (is_supported) {
            continue;
        }
        LOG_ERROR(Render_Vulkan, "Missing required feature: {}", name);
        throw vk::Exception(VK_ERROR_FEATURE_NOT_PRESENT);
    }
}
619
// Builds the list of device extension names to enable at logical-device creation.
// Starts from REQUIRED_EXTENSIONS, then probes optional extensions: trivially-enabled
// ones are pushed directly, while extensions that expose feature structs are only
// enabled after querying their features/properties below. Support flags are recorded
// in the corresponding member booleans as a side effect.
std::vector<const char*> Device::LoadExtensions() {
    std::vector<const char*> extensions;
    extensions.reserve(7 + REQUIRED_EXTENSIONS.size());
    extensions.insert(extensions.begin(), REQUIRED_EXTENSIONS.begin(), REQUIRED_EXTENSIONS.end());

    // Extensions that need a feature/property query before they can be enabled.
    bool has_khr_shader_float16_int8{};
    bool has_ext_subgroup_size_control{};
    bool has_ext_transform_feedback{};
    bool has_ext_custom_border_color{};
    bool has_ext_extended_dynamic_state{};
    bool has_ext_robustness2{};
    for (const VkExtensionProperties& extension : physical.EnumerateDeviceExtensionProperties()) {
        // When 'name' matches the current extension: optionally push it onto the enable
        // list ('push') and set the given status flag ('status') when one is provided.
        const auto test = [&](std::optional<std::reference_wrapper<bool>> status, const char* name,
                              bool push) {
            if (extension.extensionName != std::string_view(name)) {
                return;
            }
            if (push) {
                extensions.push_back(name);
            }
            if (status) {
                status->get() = true;
            }
        };
        test(nv_viewport_swizzle, VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME, true);
        test(khr_uniform_buffer_standard_layout,
             VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME, true);
        test(has_khr_shader_float16_int8, VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME, false);
        test(ext_depth_range_unrestricted, VK_EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME, true);
        test(ext_index_type_uint8, VK_EXT_INDEX_TYPE_UINT8_EXTENSION_NAME, true);
        test(ext_sampler_filter_minmax, VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME, true);
        test(ext_shader_viewport_index_layer, VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME,
             true);
        test(ext_tooling_info, VK_EXT_TOOLING_INFO_EXTENSION_NAME, true);
        test(ext_shader_stencil_export, VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME, true);
        test(has_ext_transform_feedback, VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME, false);
        test(has_ext_custom_border_color, VK_EXT_CUSTOM_BORDER_COLOR_EXTENSION_NAME, false);
        test(has_ext_extended_dynamic_state, VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME, false);
        test(has_ext_robustness2, VK_EXT_ROBUSTNESS_2_EXTENSION_NAME, false);
        test(has_ext_subgroup_size_control, VK_EXT_SUBGROUP_SIZE_CONTROL_EXTENSION_NAME, false);
        // Device diagnostics are only wanted when graphics debugging is enabled.
        if (Settings::values.renderer_debug) {
            test(nv_device_diagnostics_config, VK_NV_DEVICE_DIAGNOSTICS_CONFIG_EXTENSION_NAME,
                 true);
        }
    }

    // Reused query heads; .pNext is (re)assigned immediately before every query below.
    VkPhysicalDeviceFeatures2KHR features;
    features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR;

    VkPhysicalDeviceProperties2KHR physical_properties;
    physical_properties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR;

    if (has_khr_shader_float16_int8) {
        VkPhysicalDeviceFloat16Int8FeaturesKHR float16_int8_features;
        float16_int8_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR;
        float16_int8_features.pNext = nullptr;
        features.pNext = &float16_int8_features;

        physical.GetFeatures2KHR(features);
        is_float16_supported = float16_int8_features.shaderFloat16;
        extensions.push_back(VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME);
    }

    if (has_ext_subgroup_size_control) {
        VkPhysicalDeviceSubgroupSizeControlFeaturesEXT subgroup_features;
        subgroup_features.sType =
            VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT;
        subgroup_features.pNext = nullptr;
        features.pNext = &subgroup_features;
        physical.GetFeatures2KHR(features);

        VkPhysicalDeviceSubgroupSizeControlPropertiesEXT subgroup_properties;
        subgroup_properties.sType =
            VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT;
        subgroup_properties.pNext = nullptr;
        physical_properties.pNext = &subgroup_properties;
        physical.GetProperties2KHR(physical_properties);

        is_warp_potentially_bigger = subgroup_properties.maxSubgroupSize > GuestWarpSize;

        // Only enable the extension when the guest's 32-thread warp fits in the
        // device's supported subgroup size range.
        if (subgroup_features.subgroupSizeControl &&
            subgroup_properties.minSubgroupSize <= GuestWarpSize &&
            subgroup_properties.maxSubgroupSize >= GuestWarpSize) {
            extensions.push_back(VK_EXT_SUBGROUP_SIZE_CONTROL_EXTENSION_NAME);
            guest_warp_stages = subgroup_properties.requiredSubgroupSizeStages;
        }
    } else {
        // Without the extension the real subgroup size is unknown; assume the worst.
        is_warp_potentially_bigger = true;
    }

    if (has_ext_transform_feedback) {
        VkPhysicalDeviceTransformFeedbackFeaturesEXT tfb_features;
        tfb_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT;
        tfb_features.pNext = nullptr;
        features.pNext = &tfb_features;
        physical.GetFeatures2KHR(features);

        VkPhysicalDeviceTransformFeedbackPropertiesEXT tfb_properties;
        tfb_properties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT;
        tfb_properties.pNext = nullptr;
        physical_properties.pNext = &tfb_properties;
        physical.GetProperties2KHR(physical_properties);

        if (tfb_features.transformFeedback && tfb_features.geometryStreams &&
            tfb_properties.maxTransformFeedbackStreams >= 4 &&
            tfb_properties.maxTransformFeedbackBuffers && tfb_properties.transformFeedbackQueries &&
            tfb_properties.transformFeedbackDraw) {
            extensions.push_back(VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME);
            ext_transform_feedback = true;
        }
    }

    if (has_ext_custom_border_color) {
        VkPhysicalDeviceCustomBorderColorFeaturesEXT border_features;
        border_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT;
        border_features.pNext = nullptr;
        features.pNext = &border_features;
        physical.GetFeatures2KHR(features);

        if (border_features.customBorderColors && border_features.customBorderColorWithoutFormat) {
            extensions.push_back(VK_EXT_CUSTOM_BORDER_COLOR_EXTENSION_NAME);
            ext_custom_border_color = true;
        }
    }

    if (has_ext_extended_dynamic_state) {
        VkPhysicalDeviceExtendedDynamicStateFeaturesEXT dynamic_state;
        dynamic_state.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT;
        dynamic_state.pNext = nullptr;
        features.pNext = &dynamic_state;
        physical.GetFeatures2KHR(features);

        if (dynamic_state.extendedDynamicState) {
            extensions.push_back(VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME);
            ext_extended_dynamic_state = true;
        }
    }

    if (has_ext_robustness2) {
        VkPhysicalDeviceRobustness2FeaturesEXT robustness2;
        robustness2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT;
        robustness2.pNext = nullptr;
        features.pNext = &robustness2;
        physical.GetFeatures2KHR(features);
        if (robustness2.nullDescriptor && robustness2.robustImageAccess2) {
            extensions.push_back(VK_EXT_ROBUSTNESS_2_EXTENSION_NAME);
            ext_robustness2 = true;
        }
    }

    return extensions;
}
772
773void Device::SetupFamilies(VkSurfaceKHR surface) {
774 const std::vector queue_family_properties = physical.GetQueueFamilyProperties();
775 std::optional<u32> graphics;
776 std::optional<u32> present;
777 for (u32 index = 0; index < static_cast<u32>(queue_family_properties.size()); ++index) {
778 if (graphics && (present || !surface)) {
779 break;
780 }
781 const VkQueueFamilyProperties& queue_family = queue_family_properties[index];
782 if (queue_family.queueCount == 0) {
783 continue;
784 }
785 if (queue_family.queueFlags & VK_QUEUE_GRAPHICS_BIT) {
786 graphics = index;
787 }
788 if (surface && physical.GetSurfaceSupportKHR(index, surface)) {
789 present = index;
790 }
791 }
792 if (!graphics) {
793 LOG_ERROR(Render_Vulkan, "Device lacks a graphics queue");
794 throw vk::Exception(VK_ERROR_FEATURE_NOT_PRESENT);
795 }
796 if (surface && !present) {
797 LOG_ERROR(Render_Vulkan, "Device lacks a present queue");
798 throw vk::Exception(VK_ERROR_FEATURE_NOT_PRESENT);
799 }
800 graphics_family = *graphics;
801 present_family = *present;
802}
803
804void Device::SetupFeatures() {
805 const auto supported_features{physical.GetFeatures()};
806 is_formatless_image_load_supported = supported_features.shaderStorageImageReadWithoutFormat;
807 is_blit_depth_stencil_supported = TestDepthStencilBlits();
808 is_optimal_astc_supported = IsOptimalAstcSupported(supported_features);
809}
810
811void Device::CollectTelemetryParameters() {
812 VkPhysicalDeviceDriverPropertiesKHR driver{
813 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR,
814 .pNext = nullptr,
815 .driverID = {},
816 .driverName = {},
817 .driverInfo = {},
818 .conformanceVersion = {},
819 };
820
821 VkPhysicalDeviceProperties2KHR device_properties{
822 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR,
823 .pNext = &driver,
824 .properties = {},
825 };
826 physical.GetProperties2KHR(device_properties);
827
828 driver_id = driver.driverID;
829 vendor_name = driver.driverName;
830
831 const std::vector extensions = physical.EnumerateDeviceExtensionProperties();
832 reported_extensions.reserve(std::size(extensions));
833 for (const auto& extension : extensions) {
834 reported_extensions.emplace_back(extension.extensionName);
835 }
836}
837
// Detects attached debugging tools (RenderDoc, Nsight Graphics) through
// VK_EXT_tooling_info and records the result in has_renderdoc/has_nsight_graphics.
void Device::CollectToolingInfo() {
    if (!ext_tooling_info) {
        return;
    }
    // The entry point is fetched manually through vkGetInstanceProcAddr because the
    // dispatch wrapper does not load it.
    const auto vkGetPhysicalDeviceToolPropertiesEXT =
        reinterpret_cast<PFN_vkGetPhysicalDeviceToolPropertiesEXT>(
            dld.vkGetInstanceProcAddr(instance, "vkGetPhysicalDeviceToolPropertiesEXT"));
    if (!vkGetPhysicalDeviceToolPropertiesEXT) {
        return;
    }
    // Standard Vulkan two-call enumeration: query the count, then fill the vector.
    u32 tool_count = 0;
    if (vkGetPhysicalDeviceToolPropertiesEXT(physical, &tool_count, nullptr) != VK_SUCCESS) {
        return;
    }
    std::vector<VkPhysicalDeviceToolPropertiesEXT> tools(tool_count);
    if (vkGetPhysicalDeviceToolPropertiesEXT(physical, &tool_count, tools.data()) != VK_SUCCESS) {
        return;
    }
    // Match against the tool names reported by the driver.
    for (const VkPhysicalDeviceToolPropertiesEXT& tool : tools) {
        const std::string_view name = tool.name;
        LOG_INFO(Render_Vulkan, "{}", name);
        has_renderdoc = has_renderdoc || name == "RenderDoc";
        has_nsight_graphics = has_nsight_graphics || name == "NVIDIA Nsight Graphics";
    }
}
863
864std::vector<VkDeviceQueueCreateInfo> Device::GetDeviceQueueCreateInfos() const {
865 static constexpr float QUEUE_PRIORITY = 1.0f;
866
867 std::unordered_set<u32> unique_queue_families{graphics_family, present_family};
868 std::vector<VkDeviceQueueCreateInfo> queue_cis;
869 queue_cis.reserve(unique_queue_families.size());
870
871 for (const u32 queue_family : unique_queue_families) {
872 auto& ci = queue_cis.emplace_back(VkDeviceQueueCreateInfo{
873 .sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
874 .pNext = nullptr,
875 .flags = 0,
876 .queueFamilyIndex = queue_family,
877 .queueCount = 1,
878 .pQueuePriorities = nullptr,
879 });
880 ci.pQueuePriorities = &QUEUE_PRIORITY;
881 }
882
883 return queue_cis;
884}
885
886} // namespace Vulkan
diff --git a/src/video_core/vulkan_common/vulkan_device.h b/src/video_core/vulkan_common/vulkan_device.h
new file mode 100644
index 000000000..a973c3ce4
--- /dev/null
+++ b/src/video_core/vulkan_common/vulkan_device.h
@@ -0,0 +1,307 @@
1// Copyright 2018 yuzu Emulator Project
2// Licensed under GPLv2 or any later version
3// Refer to the license.txt file included.
4
5#pragma once
6
7#include <string>
8#include <string_view>
9#include <unordered_map>
10#include <vector>
11
12#include "common/common_types.h"
13#include "video_core/vulkan_common/vulkan_wrapper.h"
14
15namespace Vulkan {
16
17class NsightAftermathTracker;
18
19/// Format usage descriptor.
20enum class FormatType { Linear, Optimal, Buffer };
21
22/// Subgroup size of the guest emulated hardware (Nvidia has 32 threads per subgroup).
23const u32 GuestWarpSize = 32;
24
25/// Handles data specific to a physical device.
/// Handles data specific to a physical device: capability queries, queue selection,
/// optional-extension probing and telemetry collection for a Vulkan device.
class Device final {
public:
    explicit Device(VkInstance instance, vk::PhysicalDevice physical, VkSurfaceKHR surface,
                    const vk::InstanceDispatch& dld);
    ~Device();

    /**
     * Returns a format supported by the device for the passed requirements.
     * @param wanted_format The ideal format to be returned. It may not be the returned format.
     * @param wanted_usage The usage that must be fulfilled even if the format is not supported.
     * @param format_type Format type usage.
     * @returns A format supported by the device.
     */
    VkFormat GetSupportedFormat(VkFormat wanted_format, VkFormatFeatureFlags wanted_usage,
                                FormatType format_type) const;

    /// Reports a device loss.
    void ReportLoss() const;

    /// Reports a shader to Nsight Aftermath.
    void SaveShader(const std::vector<u32>& spirv) const;

    /// Returns the dispatch loader with direct function pointers of the device.
    const vk::DeviceDispatch& GetDispatchLoader() const {
        return dld;
    }

    /// Returns the logical device.
    const vk::Device& GetLogical() const {
        return logical;
    }

    /// Returns the physical device.
    vk::PhysicalDevice GetPhysical() const {
        return physical;
    }

    /// Returns the main graphics queue.
    vk::Queue GetGraphicsQueue() const {
        return graphics_queue;
    }

    /// Returns the main present queue.
    vk::Queue GetPresentQueue() const {
        return present_queue;
    }

    /// Returns main graphics queue family index.
    u32 GetGraphicsFamily() const {
        return graphics_family;
    }

    /// Returns main present queue family index.
    u32 GetPresentFamily() const {
        return present_family;
    }

    /// Returns the current Vulkan API version provided in Vulkan-formatted version numbers.
    u32 ApiVersion() const {
        return properties.apiVersion;
    }

    /// Returns the current driver version provided in Vulkan-formatted version numbers.
    u32 GetDriverVersion() const {
        return properties.driverVersion;
    }

    /// Returns the device name.
    std::string_view GetModelName() const {
        return properties.deviceName;
    }

    /// Returns the driver ID.
    VkDriverIdKHR GetDriverID() const {
        return driver_id;
    }

    /// Returns uniform buffer alignment requirement.
    VkDeviceSize GetUniformBufferAlignment() const {
        return properties.limits.minUniformBufferOffsetAlignment;
    }

    /// Returns storage alignment requirement.
    VkDeviceSize GetStorageBufferAlignment() const {
        return properties.limits.minStorageBufferOffsetAlignment;
    }

    /// Returns the maximum range for storage buffers.
    VkDeviceSize GetMaxStorageBufferRange() const {
        return properties.limits.maxStorageBufferRange;
    }

    /// Returns the maximum size for push constants.
    VkDeviceSize GetMaxPushConstantsSize() const {
        return properties.limits.maxPushConstantsSize;
    }

    /// Returns the maximum size for shared memory.
    u32 GetMaxComputeSharedMemorySize() const {
        return properties.limits.maxComputeSharedMemorySize;
    }

    /// Returns true if ASTC is natively supported.
    bool IsOptimalAstcSupported() const {
        return is_optimal_astc_supported;
    }

    /// Returns true if the device supports float16 natively
    bool IsFloat16Supported() const {
        return is_float16_supported;
    }

    /// Returns true if the device warp size can potentially be bigger than guest's warp size.
    bool IsWarpSizePotentiallyBiggerThanGuest() const {
        return is_warp_potentially_bigger;
    }

    /// Returns true if the device can be forced to use the guest warp size.
    bool IsGuestWarpSizeSupported(VkShaderStageFlagBits stage) const {
        return guest_warp_stages & stage;
    }

    /// Returns true if formatless image load is supported.
    bool IsFormatlessImageLoadSupported() const {
        return is_formatless_image_load_supported;
    }

    /// Returns true when blitting from and to depth stencil images is supported.
    bool IsBlitDepthStencilSupported() const {
        return is_blit_depth_stencil_supported;
    }

    /// Returns true if the device supports VK_NV_viewport_swizzle.
    bool IsNvViewportSwizzleSupported() const {
        return nv_viewport_swizzle;
    }

    /// Returns true if the device supports VK_KHR_uniform_buffer_standard_layout.
    bool IsKhrUniformBufferStandardLayoutSupported() const {
        return khr_uniform_buffer_standard_layout;
    }

    /// Returns true if the device supports VK_EXT_index_type_uint8.
    bool IsExtIndexTypeUint8Supported() const {
        return ext_index_type_uint8;
    }

    /// Returns true if the device supports VK_EXT_sampler_filter_minmax.
    bool IsExtSamplerFilterMinmaxSupported() const {
        return ext_sampler_filter_minmax;
    }

    /// Returns true if the device supports VK_EXT_depth_range_unrestricted.
    bool IsExtDepthRangeUnrestrictedSupported() const {
        return ext_depth_range_unrestricted;
    }

    /// Returns true if the device supports VK_EXT_shader_viewport_index_layer.
    bool IsExtShaderViewportIndexLayerSupported() const {
        return ext_shader_viewport_index_layer;
    }

    /// Returns true if the device supports VK_EXT_transform_feedback.
    bool IsExtTransformFeedbackSupported() const {
        return ext_transform_feedback;
    }

    /// Returns true if the device supports VK_EXT_custom_border_color.
    bool IsExtCustomBorderColorSupported() const {
        return ext_custom_border_color;
    }

    /// Returns true if the device supports VK_EXT_extended_dynamic_state.
    bool IsExtExtendedDynamicStateSupported() const {
        return ext_extended_dynamic_state;
    }

    /// Returns true if the device supports VK_EXT_shader_stencil_export.
    bool IsExtShaderStencilExportSupported() const {
        return ext_shader_stencil_export;
    }

    /// Returns true when a known debugging tool is attached.
    bool HasDebuggingToolAttached() const {
        return has_renderdoc || has_nsight_graphics;
    }

    /// Returns the vendor name reported from Vulkan.
    std::string_view GetVendorName() const {
        return vendor_name;
    }

    /// Returns the list of available extensions.
    const std::vector<std::string>& GetAvailableExtensions() const {
        return reported_extensions;
    }

    /// Returns true if the setting for async shader compilation is enabled.
    bool UseAsynchronousShaders() const {
        return use_asynchronous_shaders;
    }

private:
    /// Checks if the physical device is suitable.
    void CheckSuitability() const;

    /// Loads extensions into a vector and stores available ones in this object.
    std::vector<const char*> LoadExtensions();

    /// Sets up queue families.
    void SetupFamilies(VkSurfaceKHR surface);

    /// Sets up device features.
    void SetupFeatures();

    /// Collects telemetry information from the device.
    void CollectTelemetryParameters();

    /// Collects information about attached tools.
    void CollectToolingInfo();

    /// Returns a list of queue initialization descriptors.
    std::vector<VkDeviceQueueCreateInfo> GetDeviceQueueCreateInfos() const;

    /// Returns true if ASTC textures are natively supported.
    bool IsOptimalAstcSupported(const VkPhysicalDeviceFeatures& features) const;

    /// Returns true if the device natively supports blitting depth stencil images.
    bool TestDepthStencilBlits() const;

    /// Returns true if a format is supported.
    bool IsFormatSupported(VkFormat wanted_format, VkFormatFeatureFlags wanted_usage,
                           FormatType format_type) const;

    VkInstance instance;                   ///< Vulkan instance.
    vk::DeviceDispatch dld;                ///< Device function pointers.
    vk::PhysicalDevice physical;           ///< Physical device.
    VkPhysicalDeviceProperties properties; ///< Device properties.
    vk::Device logical;                    ///< Logical device.
    vk::Queue graphics_queue;              ///< Main graphics queue.
    vk::Queue present_queue;               ///< Main present queue.
    u32 instance_version{};                ///< Vulkan instance version.
    u32 graphics_family{};                 ///< Main graphics queue family index.
    u32 present_family{};                  ///< Main present queue family index.
    VkDriverIdKHR driver_id{};             ///< Driver ID.
    VkShaderStageFlags guest_warp_stages{}; ///< Stages where the guest warp size can be forced.
    bool is_optimal_astc_supported{};      ///< Support for native ASTC.
    bool is_float16_supported{};           ///< Support for float16 arithmetics.
    bool is_warp_potentially_bigger{};     ///< Host warp size can be bigger than guest.
    bool is_formatless_image_load_supported{}; ///< Support for shader image read without format.
    bool is_blit_depth_stencil_supported{};    ///< Support for blitting from and to depth stencil.
    bool nv_viewport_swizzle{};                ///< Support for VK_NV_viewport_swizzle.
    bool khr_uniform_buffer_standard_layout{}; ///< Support for std430 on UBOs.
    bool ext_index_type_uint8{};               ///< Support for VK_EXT_index_type_uint8.
    bool ext_sampler_filter_minmax{};          ///< Support for VK_EXT_sampler_filter_minmax.
    bool ext_depth_range_unrestricted{};       ///< Support for VK_EXT_depth_range_unrestricted.
    bool ext_shader_viewport_index_layer{};    ///< Support for VK_EXT_shader_viewport_index_layer.
    bool ext_tooling_info{};                   ///< Support for VK_EXT_tooling_info.
    bool ext_transform_feedback{};             ///< Support for VK_EXT_transform_feedback.
    bool ext_custom_border_color{};            ///< Support for VK_EXT_custom_border_color.
    bool ext_extended_dynamic_state{};         ///< Support for VK_EXT_extended_dynamic_state.
    bool ext_robustness2{};                    ///< Support for VK_EXT_robustness2.
    bool ext_shader_stencil_export{};          ///< Support for VK_EXT_shader_stencil_export.
    bool nv_device_diagnostics_config{};       ///< Support for VK_NV_device_diagnostics_config.
    bool has_renderdoc{};                      ///< Has RenderDoc attached
    bool has_nsight_graphics{};                ///< Has Nsight Graphics attached

    // Asynchronous Graphics Pipeline setting
    bool use_asynchronous_shaders{}; ///< Setting to use asynchronous shaders/graphics pipeline

    // Telemetry parameters
    std::string vendor_name;                      ///< Device's driver name.
    std::vector<std::string> reported_extensions; ///< Reported Vulkan extensions.

    /// Format properties dictionary.
    std::unordered_map<VkFormat, VkFormatProperties> format_properties;

    /// Nsight Aftermath GPU crash tracker
    std::unique_ptr<NsightAftermathTracker> nsight_aftermath_tracker;
};
306
307} // namespace Vulkan
diff --git a/src/video_core/vulkan_common/vulkan_instance.cpp b/src/video_core/vulkan_common/vulkan_instance.cpp
new file mode 100644
index 000000000..889ecda0c
--- /dev/null
+++ b/src/video_core/vulkan_common/vulkan_instance.cpp
@@ -0,0 +1,151 @@
1// Copyright 2020 yuzu Emulator Project
2// Licensed under GPLv2 or any later version
3// Refer to the license.txt file included.
4
#include <algorithm>
#include <cstring>
#include <optional>
#include <span>
#include <utility>
#include <vector>

#include "common/common_types.h"
#include "common/dynamic_library.h"
#include "common/logging/log.h"
#include "core/frontend/emu_window.h"
#include "video_core/vulkan_common/vulkan_instance.h"
#include "video_core/vulkan_common/vulkan_wrapper.h"
17
18// Include these late to avoid polluting previous headers
19#ifdef _WIN32
20#include <windows.h>
21// ensure include order
22#include <vulkan/vulkan_win32.h>
23#endif
24
25#if !defined(_WIN32) && !defined(__APPLE__)
26#include <X11/Xlib.h>
27#include <vulkan/vulkan_wayland.h>
28#include <vulkan/vulkan_xlib.h>
29#endif
30
31namespace Vulkan {
32namespace {
// Returns the instance extensions needed for the given window system. Platform surface
// extensions are compiled in per target; headless builds request no surface support.
[[nodiscard]] std::vector<const char*> RequiredExtensions(
    Core::Frontend::WindowSystemType window_type, bool enable_debug_utils) {
    std::vector<const char*> extensions;
    extensions.reserve(6);
    // Pick the platform-specific surface extension.
    switch (window_type) {
    case Core::Frontend::WindowSystemType::Headless:
        break;
#ifdef _WIN32
    case Core::Frontend::WindowSystemType::Windows:
        extensions.push_back(VK_KHR_WIN32_SURFACE_EXTENSION_NAME);
        break;
#endif
#if !defined(_WIN32) && !defined(__APPLE__)
    case Core::Frontend::WindowSystemType::X11:
        extensions.push_back(VK_KHR_XLIB_SURFACE_EXTENSION_NAME);
        break;
    case Core::Frontend::WindowSystemType::Wayland:
        extensions.push_back(VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME);
        break;
#endif
    default:
        LOG_ERROR(Render_Vulkan, "Presentation not supported on this platform");
        break;
    }
    // Any windowed backend additionally needs the generic surface extension.
    if (window_type != Core::Frontend::WindowSystemType::Headless) {
        extensions.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
    }
    if (enable_debug_utils) {
        extensions.push_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
    }
    // Always requested: used for extended feature/property queries.
    extensions.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    return extensions;
}
66
67[[nodiscard]] bool AreExtensionsSupported(const vk::InstanceDispatch& dld,
68 std::span<const char* const> extensions) {
69 const std::optional properties = vk::EnumerateInstanceExtensionProperties(dld);
70 if (!properties) {
71 LOG_ERROR(Render_Vulkan, "Failed to query extension properties");
72 return false;
73 }
74 for (const char* extension : extensions) {
75 const auto it = std::ranges::find_if(*properties, [extension](const auto& prop) {
76 return std::strcmp(extension, prop.extensionName) == 0;
77 });
78 if (it == properties->end()) {
79 LOG_ERROR(Render_Vulkan, "Required instance extension {} is not available", extension);
80 return false;
81 }
82 }
83 return true;
84}
85
// Returns the list of instance layers to request: only the Khronos validation
// layer, and only when layers are enabled.
[[nodiscard]] std::vector<const char*> Layers(bool enable_layers) {
    if (!enable_layers) {
        return {};
    }
    return {"VK_LAYER_KHRONOS_validation"};
}
93
94void RemoveUnavailableLayers(const vk::InstanceDispatch& dld, std::vector<const char*>& layers) {
95 const std::optional layer_properties = vk::EnumerateInstanceLayerProperties(dld);
96 if (!layer_properties) {
97 LOG_ERROR(Render_Vulkan, "Failed to query layer properties, disabling layers");
98 layers.clear();
99 }
100 std::erase_if(layers, [&layer_properties](const char* layer) {
101 const auto comp = [layer](const VkLayerProperties& layer_property) {
102 return std::strcmp(layer, layer_property.layerName) == 0;
103 };
104 const auto it = std::ranges::find_if(*layer_properties, comp);
105 if (it == layer_properties->end()) {
106 LOG_ERROR(Render_Vulkan, "Layer {} not available, removing it", layer);
107 return true;
108 }
109 return false;
110 });
111}
112} // Anonymous namespace
113
// Creates a Vulkan instance and loads global plus instance-level function pointers into
// 'dld'. The bootstrap order is strict: fetch vkGetInstanceProcAddr from the library,
// load global entry points, verify extensions/layers/version, create the instance, then
// load instance-level entry points. Throws vk::Exception on any failure.
vk::Instance CreateInstance(const Common::DynamicLibrary& library, vk::InstanceDispatch& dld,
                            u32 required_version, Core::Frontend::WindowSystemType window_type,
                            bool enable_debug_utils, bool enable_layers) {
    if (!library.IsOpen()) {
        LOG_ERROR(Render_Vulkan, "Vulkan library not available");
        throw vk::Exception(VK_ERROR_INITIALIZATION_FAILED);
    }
    // vkGetInstanceProcAddr is the only symbol read directly from the shared library;
    // every other entry point is resolved through it.
    if (!library.GetSymbol("vkGetInstanceProcAddr", &dld.vkGetInstanceProcAddr)) {
        LOG_ERROR(Render_Vulkan, "vkGetInstanceProcAddr not present in Vulkan");
        throw vk::Exception(VK_ERROR_INITIALIZATION_FAILED);
    }
    if (!vk::Load(dld)) {
        LOG_ERROR(Render_Vulkan, "Failed to load Vulkan function pointers");
        throw vk::Exception(VK_ERROR_INITIALIZATION_FAILED);
    }
    const std::vector<const char*> extensions = RequiredExtensions(window_type, enable_debug_utils);
    if (!AreExtensionsSupported(dld, extensions)) {
        throw vk::Exception(VK_ERROR_EXTENSION_NOT_PRESENT);
    }
    // Missing layers are dropped rather than fatal.
    std::vector<const char*> layers = Layers(enable_layers);
    RemoveUnavailableLayers(dld, layers);

    const u32 available_version = vk::AvailableVersion(dld);
    if (available_version < required_version) {
        LOG_ERROR(Render_Vulkan, "Vulkan {}.{} is not supported, {}.{} is required",
                  VK_VERSION_MAJOR(available_version), VK_VERSION_MINOR(available_version),
                  VK_VERSION_MAJOR(required_version), VK_VERSION_MINOR(required_version));
        throw vk::Exception(VK_ERROR_INCOMPATIBLE_DRIVER);
    }
    vk::Instance instance = vk::Instance::Create(required_version, layers, extensions, dld);
    // Instance-level pointers can only be loaded after the instance exists.
    if (!vk::Load(*instance, dld)) {
        LOG_ERROR(Render_Vulkan, "Failed to load Vulkan instance function pointers");
        throw vk::Exception(VK_ERROR_INITIALIZATION_FAILED);
    }
    return instance;
}
150
151} // namespace Vulkan
diff --git a/src/video_core/vulkan_common/vulkan_instance.h b/src/video_core/vulkan_common/vulkan_instance.h
new file mode 100644
index 000000000..e5e3a7144
--- /dev/null
+++ b/src/video_core/vulkan_common/vulkan_instance.h
@@ -0,0 +1,32 @@
1// Copyright 2020 yuzu Emulator Project
2// Licensed under GPLv2 or any later version
3// Refer to the license.txt file included.
4
5#pragma once
6
7#include "common/common_types.h"
8#include "common/dynamic_library.h"
9#include "core/frontend/emu_window.h"
10#include "video_core/vulkan_common/vulkan_wrapper.h"
11
12namespace Vulkan {
13
14/**
15 * Create a Vulkan instance
16 *
17 * @param library Dynamic library to load the Vulkan instance from
18 * @param dld Dispatch table to load function pointers into
19 * @param required_version Required Vulkan version (for example, VK_API_VERSION_1_1)
20 * @param window_type Window system type's enabled extension
 * @param enable_debug_utils Whether to enable the VK_EXT_debug_utils extension or not
22 * @param enable_layers Whether to enable Vulkan validation layers or not
23 *
24 * @return A new Vulkan instance
25 * @throw vk::Exception on failure
26 */
27[[nodiscard]] vk::Instance CreateInstance(
28 const Common::DynamicLibrary& library, vk::InstanceDispatch& dld, u32 required_version,
29 Core::Frontend::WindowSystemType window_type = Core::Frontend::WindowSystemType::Headless,
30 bool enable_debug_utils = false, bool enable_layers = false);
31
32} // namespace Vulkan
diff --git a/src/video_core/vulkan_common/vulkan_library.cpp b/src/video_core/vulkan_common/vulkan_library.cpp
new file mode 100644
index 000000000..557871d81
--- /dev/null
+++ b/src/video_core/vulkan_common/vulkan_library.cpp
@@ -0,0 +1,36 @@
1// Copyright 2020 yuzu Emulator Project
2// Licensed under GPLv2 or any later version
3// Refer to the license.txt file included.
4
5#include <cstdlib>
6#include <string>
7
8#include "common/dynamic_library.h"
9#include "common/file_util.h"
10#include "video_core/vulkan_common/vulkan_library.h"
11
12namespace Vulkan {
13
14Common::DynamicLibrary OpenLibrary() {
15 Common::DynamicLibrary library;
16#ifdef __APPLE__
17 // Check if a path to a specific Vulkan library has been specified.
18 char* const libvulkan_env = std::getenv("LIBVULKAN_PATH");
19 if (!libvulkan_env || !library.Open(libvulkan_env)) {
20 // Use the libvulkan.dylib from the application bundle.
21 const std::string filename =
22 Common::FS::GetBundleDirectory() + "/Contents/Frameworks/libvulkan.dylib";
23 void(library.Open(filename.c_str()));
24 }
25#else
26 std::string filename = Common::DynamicLibrary::GetVersionedFilename("vulkan", 1);
27 if (!library.Open(filename.c_str())) {
28 // Android devices may not have libvulkan.so.1, only libvulkan.so.
29 filename = Common::DynamicLibrary::GetVersionedFilename("vulkan");
30 void(library.Open(filename.c_str()));
31 }
32#endif
33 return library;
34}
35
36} // namespace Vulkan
diff --git a/src/video_core/vulkan_common/vulkan_library.h b/src/video_core/vulkan_common/vulkan_library.h
new file mode 100644
index 000000000..8b28b0e17
--- /dev/null
+++ b/src/video_core/vulkan_common/vulkan_library.h
@@ -0,0 +1,13 @@
1// Copyright 2020 yuzu Emulator Project
2// Licensed under GPLv2 or any later version
3// Refer to the license.txt file included.
4
5#pragma once
6
7#include "common/dynamic_library.h"
8
9namespace Vulkan {
10
11Common::DynamicLibrary OpenLibrary();
12
13} // namespace Vulkan
diff --git a/src/video_core/vulkan_common/vulkan_surface.cpp b/src/video_core/vulkan_common/vulkan_surface.cpp
new file mode 100644
index 000000000..3c3238f96
--- /dev/null
+++ b/src/video_core/vulkan_common/vulkan_surface.cpp
@@ -0,0 +1,81 @@
1// Copyright 2020 yuzu Emulator Project
2// Licensed under GPLv2 or any later version
3// Refer to the license.txt file included.
4
5#include "common/logging/log.h"
6#include "core/frontend/emu_window.h"
7#include "video_core/vulkan_common/vulkan_surface.h"
8#include "video_core/vulkan_common/vulkan_wrapper.h"
9
10// Include these late to avoid polluting previous headers
11#ifdef _WIN32
12#include <windows.h>
13// ensure include order
14#include <vulkan/vulkan_win32.h>
15#endif
16
17#if !defined(_WIN32) && !defined(__APPLE__)
18#include <X11/Xlib.h>
19#include <vulkan/vulkan_wayland.h>
20#include <vulkan/vulkan_xlib.h>
21#endif
22
23namespace Vulkan {
24
// Creates a presentation surface for the platform's window system (Win32, Xlib or Wayland).
// Throws vk::Exception(VK_ERROR_INITIALIZATION_FAILED) when surface creation fails or the
// platform has no supported window system.
vk::SurfaceKHR CreateSurface(const vk::Instance& instance,
                             const Core::Frontend::EmuWindow& emu_window) {
    // Marked maybe_unused because on some platform configurations no branch below uses them.
    [[maybe_unused]] const vk::InstanceDispatch& dld = instance.Dispatch();
    [[maybe_unused]] const auto& window_info = emu_window.GetWindowInfo();
    VkSurfaceKHR unsafe_surface = nullptr;

#ifdef _WIN32
    if (window_info.type == Core::Frontend::WindowSystemType::Windows) {
        const HWND hWnd = static_cast<HWND>(window_info.render_surface);
        // Positional init: sType, pNext, flags, hinstance (null), hwnd.
        // NOTE(review): hinstance is left null here — drivers accept it, but confirm.
        const VkWin32SurfaceCreateInfoKHR win32_ci{VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR,
                                                   nullptr, 0, nullptr, hWnd};
        // Surface entry points are loaded lazily; they only exist when the
        // corresponding platform extension was enabled on the instance.
        const auto vkCreateWin32SurfaceKHR = reinterpret_cast<PFN_vkCreateWin32SurfaceKHR>(
            dld.vkGetInstanceProcAddr(*instance, "vkCreateWin32SurfaceKHR"));
        if (!vkCreateWin32SurfaceKHR ||
            vkCreateWin32SurfaceKHR(*instance, &win32_ci, nullptr, &unsafe_surface) != VK_SUCCESS) {
            LOG_ERROR(Render_Vulkan, "Failed to initialize Win32 surface");
            throw vk::Exception(VK_ERROR_INITIALIZATION_FAILED);
        }
    }
#endif
#if !defined(_WIN32) && !defined(__APPLE__)
    if (window_info.type == Core::Frontend::WindowSystemType::X11) {
        const VkXlibSurfaceCreateInfoKHR xlib_ci{
            VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR, nullptr, 0,
            static_cast<Display*>(window_info.display_connection),
            reinterpret_cast<Window>(window_info.render_surface)};
        const auto vkCreateXlibSurfaceKHR = reinterpret_cast<PFN_vkCreateXlibSurfaceKHR>(
            dld.vkGetInstanceProcAddr(*instance, "vkCreateXlibSurfaceKHR"));
        if (!vkCreateXlibSurfaceKHR ||
            vkCreateXlibSurfaceKHR(*instance, &xlib_ci, nullptr, &unsafe_surface) != VK_SUCCESS) {
            LOG_ERROR(Render_Vulkan, "Failed to initialize Xlib surface");
            throw vk::Exception(VK_ERROR_INITIALIZATION_FAILED);
        }
    }
    if (window_info.type == Core::Frontend::WindowSystemType::Wayland) {
        const VkWaylandSurfaceCreateInfoKHR wayland_ci{
            VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR, nullptr, 0,
            static_cast<wl_display*>(window_info.display_connection),
            static_cast<wl_surface*>(window_info.render_surface)};
        const auto vkCreateWaylandSurfaceKHR = reinterpret_cast<PFN_vkCreateWaylandSurfaceKHR>(
            dld.vkGetInstanceProcAddr(*instance, "vkCreateWaylandSurfaceKHR"));
        if (!vkCreateWaylandSurfaceKHR ||
            vkCreateWaylandSurfaceKHR(*instance, &wayland_ci, nullptr, &unsafe_surface) !=
                VK_SUCCESS) {
            LOG_ERROR(Render_Vulkan, "Failed to initialize Wayland surface");
            throw vk::Exception(VK_ERROR_INITIALIZATION_FAILED);
        }
    }
#endif
    // No branch produced a surface: presentation is unsupported on this build/platform.
    if (!unsafe_surface) {
        LOG_ERROR(Render_Vulkan, "Presentation not supported on this platform");
        throw vk::Exception(VK_ERROR_INITIALIZATION_FAILED);
    }
    return vk::SurfaceKHR(unsafe_surface, *instance, dld);
}
80
81} // namespace Vulkan
diff --git a/src/video_core/vulkan_common/vulkan_surface.h b/src/video_core/vulkan_common/vulkan_surface.h
new file mode 100644
index 000000000..05a169e32
--- /dev/null
+++ b/src/video_core/vulkan_common/vulkan_surface.h
@@ -0,0 +1,18 @@
1// Copyright 2020 yuzu Emulator Project
2// Licensed under GPLv2 or any later version
3// Refer to the license.txt file included.
4
5#pragma once
6
7#include "video_core/vulkan_common/vulkan_wrapper.h"
8
9namespace Core::Frontend {
10class EmuWindow;
11}
12
13namespace Vulkan {
14
15[[nodiscard]] vk::SurfaceKHR CreateSurface(const vk::Instance& instance,
16 const Core::Frontend::EmuWindow& emu_window);
17
18} // namespace Vulkan
diff --git a/src/video_core/vulkan_common/vulkan_wrapper.cpp b/src/video_core/vulkan_common/vulkan_wrapper.cpp
new file mode 100644
index 000000000..5e15ad607
--- /dev/null
+++ b/src/video_core/vulkan_common/vulkan_wrapper.cpp
@@ -0,0 +1,900 @@
1// Copyright 2020 yuzu Emulator Project
2// Licensed under GPLv2 or any later version
3// Refer to the license.txt file included.
4
5#include <algorithm>
6#include <exception>
7#include <memory>
8#include <optional>
9#include <string_view>
10#include <utility>
11#include <vector>
12
13#include "common/common_types.h"
14#include "common/logging/log.h"
15
16#include "video_core/vulkan_common/vulkan_wrapper.h"
17
18namespace Vulkan::vk {
19
20namespace {
21
22template <typename Func>
23void SortPhysicalDevices(std::vector<VkPhysicalDevice>& devices, const InstanceDispatch& dld,
24 Func&& func) {
25 // Calling GetProperties calls Vulkan more than needed. But they are supposed to be cheap
26 // functions.
27 std::stable_sort(devices.begin(), devices.end(),
28 [&dld, &func](VkPhysicalDevice lhs, VkPhysicalDevice rhs) {
29 return func(vk::PhysicalDevice(lhs, dld).GetProperties(),
30 vk::PhysicalDevice(rhs, dld).GetProperties());
31 });
32}
33
34void SortPhysicalDevicesPerVendor(std::vector<VkPhysicalDevice>& devices,
35 const InstanceDispatch& dld,
36 std::initializer_list<u32> vendor_ids) {
37 for (auto it = vendor_ids.end(); it != vendor_ids.begin();) {
38 --it;
39 SortPhysicalDevices(devices, dld, [id = *it](const auto& lhs, const auto& rhs) {
40 return lhs.vendorID == id && rhs.vendorID != id;
41 });
42 }
43}
44
45void SortPhysicalDevices(std::vector<VkPhysicalDevice>& devices, const InstanceDispatch& dld) {
46 // Sort by name, this will set a base and make GPUs with higher numbers appear first
47 // (e.g. GTX 1650 will intentionally be listed before a GTX 1080).
48 SortPhysicalDevices(devices, dld, [](const auto& lhs, const auto& rhs) {
49 return std::string_view{lhs.deviceName} > std::string_view{rhs.deviceName};
50 });
51 // Prefer discrete over non-discrete
52 SortPhysicalDevices(devices, dld, [](const auto& lhs, const auto& rhs) {
53 return lhs.deviceType == VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU &&
54 rhs.deviceType != VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU;
55 });
56 // Prefer Nvidia over AMD, AMD over Intel, Intel over the rest.
57 SortPhysicalDevicesPerVendor(devices, dld, {0x10DE, 0x1002, 0x8086});
58}
59
// Loads an instance-level function pointer into `result`.
// Returns true when the loader provided a non-null pointer.
template <typename T>
bool Proc(T& result, const InstanceDispatch& dld, const char* proc_name,
          VkInstance instance = nullptr) noexcept {
    result = reinterpret_cast<T>(dld.vkGetInstanceProcAddr(instance, proc_name));
    return result != nullptr;
}

// Loads a device-level function pointer into `result`.
// No return value: callers tolerate null pointers for optional extensions.
template <typename T>
void Proc(T& result, const DeviceDispatch& dld, const char* proc_name, VkDevice device) noexcept {
    result = reinterpret_cast<T>(dld.vkGetDeviceProcAddr(device, proc_name));
}
71
// Populates the device dispatch table with device-level entry points.
// Extension functions may legitimately load as null when the extension is not
// enabled; callers must check availability before using them.
void Load(VkDevice device, DeviceDispatch& dld) noexcept {
#define X(name) Proc(dld.name, dld, #name, device)
    X(vkAcquireNextImageKHR);
    X(vkAllocateCommandBuffers);
    X(vkAllocateDescriptorSets);
    X(vkAllocateMemory);
    X(vkBeginCommandBuffer);
    X(vkBindBufferMemory);
    X(vkBindImageMemory);
    X(vkCmdBeginQuery);
    X(vkCmdBeginRenderPass);
    X(vkCmdBeginTransformFeedbackEXT);
    X(vkCmdBeginDebugUtilsLabelEXT);
    X(vkCmdBindDescriptorSets);
    X(vkCmdBindIndexBuffer);
    X(vkCmdBindPipeline);
    X(vkCmdBindTransformFeedbackBuffersEXT);
    X(vkCmdBindVertexBuffers);
    X(vkCmdBlitImage);
    X(vkCmdClearAttachments);
    X(vkCmdCopyBuffer);
    X(vkCmdCopyBufferToImage);
    X(vkCmdCopyImage);
    X(vkCmdCopyImageToBuffer);
    X(vkCmdDispatch);
    X(vkCmdDraw);
    X(vkCmdDrawIndexed);
    X(vkCmdEndQuery);
    X(vkCmdEndRenderPass);
    X(vkCmdEndTransformFeedbackEXT);
    X(vkCmdEndDebugUtilsLabelEXT);
    X(vkCmdFillBuffer);
    X(vkCmdPipelineBarrier);
    X(vkCmdPushConstants);
    X(vkCmdSetBlendConstants);
    X(vkCmdSetDepthBias);
    X(vkCmdSetDepthBounds);
    X(vkCmdSetEvent);
    X(vkCmdSetScissor);
    X(vkCmdSetStencilCompareMask);
    X(vkCmdSetStencilReference);
    X(vkCmdSetStencilWriteMask);
    X(vkCmdSetViewport);
    X(vkCmdWaitEvents);
    X(vkCmdBindVertexBuffers2EXT);
    X(vkCmdSetCullModeEXT);
    X(vkCmdSetDepthBoundsTestEnableEXT);
    X(vkCmdSetDepthCompareOpEXT);
    X(vkCmdSetDepthTestEnableEXT);
    X(vkCmdSetDepthWriteEnableEXT);
    X(vkCmdSetFrontFaceEXT);
    X(vkCmdSetPrimitiveTopologyEXT);
    X(vkCmdSetStencilOpEXT);
    X(vkCmdSetStencilTestEnableEXT);
    X(vkCmdResolveImage);
    X(vkCreateBuffer);
    X(vkCreateBufferView);
    X(vkCreateCommandPool);
    X(vkCreateComputePipelines);
    X(vkCreateDescriptorPool);
    X(vkCreateDescriptorSetLayout);
    X(vkCreateDescriptorUpdateTemplateKHR);
    X(vkCreateEvent);
    X(vkCreateFence);
    X(vkCreateFramebuffer);
    X(vkCreateGraphicsPipelines);
    X(vkCreateImage);
    X(vkCreateImageView);
    X(vkCreatePipelineLayout);
    X(vkCreateQueryPool);
    X(vkCreateRenderPass);
    X(vkCreateSampler);
    X(vkCreateSemaphore);
    X(vkCreateShaderModule);
    X(vkCreateSwapchainKHR);
    X(vkDestroyBuffer);
    X(vkDestroyBufferView);
    X(vkDestroyCommandPool);
    X(vkDestroyDescriptorPool);
    X(vkDestroyDescriptorSetLayout);
    X(vkDestroyDescriptorUpdateTemplateKHR);
    X(vkDestroyEvent);
    X(vkDestroyFence);
    X(vkDestroyFramebuffer);
    X(vkDestroyImage);
    X(vkDestroyImageView);
    X(vkDestroyPipeline);
    X(vkDestroyPipelineLayout);
    X(vkDestroyQueryPool);
    X(vkDestroyRenderPass);
    X(vkDestroySampler);
    X(vkDestroySemaphore);
    X(vkDestroyShaderModule);
    X(vkDestroySwapchainKHR);
    X(vkDeviceWaitIdle);
    X(vkEndCommandBuffer);
    X(vkFreeCommandBuffers);
    X(vkFreeDescriptorSets);
    X(vkFreeMemory);
    X(vkGetBufferMemoryRequirements);
    X(vkGetDeviceQueue);
    X(vkGetEventStatus);
    X(vkGetFenceStatus);
    X(vkGetImageMemoryRequirements);
    X(vkGetQueryPoolResults);
    X(vkGetSemaphoreCounterValueKHR);
    X(vkMapMemory);
    X(vkQueueSubmit);
    X(vkResetFences);
    X(vkResetQueryPoolEXT);
    X(vkSetDebugUtilsObjectNameEXT);
    X(vkSetDebugUtilsObjectTagEXT);
    X(vkUnmapMemory);
    X(vkUpdateDescriptorSetWithTemplateKHR);
    X(vkUpdateDescriptorSets);
    X(vkWaitForFences);
    X(vkWaitSemaphoresKHR);
#undef X
}
191
192template <typename T>
193void SetObjectName(const DeviceDispatch* dld, VkDevice device, T handle, VkObjectType type,
194 const char* name) {
195 const VkDebugUtilsObjectNameInfoEXT name_info{
196 .sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT,
197 .pNext = nullptr,
198 .objectType = VK_OBJECT_TYPE_IMAGE,
199 .objectHandle = reinterpret_cast<u64>(handle),
200 .pObjectName = name,
201 };
202 Check(dld->vkSetDebugUtilsObjectNameEXT(device, &name_info));
203}
204
205} // Anonymous namespace
206
// Loads the global (pre-instance) entry points required to create an instance.
// Returns false if any mandatory function is missing from the loader.
bool Load(InstanceDispatch& dld) noexcept {
#define X(name) Proc(dld.name, dld, #name)
    return X(vkCreateInstance) && X(vkEnumerateInstanceExtensionProperties) &&
           X(vkEnumerateInstanceLayerProperties);
#undef X
}
213
214bool Load(VkInstance instance, InstanceDispatch& dld) noexcept {
215#define X(name) Proc(dld.name, dld, #name, instance)
216 // These functions may fail to load depending on the enabled extensions.
217 // Don't return a failure on these.
218 X(vkCreateDebugUtilsMessengerEXT);
219 X(vkDestroyDebugUtilsMessengerEXT);
220 X(vkDestroySurfaceKHR);
221 X(vkGetPhysicalDeviceFeatures2KHR);
222 X(vkGetPhysicalDeviceProperties2KHR);
223 X(vkGetPhysicalDeviceSurfaceCapabilitiesKHR);
224 X(vkGetPhysicalDeviceSurfaceFormatsKHR);
225 X(vkGetPhysicalDeviceSurfacePresentModesKHR);
226 X(vkGetPhysicalDeviceSurfaceSupportKHR);
227 X(vkGetSwapchainImagesKHR);
228 X(vkQueuePresentKHR);
229
230 return X(vkCreateDevice) && X(vkDestroyDevice) && X(vkDestroyDevice) &&
231 X(vkEnumerateDeviceExtensionProperties) && X(vkEnumeratePhysicalDevices) &&
232 X(vkGetDeviceProcAddr) && X(vkGetPhysicalDeviceFormatProperties) &&
233 X(vkGetPhysicalDeviceMemoryProperties) && X(vkGetPhysicalDeviceProperties) &&
234 X(vkGetPhysicalDeviceQueueFamilyProperties);
235#undef X
236}
237
// Returns the stored VkResult as its enumerator name (see ToString below).
const char* Exception::what() const noexcept {
    return ToString(result);
}
241
// Converts a VkResult into its enumerator name for logging and exception messages.
// Unrecognized values (e.g. from newer headers) fall through to "Unknown".
const char* ToString(VkResult result) noexcept {
    switch (result) {
    case VkResult::VK_SUCCESS:
        return "VK_SUCCESS";
    case VkResult::VK_NOT_READY:
        return "VK_NOT_READY";
    case VkResult::VK_TIMEOUT:
        return "VK_TIMEOUT";
    case VkResult::VK_EVENT_SET:
        return "VK_EVENT_SET";
    case VkResult::VK_EVENT_RESET:
        return "VK_EVENT_RESET";
    case VkResult::VK_INCOMPLETE:
        return "VK_INCOMPLETE";
    case VkResult::VK_ERROR_OUT_OF_HOST_MEMORY:
        return "VK_ERROR_OUT_OF_HOST_MEMORY";
    case VkResult::VK_ERROR_OUT_OF_DEVICE_MEMORY:
        return "VK_ERROR_OUT_OF_DEVICE_MEMORY";
    case VkResult::VK_ERROR_INITIALIZATION_FAILED:
        return "VK_ERROR_INITIALIZATION_FAILED";
    case VkResult::VK_ERROR_DEVICE_LOST:
        return "VK_ERROR_DEVICE_LOST";
    case VkResult::VK_ERROR_MEMORY_MAP_FAILED:
        return "VK_ERROR_MEMORY_MAP_FAILED";
    case VkResult::VK_ERROR_LAYER_NOT_PRESENT:
        return "VK_ERROR_LAYER_NOT_PRESENT";
    case VkResult::VK_ERROR_EXTENSION_NOT_PRESENT:
        return "VK_ERROR_EXTENSION_NOT_PRESENT";
    case VkResult::VK_ERROR_FEATURE_NOT_PRESENT:
        return "VK_ERROR_FEATURE_NOT_PRESENT";
    case VkResult::VK_ERROR_INCOMPATIBLE_DRIVER:
        return "VK_ERROR_INCOMPATIBLE_DRIVER";
    case VkResult::VK_ERROR_TOO_MANY_OBJECTS:
        return "VK_ERROR_TOO_MANY_OBJECTS";
    case VkResult::VK_ERROR_FORMAT_NOT_SUPPORTED:
        return "VK_ERROR_FORMAT_NOT_SUPPORTED";
    case VkResult::VK_ERROR_FRAGMENTED_POOL:
        return "VK_ERROR_FRAGMENTED_POOL";
    case VkResult::VK_ERROR_OUT_OF_POOL_MEMORY:
        return "VK_ERROR_OUT_OF_POOL_MEMORY";
    case VkResult::VK_ERROR_INVALID_EXTERNAL_HANDLE:
        return "VK_ERROR_INVALID_EXTERNAL_HANDLE";
    case VkResult::VK_ERROR_SURFACE_LOST_KHR:
        return "VK_ERROR_SURFACE_LOST_KHR";
    case VkResult::VK_ERROR_NATIVE_WINDOW_IN_USE_KHR:
        return "VK_ERROR_NATIVE_WINDOW_IN_USE_KHR";
    case VkResult::VK_SUBOPTIMAL_KHR:
        return "VK_SUBOPTIMAL_KHR";
    case VkResult::VK_ERROR_OUT_OF_DATE_KHR:
        return "VK_ERROR_OUT_OF_DATE_KHR";
    case VkResult::VK_ERROR_INCOMPATIBLE_DISPLAY_KHR:
        return "VK_ERROR_INCOMPATIBLE_DISPLAY_KHR";
    case VkResult::VK_ERROR_VALIDATION_FAILED_EXT:
        return "VK_ERROR_VALIDATION_FAILED_EXT";
    case VkResult::VK_ERROR_INVALID_SHADER_NV:
        return "VK_ERROR_INVALID_SHADER_NV";
    case VkResult::VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT:
        return "VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT";
    case VkResult::VK_ERROR_FRAGMENTATION_EXT:
        return "VK_ERROR_FRAGMENTATION_EXT";
    case VkResult::VK_ERROR_NOT_PERMITTED_EXT:
        return "VK_ERROR_NOT_PERMITTED_EXT";
    case VkResult::VK_ERROR_INVALID_DEVICE_ADDRESS_EXT:
        return "VK_ERROR_INVALID_DEVICE_ADDRESS_EXT";
    case VkResult::VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT:
        return "VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT";
    case VkResult::VK_ERROR_UNKNOWN:
        return "VK_ERROR_UNKNOWN";
    case VkResult::VK_ERROR_INCOMPATIBLE_VERSION_KHR:
        return "VK_ERROR_INCOMPATIBLE_VERSION_KHR";
    case VkResult::VK_THREAD_IDLE_KHR:
        return "VK_THREAD_IDLE_KHR";
    case VkResult::VK_THREAD_DONE_KHR:
        return "VK_THREAD_DONE_KHR";
    case VkResult::VK_OPERATION_DEFERRED_KHR:
        return "VK_OPERATION_DEFERRED_KHR";
    case VkResult::VK_OPERATION_NOT_DEFERRED_KHR:
        return "VK_OPERATION_NOT_DEFERRED_KHR";
    case VkResult::VK_PIPELINE_COMPILE_REQUIRED_EXT:
        return "VK_PIPELINE_COMPILE_REQUIRED_EXT";
    case VkResult::VK_RESULT_MAX_ENUM:
        return "VK_RESULT_MAX_ENUM";
    }
    return "Unknown";
}
327
// Destroy overloads: one per Vulkan handle type, used by the RAII wrapper
// classes to release their owned object. All pass a null allocator callback.
void Destroy(VkInstance instance, const InstanceDispatch& dld) noexcept {
    dld.vkDestroyInstance(instance, nullptr);
}

void Destroy(VkDevice device, const InstanceDispatch& dld) noexcept {
    dld.vkDestroyDevice(device, nullptr);
}

void Destroy(VkDevice device, VkBuffer handle, const DeviceDispatch& dld) noexcept {
    dld.vkDestroyBuffer(device, handle, nullptr);
}

void Destroy(VkDevice device, VkBufferView handle, const DeviceDispatch& dld) noexcept {
    dld.vkDestroyBufferView(device, handle, nullptr);
}

void Destroy(VkDevice device, VkCommandPool handle, const DeviceDispatch& dld) noexcept {
    dld.vkDestroyCommandPool(device, handle, nullptr);
}

void Destroy(VkDevice device, VkDescriptorPool handle, const DeviceDispatch& dld) noexcept {
    dld.vkDestroyDescriptorPool(device, handle, nullptr);
}

void Destroy(VkDevice device, VkDescriptorSetLayout handle, const DeviceDispatch& dld) noexcept {
    dld.vkDestroyDescriptorSetLayout(device, handle, nullptr);
}

void Destroy(VkDevice device, VkDescriptorUpdateTemplateKHR handle,
             const DeviceDispatch& dld) noexcept {
    dld.vkDestroyDescriptorUpdateTemplateKHR(device, handle, nullptr);
}

// Device memory is freed, not destroyed, hence vkFreeMemory.
void Destroy(VkDevice device, VkDeviceMemory handle, const DeviceDispatch& dld) noexcept {
    dld.vkFreeMemory(device, handle, nullptr);
}

void Destroy(VkDevice device, VkEvent handle, const DeviceDispatch& dld) noexcept {
    dld.vkDestroyEvent(device, handle, nullptr);
}

void Destroy(VkDevice device, VkFence handle, const DeviceDispatch& dld) noexcept {
    dld.vkDestroyFence(device, handle, nullptr);
}

void Destroy(VkDevice device, VkFramebuffer handle, const DeviceDispatch& dld) noexcept {
    dld.vkDestroyFramebuffer(device, handle, nullptr);
}

void Destroy(VkDevice device, VkImage handle, const DeviceDispatch& dld) noexcept {
    dld.vkDestroyImage(device, handle, nullptr);
}

void Destroy(VkDevice device, VkImageView handle, const DeviceDispatch& dld) noexcept {
    dld.vkDestroyImageView(device, handle, nullptr);
}

void Destroy(VkDevice device, VkPipeline handle, const DeviceDispatch& dld) noexcept {
    dld.vkDestroyPipeline(device, handle, nullptr);
}

void Destroy(VkDevice device, VkPipelineLayout handle, const DeviceDispatch& dld) noexcept {
    dld.vkDestroyPipelineLayout(device, handle, nullptr);
}

void Destroy(VkDevice device, VkQueryPool handle, const DeviceDispatch& dld) noexcept {
    dld.vkDestroyQueryPool(device, handle, nullptr);
}

void Destroy(VkDevice device, VkRenderPass handle, const DeviceDispatch& dld) noexcept {
    dld.vkDestroyRenderPass(device, handle, nullptr);
}

void Destroy(VkDevice device, VkSampler handle, const DeviceDispatch& dld) noexcept {
    dld.vkDestroySampler(device, handle, nullptr);
}

void Destroy(VkDevice device, VkSwapchainKHR handle, const DeviceDispatch& dld) noexcept {
    dld.vkDestroySwapchainKHR(device, handle, nullptr);
}

void Destroy(VkDevice device, VkSemaphore handle, const DeviceDispatch& dld) noexcept {
    dld.vkDestroySemaphore(device, handle, nullptr);
}

void Destroy(VkDevice device, VkShaderModule handle, const DeviceDispatch& dld) noexcept {
    dld.vkDestroyShaderModule(device, handle, nullptr);
}

void Destroy(VkInstance instance, VkDebugUtilsMessengerEXT handle,
             const InstanceDispatch& dld) noexcept {
    dld.vkDestroyDebugUtilsMessengerEXT(instance, handle, nullptr);
}

void Destroy(VkInstance instance, VkSurfaceKHR handle, const InstanceDispatch& dld) noexcept {
    dld.vkDestroySurfaceKHR(instance, handle, nullptr);
}
425
// Returns descriptor sets to their pool. Propagates the driver's result code.
VkResult Free(VkDevice device, VkDescriptorPool handle, Span<VkDescriptorSet> sets,
              const DeviceDispatch& dld) noexcept {
    return dld.vkFreeDescriptorSets(device, handle, sets.size(), sets.data());
}

// Returns command buffers to their pool. vkFreeCommandBuffers itself returns void,
// so VK_SUCCESS is reported unconditionally to match the Free() interface.
VkResult Free(VkDevice device, VkCommandPool handle, Span<VkCommandBuffer> buffers,
              const DeviceDispatch& dld) noexcept {
    dld.vkFreeCommandBuffers(device, handle, buffers.size(), buffers.data());
    return VK_SUCCESS;
}
436
// Creates a VkInstance with the given API version, layers and extensions,
// and loads vkDestroyInstance so the returned RAII handle can clean up.
// Throws vk::Exception when instance creation or the destroy-loader fails.
Instance Instance::Create(u32 version, Span<const char*> layers, Span<const char*> extensions,
                          InstanceDispatch& dispatch) {
    const VkApplicationInfo application_info{
        .sType = VK_STRUCTURE_TYPE_APPLICATION_INFO,
        .pNext = nullptr,
        .pApplicationName = "yuzu Emulator",
        .applicationVersion = VK_MAKE_VERSION(0, 1, 0),
        .pEngineName = "yuzu Emulator",
        .engineVersion = VK_MAKE_VERSION(0, 1, 0),
        .apiVersion = version,
    };
    const VkInstanceCreateInfo ci{
        .sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
        .pNext = nullptr,
        .flags = 0,
        .pApplicationInfo = &application_info,
        .enabledLayerCount = layers.size(),
        .ppEnabledLayerNames = layers.data(),
        .enabledExtensionCount = extensions.size(),
        .ppEnabledExtensionNames = extensions.data(),
    };
    VkInstance instance;
    Check(dispatch.vkCreateInstance(&ci, nullptr, &instance));
    if (!Proc(dispatch.vkDestroyInstance, dispatch, "vkDestroyInstance", instance)) {
        // We successfully created an instance but the destroy function couldn't be loaded.
        // This is a good moment to panic.
        throw vk::Exception(VK_ERROR_INITIALIZATION_FAILED);
    }
    return Instance(instance, dispatch);
}
467
468std::vector<VkPhysicalDevice> Instance::EnumeratePhysicalDevices() const {
469 u32 num;
470 Check(dld->vkEnumeratePhysicalDevices(handle, &num, nullptr));
471 std::vector<VkPhysicalDevice> physical_devices(num);
472 Check(dld->vkEnumeratePhysicalDevices(handle, &num, physical_devices.data()));
473 SortPhysicalDevices(physical_devices, *dld);
474 return physical_devices;
475}
476
477DebugUtilsMessenger Instance::CreateDebugUtilsMessenger(
478 const VkDebugUtilsMessengerCreateInfoEXT& create_info) const {
479 VkDebugUtilsMessengerEXT object;
480 Check(dld->vkCreateDebugUtilsMessengerEXT(handle, &create_info, nullptr, &object));
481 return DebugUtilsMessenger(object, handle, *dld);
482}
483
// Binds backing device memory to this buffer at the given offset.
void Buffer::BindMemory(VkDeviceMemory memory, VkDeviceSize offset) const {
    Check(dld->vkBindBufferMemory(owner, handle, memory, offset));
}

// SetObjectNameEXT overloads: attach a VK_EXT_debug_utils name with the
// object type matching each wrapper. Requires the extension to be enabled.
void Buffer::SetObjectNameEXT(const char* name) const {
    SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_BUFFER, name);
}

void BufferView::SetObjectNameEXT(const char* name) const {
    SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_BUFFER_VIEW, name);
}

// Binds backing device memory to this image at the given offset.
void Image::BindMemory(VkDeviceMemory memory, VkDeviceSize offset) const {
    Check(dld->vkBindImageMemory(owner, handle, memory, offset));
}

void Image::SetObjectNameEXT(const char* name) const {
    SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_IMAGE, name);
}

void ImageView::SetObjectNameEXT(const char* name) const {
    SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_IMAGE_VIEW, name);
}

void DeviceMemory::SetObjectNameEXT(const char* name) const {
    SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_DEVICE_MEMORY, name);
}

void Fence::SetObjectNameEXT(const char* name) const {
    SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_FENCE, name);
}

void Framebuffer::SetObjectNameEXT(const char* name) const {
    SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_FRAMEBUFFER, name);
}
519
// Allocates descriptor sets from this pool.
// Returns an empty DescriptorSets when the pool is exhausted
// (VK_ERROR_OUT_OF_POOL_MEMORY) so callers can grow/rotate pools;
// throws on any other failure.
DescriptorSets DescriptorPool::Allocate(const VkDescriptorSetAllocateInfo& ai) const {
    const std::size_t num = ai.descriptorSetCount;
    std::unique_ptr sets = std::make_unique<VkDescriptorSet[]>(num);
    switch (const VkResult result = dld->vkAllocateDescriptorSets(owner, &ai, sets.get())) {
    case VK_SUCCESS:
        return DescriptorSets(std::move(sets), num, owner, handle, *dld);
    case VK_ERROR_OUT_OF_POOL_MEMORY:
        return {};
    default:
        throw Exception(result);
    }
}

void DescriptorPool::SetObjectNameEXT(const char* name) const {
    SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_DESCRIPTOR_POOL, name);
}
536
// Allocates command buffers of the given level from this pool.
// Returns an empty CommandBuffers on VK_ERROR_OUT_OF_POOL_MEMORY so callers
// can recover; throws on any other failure.
CommandBuffers CommandPool::Allocate(std::size_t num_buffers, VkCommandBufferLevel level) const {
    const VkCommandBufferAllocateInfo ai{
        .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
        .pNext = nullptr,
        .commandPool = handle,
        .level = level,
        .commandBufferCount = static_cast<u32>(num_buffers),
    };

    std::unique_ptr buffers = std::make_unique<VkCommandBuffer[]>(num_buffers);
    switch (const VkResult result = dld->vkAllocateCommandBuffers(owner, &ai, buffers.get())) {
    case VK_SUCCESS:
        return CommandBuffers(std::move(buffers), num_buffers, owner, handle, *dld);
    case VK_ERROR_OUT_OF_POOL_MEMORY:
        return {};
    default:
        throw Exception(result);
    }
}

void CommandPool::SetObjectNameEXT(const char* name) const {
    SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_COMMAND_POOL, name);
}
560
561std::vector<VkImage> SwapchainKHR::GetImages() const {
562 u32 num;
563 Check(dld->vkGetSwapchainImagesKHR(owner, handle, &num, nullptr));
564 std::vector<VkImage> images(num);
565 Check(dld->vkGetSwapchainImagesKHR(owner, handle, &num, images.data()));
566 return images;
567}
568
// VK_EXT_debug_utils naming for the remaining wrapper types.
void Event::SetObjectNameEXT(const char* name) const {
    SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_EVENT, name);
}

void ShaderModule::SetObjectNameEXT(const char* name) const {
    SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_SHADER_MODULE, name);
}

void Semaphore::SetObjectNameEXT(const char* name) const {
    SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_SEMAPHORE, name);
}
580
// Creates a logical device with the given queues and extensions, then loads the
// device-level dispatch table. `next` is chained into pNext (feature structs).
// Features are passed through the pNext chain, hence pEnabledFeatures = nullptr.
Device Device::Create(VkPhysicalDevice physical_device, Span<VkDeviceQueueCreateInfo> queues_ci,
                      Span<const char*> enabled_extensions, const void* next,
                      DeviceDispatch& dispatch) {
    const VkDeviceCreateInfo ci{
        .sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
        .pNext = next,
        .flags = 0,
        .queueCreateInfoCount = queues_ci.size(),
        .pQueueCreateInfos = queues_ci.data(),
        // Device layers are deprecated; instance layers cover validation.
        .enabledLayerCount = 0,
        .ppEnabledLayerNames = nullptr,
        .enabledExtensionCount = enabled_extensions.size(),
        .ppEnabledExtensionNames = enabled_extensions.data(),
        .pEnabledFeatures = nullptr,
    };
    VkDevice device;
    Check(dispatch.vkCreateDevice(physical_device, &ci, nullptr, &device));
    Load(device, dispatch);
    return Device(device, dispatch);
}
601
// Retrieves queue 0 of the given family. Queues are owned by the device, so the
// returned wrapper is non-owning.
Queue Device::GetQueue(u32 family_index) const noexcept {
    VkQueue queue;
    dld->vkGetDeviceQueue(handle, family_index, 0, &queue);
    return Queue(queue, *dld);
}

// The Create* factories below all follow the same pattern: call the matching
// vkCreate* entry point, Check() the result (throwing vk::Exception on failure),
// and wrap the raw handle in an owning RAII object.
Buffer Device::CreateBuffer(const VkBufferCreateInfo& ci) const {
    VkBuffer object;
    Check(dld->vkCreateBuffer(handle, &ci, nullptr, &object));
    return Buffer(object, handle, *dld);
}

BufferView Device::CreateBufferView(const VkBufferViewCreateInfo& ci) const {
    VkBufferView object;
    Check(dld->vkCreateBufferView(handle, &ci, nullptr, &object));
    return BufferView(object, handle, *dld);
}

Image Device::CreateImage(const VkImageCreateInfo& ci) const {
    VkImage object;
    Check(dld->vkCreateImage(handle, &ci, nullptr, &object));
    return Image(object, handle, *dld);
}

ImageView Device::CreateImageView(const VkImageViewCreateInfo& ci) const {
    VkImageView object;
    Check(dld->vkCreateImageView(handle, &ci, nullptr, &object));
    return ImageView(object, handle, *dld);
}

// Convenience overload: creates a plain binary semaphore with default flags.
Semaphore Device::CreateSemaphore() const {
    static constexpr VkSemaphoreCreateInfo ci{
        .sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
        .pNext = nullptr,
        .flags = 0,
    };
    return CreateSemaphore(ci);
}

Semaphore Device::CreateSemaphore(const VkSemaphoreCreateInfo& ci) const {
    VkSemaphore object;
    Check(dld->vkCreateSemaphore(handle, &ci, nullptr, &object));
    return Semaphore(object, handle, *dld);
}

Fence Device::CreateFence(const VkFenceCreateInfo& ci) const {
    VkFence object;
    Check(dld->vkCreateFence(handle, &ci, nullptr, &object));
    return Fence(object, handle, *dld);
}

DescriptorPool Device::CreateDescriptorPool(const VkDescriptorPoolCreateInfo& ci) const {
    VkDescriptorPool object;
    Check(dld->vkCreateDescriptorPool(handle, &ci, nullptr, &object));
    return DescriptorPool(object, handle, *dld);
}

RenderPass Device::CreateRenderPass(const VkRenderPassCreateInfo& ci) const {
    VkRenderPass object;
    Check(dld->vkCreateRenderPass(handle, &ci, nullptr, &object));
    return RenderPass(object, handle, *dld);
}

DescriptorSetLayout Device::CreateDescriptorSetLayout(
    const VkDescriptorSetLayoutCreateInfo& ci) const {
    VkDescriptorSetLayout object;
    Check(dld->vkCreateDescriptorSetLayout(handle, &ci, nullptr, &object));
    return DescriptorSetLayout(object, handle, *dld);
}

PipelineLayout Device::CreatePipelineLayout(const VkPipelineLayoutCreateInfo& ci) const {
    VkPipelineLayout object;
    Check(dld->vkCreatePipelineLayout(handle, &ci, nullptr, &object));
    return PipelineLayout(object, handle, *dld);
}

// Pipeline creation passes a null pipeline cache (no caching at this layer).
Pipeline Device::CreateGraphicsPipeline(const VkGraphicsPipelineCreateInfo& ci) const {
    VkPipeline object;
    Check(dld->vkCreateGraphicsPipelines(handle, nullptr, 1, &ci, nullptr, &object));
    return Pipeline(object, handle, *dld);
}

Pipeline Device::CreateComputePipeline(const VkComputePipelineCreateInfo& ci) const {
    VkPipeline object;
    Check(dld->vkCreateComputePipelines(handle, nullptr, 1, &ci, nullptr, &object));
    return Pipeline(object, handle, *dld);
}

Sampler Device::CreateSampler(const VkSamplerCreateInfo& ci) const {
    VkSampler object;
    Check(dld->vkCreateSampler(handle, &ci, nullptr, &object));
    return Sampler(object, handle, *dld);
}

Framebuffer Device::CreateFramebuffer(const VkFramebufferCreateInfo& ci) const {
    VkFramebuffer object;
    Check(dld->vkCreateFramebuffer(handle, &ci, nullptr, &object));
    return Framebuffer(object, handle, *dld);
}

CommandPool Device::CreateCommandPool(const VkCommandPoolCreateInfo& ci) const {
    VkCommandPool object;
    Check(dld->vkCreateCommandPool(handle, &ci, nullptr, &object));
    return CommandPool(object, handle, *dld);
}

DescriptorUpdateTemplateKHR Device::CreateDescriptorUpdateTemplateKHR(
    const VkDescriptorUpdateTemplateCreateInfoKHR& ci) const {
    VkDescriptorUpdateTemplateKHR object;
    Check(dld->vkCreateDescriptorUpdateTemplateKHR(handle, &ci, nullptr, &object));
    return DescriptorUpdateTemplateKHR(object, handle, *dld);
}
714
715QueryPool Device::CreateQueryPool(const VkQueryPoolCreateInfo& ci) const {
716 VkQueryPool object;
717 Check(dld->vkCreateQueryPool(handle, &ci, nullptr, &object));
718 return QueryPool(object, handle, *dld);
719}
720
721ShaderModule Device::CreateShaderModule(const VkShaderModuleCreateInfo& ci) const {
722 VkShaderModule object;
723 Check(dld->vkCreateShaderModule(handle, &ci, nullptr, &object));
724 return ShaderModule(object, handle, *dld);
725}
726
727Event Device::CreateEvent() const {
728 static constexpr VkEventCreateInfo ci{
729 .sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO,
730 .pNext = nullptr,
731 .flags = 0,
732 };
733
734 VkEvent object;
735 Check(dld->vkCreateEvent(handle, &ci, nullptr, &object));
736 return Event(object, handle, *dld);
737}
738
739SwapchainKHR Device::CreateSwapchainKHR(const VkSwapchainCreateInfoKHR& ci) const {
740 VkSwapchainKHR object;
741 Check(dld->vkCreateSwapchainKHR(handle, &ci, nullptr, &object));
742 return SwapchainKHR(object, handle, *dld);
743}
744
745DeviceMemory Device::TryAllocateMemory(const VkMemoryAllocateInfo& ai) const noexcept {
746 VkDeviceMemory memory;
747 if (dld->vkAllocateMemory(handle, &ai, nullptr, &memory) != VK_SUCCESS) {
748 return {};
749 }
750 return DeviceMemory(memory, handle, *dld);
751}
752
753DeviceMemory Device::AllocateMemory(const VkMemoryAllocateInfo& ai) const {
754 VkDeviceMemory memory;
755 Check(dld->vkAllocateMemory(handle, &ai, nullptr, &memory));
756 return DeviceMemory(memory, handle, *dld);
757}
758
759VkMemoryRequirements Device::GetBufferMemoryRequirements(VkBuffer buffer) const noexcept {
760 VkMemoryRequirements requirements;
761 dld->vkGetBufferMemoryRequirements(handle, buffer, &requirements);
762 return requirements;
763}
764
765VkMemoryRequirements Device::GetImageMemoryRequirements(VkImage image) const noexcept {
766 VkMemoryRequirements requirements;
767 dld->vkGetImageMemoryRequirements(handle, image, &requirements);
768 return requirements;
769}
770
771void Device::UpdateDescriptorSets(Span<VkWriteDescriptorSet> writes,
772 Span<VkCopyDescriptorSet> copies) const noexcept {
773 dld->vkUpdateDescriptorSets(handle, writes.size(), writes.data(), copies.size(), copies.data());
774}
775
776VkPhysicalDeviceProperties PhysicalDevice::GetProperties() const noexcept {
777 VkPhysicalDeviceProperties properties;
778 dld->vkGetPhysicalDeviceProperties(physical_device, &properties);
779 return properties;
780}
781
782void PhysicalDevice::GetProperties2KHR(VkPhysicalDeviceProperties2KHR& properties) const noexcept {
783 dld->vkGetPhysicalDeviceProperties2KHR(physical_device, &properties);
784}
785
786VkPhysicalDeviceFeatures PhysicalDevice::GetFeatures() const noexcept {
787 VkPhysicalDeviceFeatures2KHR features2;
788 features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR;
789 features2.pNext = nullptr;
790 dld->vkGetPhysicalDeviceFeatures2KHR(physical_device, &features2);
791 return features2.features;
792}
793
794void PhysicalDevice::GetFeatures2KHR(VkPhysicalDeviceFeatures2KHR& features) const noexcept {
795 dld->vkGetPhysicalDeviceFeatures2KHR(physical_device, &features);
796}
797
798VkFormatProperties PhysicalDevice::GetFormatProperties(VkFormat format) const noexcept {
799 VkFormatProperties properties;
800 dld->vkGetPhysicalDeviceFormatProperties(physical_device, format, &properties);
801 return properties;
802}
803
/// Enumerates all extensions supported by this physical device using the
/// standard Vulkan two-call pattern (count query, then fill).
/// NOTE(review): the VkResult of both calls is ignored; the second call can
/// legally return VK_INCOMPLETE if the count changed between calls. Consider
/// wrapping in Check() if callers can tolerate an exception here — confirm.
std::vector<VkExtensionProperties> PhysicalDevice::EnumerateDeviceExtensionProperties() const {
    u32 num;
    dld->vkEnumerateDeviceExtensionProperties(physical_device, nullptr, &num, nullptr);
    std::vector<VkExtensionProperties> properties(num);
    dld->vkEnumerateDeviceExtensionProperties(physical_device, nullptr, &num, properties.data());
    return properties;
}

/// Enumerates the queue families exposed by this physical device.
/// This entry point returns void in the API, so there is no result to check.
std::vector<VkQueueFamilyProperties> PhysicalDevice::GetQueueFamilyProperties() const {
    u32 num;
    dld->vkGetPhysicalDeviceQueueFamilyProperties(physical_device, &num, nullptr);
    std::vector<VkQueueFamilyProperties> properties(num);
    dld->vkGetPhysicalDeviceQueueFamilyProperties(physical_device, &num, properties.data());
    return properties;
}
819
820bool PhysicalDevice::GetSurfaceSupportKHR(u32 queue_family_index, VkSurfaceKHR surface) const {
821 VkBool32 supported;
822 Check(dld->vkGetPhysicalDeviceSurfaceSupportKHR(physical_device, queue_family_index, surface,
823 &supported));
824 return supported == VK_TRUE;
825}
826
827VkSurfaceCapabilitiesKHR PhysicalDevice::GetSurfaceCapabilitiesKHR(VkSurfaceKHR surface) const {
828 VkSurfaceCapabilitiesKHR capabilities;
829 Check(dld->vkGetPhysicalDeviceSurfaceCapabilitiesKHR(physical_device, surface, &capabilities));
830 return capabilities;
831}
832
833std::vector<VkSurfaceFormatKHR> PhysicalDevice::GetSurfaceFormatsKHR(VkSurfaceKHR surface) const {
834 u32 num;
835 Check(dld->vkGetPhysicalDeviceSurfaceFormatsKHR(physical_device, surface, &num, nullptr));
836 std::vector<VkSurfaceFormatKHR> formats(num);
837 Check(
838 dld->vkGetPhysicalDeviceSurfaceFormatsKHR(physical_device, surface, &num, formats.data()));
839 return formats;
840}
841
842std::vector<VkPresentModeKHR> PhysicalDevice::GetSurfacePresentModesKHR(
843 VkSurfaceKHR surface) const {
844 u32 num;
845 Check(dld->vkGetPhysicalDeviceSurfacePresentModesKHR(physical_device, surface, &num, nullptr));
846 std::vector<VkPresentModeKHR> modes(num);
847 Check(dld->vkGetPhysicalDeviceSurfacePresentModesKHR(physical_device, surface, &num,
848 modes.data()));
849 return modes;
850}
851
852VkPhysicalDeviceMemoryProperties PhysicalDevice::GetMemoryProperties() const noexcept {
853 VkPhysicalDeviceMemoryProperties properties;
854 dld->vkGetPhysicalDeviceMemoryProperties(physical_device, &properties);
855 return properties;
856}
857
858u32 AvailableVersion(const InstanceDispatch& dld) noexcept {
859 PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion;
860 if (!Proc(vkEnumerateInstanceVersion, dld, "vkEnumerateInstanceVersion")) {
861 // If the procedure is not found, Vulkan 1.0 is assumed
862 return VK_API_VERSION_1_0;
863 }
864 u32 version;
865 if (const VkResult result = vkEnumerateInstanceVersion(&version); result != VK_SUCCESS) {
866 LOG_ERROR(Render_Vulkan, "vkEnumerateInstanceVersion returned {}, assuming Vulkan 1.1",
867 ToString(result));
868 return VK_API_VERSION_1_1;
869 }
870 return version;
871}
872
873std::optional<std::vector<VkExtensionProperties>> EnumerateInstanceExtensionProperties(
874 const InstanceDispatch& dld) {
875 u32 num;
876 if (dld.vkEnumerateInstanceExtensionProperties(nullptr, &num, nullptr) != VK_SUCCESS) {
877 return std::nullopt;
878 }
879 std::vector<VkExtensionProperties> properties(num);
880 if (dld.vkEnumerateInstanceExtensionProperties(nullptr, &num, properties.data()) !=
881 VK_SUCCESS) {
882 return std::nullopt;
883 }
884 return properties;
885}
886
887std::optional<std::vector<VkLayerProperties>> EnumerateInstanceLayerProperties(
888 const InstanceDispatch& dld) {
889 u32 num;
890 if (dld.vkEnumerateInstanceLayerProperties(&num, nullptr) != VK_SUCCESS) {
891 return std::nullopt;
892 }
893 std::vector<VkLayerProperties> properties(num);
894 if (dld.vkEnumerateInstanceLayerProperties(&num, properties.data()) != VK_SUCCESS) {
895 return std::nullopt;
896 }
897 return properties;
898}
899
900} // namespace Vulkan::vk
diff --git a/src/video_core/vulkan_common/vulkan_wrapper.h b/src/video_core/vulkan_common/vulkan_wrapper.h
new file mode 100644
index 000000000..912cab46c
--- /dev/null
+++ b/src/video_core/vulkan_common/vulkan_wrapper.h
@@ -0,0 +1,1222 @@
1// Copyright 2020 yuzu Emulator Project
2// Licensed under GPLv2 or any later version
3// Refer to the license.txt file included.
4
5#pragma once
6
7#include <exception>
8#include <iterator>
9#include <limits>
10#include <memory>
11#include <optional>
12#include <span>
13#include <type_traits>
14#include <utility>
15#include <vector>
16
17#define VK_NO_PROTOTYPES
18#include <vulkan/vulkan.h>
19
20#include "common/common_types.h"
21
22#ifdef _MSC_VER
23#pragma warning(disable : 26812) // Disable prefer enum class over enum
24#endif
25
26namespace Vulkan::vk {
27
/**
 * Span for Vulkan arrays.
 * Based on std::span but optimized for array access instead of iterators.
 * Size returns uint32_t instead of size_t to ease interaction with Vulkan functions.
 * Non-owning: the viewed storage must outlive the Span.
 */
template <typename T>
class Span {
public:
    using value_type = T;
    using size_type = u32;
    using difference_type = std::ptrdiff_t;
    using reference = const T&;
    using const_reference = const T&;
    using pointer = const T*;
    using const_pointer = const T*;
    using iterator = const T*;
    using const_iterator = const T*;

    /// Construct an empty span.
    constexpr Span() noexcept = default;

    /// Construct an empty span
    constexpr Span(std::nullptr_t) noexcept {}

    /// Construct a span from a single element.
    /// NOTE(review): binding a temporary here dangles once the full expression
    /// ends; intended for passing lvalues or inline call arguments only.
    constexpr Span(const T& value) noexcept : ptr{&value}, num{1} {}

    /// Construct a span from a range.
    template <typename Range>
    // requires std::data(const Range&)
    // requires std::size(const Range&)
    constexpr Span(const Range& range) : ptr{std::data(range)}, num{std::size(range)} {}

    /// Construct a span from a pointer and a size.
    /// This is intended for subranges.
    constexpr Span(const T* ptr_, std::size_t num_) noexcept : ptr{ptr_}, num{num_} {}

    /// Returns the data pointer by the span.
    constexpr const T* data() const noexcept {
        return ptr;
    }

    /// Returns the number of elements in the span.
    /// @note Returns a 32 bits integer because most Vulkan functions expect this type.
    constexpr u32 size() const noexcept {
        return static_cast<u32>(num);
    }

    /// Returns true when the span is empty.
    constexpr bool empty() const noexcept {
        return num == 0;
    }

    /// Returns a reference to the element in the passed index.
    /// @pre: index < size()
    constexpr const T& operator[](std::size_t index) const noexcept {
        return ptr[index];
    }

    /// Returns an iterator to the beginning of the span.
    constexpr const T* begin() const noexcept {
        return ptr;
    }

    /// Returns an iterator to the end of the span.
    constexpr const T* end() const noexcept {
        return ptr + num;
    }

    /// Returns an iterator to the beginning of the span.
    constexpr const T* cbegin() const noexcept {
        return ptr;
    }

    /// Returns an iterator to the end of the span.
    constexpr const T* cend() const noexcept {
        return ptr + num;
    }

private:
    const T* ptr = nullptr; ///< Borrowed pointer to the first element.
    std::size_t num = 0;    ///< Element count; truncated to u32 by size().
};
111
/// Vulkan exception generated from a VkResult.
class Exception final : public std::exception {
public:
    /// Construct the exception with a result.
    /// @pre result != VK_SUCCESS
    explicit Exception(VkResult result_) : result{result_} {}
    virtual ~Exception() = default;

    /// Returns a static, human-readable name for the stored VkResult.
    const char* what() const noexcept override;

private:
    VkResult result; ///< The failing result code this exception describes.
};

/// Converts a VkResult enum into a rodata string
const char* ToString(VkResult) noexcept;

/// Throws a Vulkan exception if result is not success.
/// @note Treats warnings (e.g. VK_INCOMPLETE, positive codes) as failures too.
inline void Check(VkResult result) {
    if (result != VK_SUCCESS) {
        throw Exception(result);
    }
}

/// Throws a Vulkan exception if result is an error.
/// Unlike Check, success and warning codes (>= 0) pass through untouched.
/// @return result
inline VkResult Filter(VkResult result) {
    if (result < 0) {
        throw Exception(result);
    }
    return result;
}
144
/// Table holding Vulkan instance function pointers.
/// Pointers are filled by the Load() overloads below; members are left
/// uninitialized here — presumably Load zeroes/fills every entry, confirm
/// before adding new members.
struct InstanceDispatch {
    PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;

    // Loader-level entry points available before an instance exists.
    PFN_vkCreateInstance vkCreateInstance;
    PFN_vkDestroyInstance vkDestroyInstance;
    PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties;
    PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties;

    // Instance-level entry points.
    PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT;
    PFN_vkCreateDevice vkCreateDevice;
    PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT;
    PFN_vkDestroyDevice vkDestroyDevice;
    PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR;
    PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties;
    PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices;
    PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr;
    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR;
    PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties;
    PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
    PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
    PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR;
    PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties;
    PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR;
    PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR;
    PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR;
    PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR;
    PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR;
    PFN_vkQueuePresentKHR vkQueuePresentKHR;
};

/// Table holding Vulkan device function pointers.
/// Inherits the instance table so a single object can dispatch everything.
struct DeviceDispatch : public InstanceDispatch {
    PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR;
    PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers;
    PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets;
    PFN_vkAllocateMemory vkAllocateMemory;
    PFN_vkBeginCommandBuffer vkBeginCommandBuffer;
    PFN_vkBindBufferMemory vkBindBufferMemory;
    PFN_vkBindImageMemory vkBindImageMemory;
    PFN_vkCmdBeginQuery vkCmdBeginQuery;
    PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass;
    PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT;
    PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT;
    PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets;
    PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer;
    PFN_vkCmdBindPipeline vkCmdBindPipeline;
    PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT;
    PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers;
    PFN_vkCmdBlitImage vkCmdBlitImage;
    PFN_vkCmdClearAttachments vkCmdClearAttachments;
    PFN_vkCmdCopyBuffer vkCmdCopyBuffer;
    PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage;
    PFN_vkCmdCopyImage vkCmdCopyImage;
    PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer;
    PFN_vkCmdDispatch vkCmdDispatch;
    PFN_vkCmdDraw vkCmdDraw;
    PFN_vkCmdDrawIndexed vkCmdDrawIndexed;
    PFN_vkCmdEndQuery vkCmdEndQuery;
    PFN_vkCmdEndRenderPass vkCmdEndRenderPass;
    PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT;
    PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT;
    PFN_vkCmdFillBuffer vkCmdFillBuffer;
    PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier;
    PFN_vkCmdPushConstants vkCmdPushConstants;
    PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants;
    PFN_vkCmdSetDepthBias vkCmdSetDepthBias;
    PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds;
    PFN_vkCmdSetEvent vkCmdSetEvent;
    PFN_vkCmdSetScissor vkCmdSetScissor;
    PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask;
    PFN_vkCmdSetStencilReference vkCmdSetStencilReference;
    PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask;
    PFN_vkCmdSetViewport vkCmdSetViewport;
    PFN_vkCmdWaitEvents vkCmdWaitEvents;
    // VK_EXT_extended_dynamic_state entry points (may be null if unsupported).
    PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT;
    PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT;
    PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT;
    PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT;
    PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT;
    PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT;
    PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT;
    PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT;
    PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT;
    PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT;
    PFN_vkCmdResolveImage vkCmdResolveImage;
    PFN_vkCreateBuffer vkCreateBuffer;
    PFN_vkCreateBufferView vkCreateBufferView;
    PFN_vkCreateCommandPool vkCreateCommandPool;
    PFN_vkCreateComputePipelines vkCreateComputePipelines;
    PFN_vkCreateDescriptorPool vkCreateDescriptorPool;
    PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout;
    PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR;
    PFN_vkCreateEvent vkCreateEvent;
    PFN_vkCreateFence vkCreateFence;
    PFN_vkCreateFramebuffer vkCreateFramebuffer;
    PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines;
    PFN_vkCreateImage vkCreateImage;
    PFN_vkCreateImageView vkCreateImageView;
    PFN_vkCreatePipelineLayout vkCreatePipelineLayout;
    PFN_vkCreateQueryPool vkCreateQueryPool;
    PFN_vkCreateRenderPass vkCreateRenderPass;
    PFN_vkCreateSampler vkCreateSampler;
    PFN_vkCreateSemaphore vkCreateSemaphore;
    PFN_vkCreateShaderModule vkCreateShaderModule;
    PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR;
    PFN_vkDestroyBuffer vkDestroyBuffer;
    PFN_vkDestroyBufferView vkDestroyBufferView;
    PFN_vkDestroyCommandPool vkDestroyCommandPool;
    PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool;
    PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout;
    PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR;
    PFN_vkDestroyEvent vkDestroyEvent;
    PFN_vkDestroyFence vkDestroyFence;
    PFN_vkDestroyFramebuffer vkDestroyFramebuffer;
    PFN_vkDestroyImage vkDestroyImage;
    PFN_vkDestroyImageView vkDestroyImageView;
    PFN_vkDestroyPipeline vkDestroyPipeline;
    PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout;
    PFN_vkDestroyQueryPool vkDestroyQueryPool;
    PFN_vkDestroyRenderPass vkDestroyRenderPass;
    PFN_vkDestroySampler vkDestroySampler;
    PFN_vkDestroySemaphore vkDestroySemaphore;
    PFN_vkDestroyShaderModule vkDestroyShaderModule;
    PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR;
    PFN_vkDeviceWaitIdle vkDeviceWaitIdle;
    PFN_vkEndCommandBuffer vkEndCommandBuffer;
    PFN_vkFreeCommandBuffers vkFreeCommandBuffers;
    PFN_vkFreeDescriptorSets vkFreeDescriptorSets;
    PFN_vkFreeMemory vkFreeMemory;
    PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
    PFN_vkGetDeviceQueue vkGetDeviceQueue;
    PFN_vkGetEventStatus vkGetEventStatus;
    PFN_vkGetFenceStatus vkGetFenceStatus;
    PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
    PFN_vkGetQueryPoolResults vkGetQueryPoolResults;
    PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR;
    PFN_vkMapMemory vkMapMemory;
    PFN_vkQueueSubmit vkQueueSubmit;
    PFN_vkResetFences vkResetFences;
    PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT;
    PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT;
    PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT;
    PFN_vkUnmapMemory vkUnmapMemory;
    PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR;
    PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets;
    PFN_vkWaitForFences vkWaitForFences;
    PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR;
};
294
/// Loads instance agnostic function pointers.
/// @return True on success, false on error.
bool Load(InstanceDispatch&) noexcept;

/// Loads instance function pointers.
/// @return True on success, false on error.
bool Load(VkInstance, InstanceDispatch&) noexcept;

// Destroy overloads dispatched by the Handle RAII wrappers below. The owner
// (VkInstance/VkDevice) comes first, then the handle to destroy, then the
// dispatch table holding the vkDestroy* entry point.
void Destroy(VkInstance, const InstanceDispatch&) noexcept;
void Destroy(VkDevice, const InstanceDispatch&) noexcept;

void Destroy(VkDevice, VkBuffer, const DeviceDispatch&) noexcept;
void Destroy(VkDevice, VkBufferView, const DeviceDispatch&) noexcept;
void Destroy(VkDevice, VkCommandPool, const DeviceDispatch&) noexcept;
void Destroy(VkDevice, VkDescriptorPool, const DeviceDispatch&) noexcept;
void Destroy(VkDevice, VkDescriptorSetLayout, const DeviceDispatch&) noexcept;
void Destroy(VkDevice, VkDescriptorUpdateTemplateKHR, const DeviceDispatch&) noexcept;
void Destroy(VkDevice, VkDeviceMemory, const DeviceDispatch&) noexcept;
void Destroy(VkDevice, VkEvent, const DeviceDispatch&) noexcept;
void Destroy(VkDevice, VkFence, const DeviceDispatch&) noexcept;
void Destroy(VkDevice, VkFramebuffer, const DeviceDispatch&) noexcept;
void Destroy(VkDevice, VkImage, const DeviceDispatch&) noexcept;
void Destroy(VkDevice, VkImageView, const DeviceDispatch&) noexcept;
void Destroy(VkDevice, VkPipeline, const DeviceDispatch&) noexcept;
void Destroy(VkDevice, VkPipelineLayout, const DeviceDispatch&) noexcept;
void Destroy(VkDevice, VkQueryPool, const DeviceDispatch&) noexcept;
void Destroy(VkDevice, VkRenderPass, const DeviceDispatch&) noexcept;
void Destroy(VkDevice, VkSampler, const DeviceDispatch&) noexcept;
void Destroy(VkDevice, VkSwapchainKHR, const DeviceDispatch&) noexcept;
void Destroy(VkDevice, VkSemaphore, const DeviceDispatch&) noexcept;
void Destroy(VkDevice, VkShaderModule, const DeviceDispatch&) noexcept;
void Destroy(VkInstance, VkDebugUtilsMessengerEXT, const InstanceDispatch&) noexcept;
void Destroy(VkInstance, VkSurfaceKHR, const InstanceDispatch&) noexcept;

// Pool-free helpers used by PoolAllocations; they return the raw VkResult
// instead of throwing because they run from destructors.
VkResult Free(VkDevice, VkDescriptorPool, Span<VkDescriptorSet>, const DeviceDispatch&) noexcept;
VkResult Free(VkDevice, VkCommandPool, Span<VkCommandBuffer>, const DeviceDispatch&) noexcept;
331
template <typename Type, typename OwnerType, typename Dispatch>
class Handle;

/// Handle with an owning type.
/// Analogue to std::unique_ptr: destruction dispatches to the matching
/// Destroy(owner, handle, dispatch) overload declared above.
template <typename Type, typename OwnerType, typename Dispatch>
class Handle {
public:
    /// Construct a handle and hold it's ownership.
    explicit Handle(Type handle_, OwnerType owner_, const Dispatch& dld_) noexcept
        : handle{handle_}, owner{owner_}, dld{&dld_} {}

    /// Construct an empty handle.
    Handle() = default;

    /// Copying Vulkan objects is not supported and will never be.
    Handle(const Handle&) = delete;
    Handle& operator=(const Handle&) = delete;

    /// Construct a handle transfering the ownership from another handle.
    Handle(Handle&& rhs) noexcept
        : handle{std::exchange(rhs.handle, nullptr)}, owner{rhs.owner}, dld{rhs.dld} {}

    /// Assign the current handle transfering the ownership from another handle.
    /// Destroys any previously held object.
    Handle& operator=(Handle&& rhs) noexcept {
        // Self-move guard: without it, Release() would destroy the handle and
        // the subsequent exchange would store the already-destroyed value,
        // causing a double destroy later.
        if (this != &rhs) {
            Release();
            handle = std::exchange(rhs.handle, nullptr);
            owner = rhs.owner;
            dld = rhs.dld;
        }
        return *this;
    }

    /// Destroys the current handle if it existed.
    ~Handle() noexcept {
        Release();
    }

    /// Destroys any held object.
    void reset() noexcept {
        Release();
        handle = nullptr;
    }

    /// Returns the address of the held object.
    /// Intended for Vulkan structures that expect a pointer to an array.
    const Type* address() const noexcept {
        return std::addressof(handle);
    }

    /// Returns the held Vulkan handle.
    Type operator*() const noexcept {
        return handle;
    }

    /// Returns true when there's a held object.
    explicit operator bool() const noexcept {
        return handle != nullptr;
    }

protected:
    Type handle = nullptr;        ///< Owned Vulkan handle, null when empty.
    OwnerType owner = nullptr;    ///< Parent object the handle was created from.
    const Dispatch* dld = nullptr; ///< Dispatch table used to destroy the handle.

private:
    /// Destroys the held object if it exists.
    void Release() noexcept {
        if (handle) {
            Destroy(owner, handle, *dld);
        }
    }
};
405
406/// Dummy type used to specify a handle has no owner.
407struct NoOwner {};
408
409/// Handle without an owning type.
410/// Analogue to std::unique_ptr
411template <typename Type, typename Dispatch>
412class Handle<Type, NoOwner, Dispatch> {
413public:
414 /// Construct a handle and hold it's ownership.
415 explicit Handle(Type handle_, const Dispatch& dld_) noexcept : handle{handle_}, dld{&dld_} {}
416
417 /// Construct an empty handle.
418 Handle() noexcept = default;
419
420 /// Copying Vulkan objects is not supported and will never be.
421 Handle(const Handle&) = delete;
422 Handle& operator=(const Handle&) = delete;
423
424 /// Construct a handle transfering ownership from another handle.
425 Handle(Handle&& rhs) noexcept : handle{std::exchange(rhs.handle, nullptr)}, dld{rhs.dld} {}
426
427 /// Assign the current handle transfering the ownership from another handle.
428 /// Destroys any previously held object.
429 Handle& operator=(Handle&& rhs) noexcept {
430 Release();
431 handle = std::exchange(rhs.handle, nullptr);
432 dld = rhs.dld;
433 return *this;
434 }
435
436 /// Destroys the current handle if it existed.
437 ~Handle() noexcept {
438 Release();
439 }
440
441 /// Destroys any held object.
442 void reset() noexcept {
443 Release();
444 handle = nullptr;
445 }
446
447 /// Returns the address of the held object.
448 /// Intended for Vulkan structures that expect a pointer to an array.
449 const Type* address() const noexcept {
450 return std::addressof(handle);
451 }
452
453 /// Returns the held Vulkan handle.
454 Type operator*() const noexcept {
455 return handle;
456 }
457
458 /// Returns true when there's a held object.
459 operator bool() const noexcept {
460 return handle != nullptr;
461 }
462
463protected:
464 Type handle = nullptr;
465 const Dispatch* dld = nullptr;
466
467private:
468 /// Destroys the held object if it exists.
469 void Release() noexcept {
470 if (handle) {
471 Destroy(handle, *dld);
472 }
473 }
474};
475
476/// Array of a pool allocation.
477/// Analogue to std::vector
478template <typename AllocationType, typename PoolType>
479class PoolAllocations {
480public:
481 /// Construct an empty allocation.
482 PoolAllocations() = default;
483
484 /// Construct an allocation. Errors are reported through IsOutOfPoolMemory().
485 explicit PoolAllocations(std::unique_ptr<AllocationType[]> allocations_, std::size_t num_,
486 VkDevice device_, PoolType pool_, const DeviceDispatch& dld_) noexcept
487 : allocations{std::move(allocations_)}, num{num_}, device{device_}, pool{pool_},
488 dld{&dld_} {}
489
490 /// Copying Vulkan allocations is not supported and will never be.
491 PoolAllocations(const PoolAllocations&) = delete;
492 PoolAllocations& operator=(const PoolAllocations&) = delete;
493
494 /// Construct an allocation transfering ownership from another allocation.
495 PoolAllocations(PoolAllocations&& rhs) noexcept
496 : allocations{std::move(rhs.allocations)}, num{rhs.num}, device{rhs.device}, pool{rhs.pool},
497 dld{rhs.dld} {}
498
499 /// Assign an allocation transfering ownership from another allocation.
500 /// Releases any previously held allocation.
501 PoolAllocations& operator=(PoolAllocations&& rhs) noexcept {
502 Release();
503 allocations = std::move(rhs.allocations);
504 num = rhs.num;
505 device = rhs.device;
506 pool = rhs.pool;
507 dld = rhs.dld;
508 return *this;
509 }
510
511 /// Destroys any held allocation.
512 ~PoolAllocations() {
513 Release();
514 }
515
516 /// Returns the number of allocations.
517 std::size_t size() const noexcept {
518 return num;
519 }
520
521 /// Returns a pointer to the array of allocations.
522 AllocationType const* data() const noexcept {
523 return allocations.get();
524 }
525
526 /// Returns the allocation in the specified index.
527 /// @pre index < size()
528 AllocationType operator[](std::size_t index) const noexcept {
529 return allocations[index];
530 }
531
532 /// True when a pool fails to construct.
533 bool IsOutOfPoolMemory() const noexcept {
534 return !device;
535 }
536
537private:
538 /// Destroys the held allocations if they exist.
539 void Release() noexcept {
540 if (!allocations) {
541 return;
542 }
543 const Span<AllocationType> span(allocations.get(), num);
544 const VkResult result = Free(device, pool, span, *dld);
545 // There's no way to report errors from a destructor.
546 if (result != VK_SUCCESS) {
547 std::terminate();
548 }
549 }
550
551 std::unique_ptr<AllocationType[]> allocations;
552 std::size_t num = 0;
553 VkDevice device = nullptr;
554 PoolType pool = nullptr;
555 const DeviceDispatch* dld = nullptr;
556};
557
// Convenience aliases for handles whose wrapper adds no extra member functions.
using DebugUtilsMessenger = Handle<VkDebugUtilsMessengerEXT, VkInstance, InstanceDispatch>;
using DescriptorSetLayout = Handle<VkDescriptorSetLayout, VkDevice, DeviceDispatch>;
using DescriptorUpdateTemplateKHR = Handle<VkDescriptorUpdateTemplateKHR, VkDevice, DeviceDispatch>;
using Pipeline = Handle<VkPipeline, VkDevice, DeviceDispatch>;
using PipelineLayout = Handle<VkPipelineLayout, VkDevice, DeviceDispatch>;
using QueryPool = Handle<VkQueryPool, VkDevice, DeviceDispatch>;
using RenderPass = Handle<VkRenderPass, VkDevice, DeviceDispatch>;
using Sampler = Handle<VkSampler, VkDevice, DeviceDispatch>;
using SurfaceKHR = Handle<VkSurfaceKHR, VkInstance, InstanceDispatch>;

// Pool-backed arrays freed back to their pool on destruction.
using DescriptorSets = PoolAllocations<VkDescriptorSet, VkDescriptorPool>;
using CommandBuffers = PoolAllocations<VkCommandBuffer, VkCommandPool>;
570
/// Vulkan instance owning handle.
class Instance : public Handle<VkInstance, NoOwner, InstanceDispatch> {
    using Handle<VkInstance, NoOwner, InstanceDispatch>::Handle;

public:
    /// Creates a Vulkan instance.
    /// @param version Instance API version to request.
    /// @param layers Validation/utility layers to enable.
    /// @param extensions Instance extensions to enable.
    /// @param dispatch Table to populate with instance-level entry points.
    /// @throw Exception on initialization error.
    static Instance Create(u32 version, Span<const char*> layers, Span<const char*> extensions,
                           InstanceDispatch& dispatch);

    /// Enumerates physical devices.
    /// @return Physical devices and an empty handle on failure.
    /// @throw Exception on Vulkan error.
    std::vector<VkPhysicalDevice> EnumeratePhysicalDevices() const;

    /// Creates a debug callback messenger.
    /// @throw Exception on creation failure.
    DebugUtilsMessenger CreateDebugUtilsMessenger(
        const VkDebugUtilsMessengerCreateInfoEXT& create_info) const;

    /// Returns dispatch table.
    const InstanceDispatch& Dispatch() const noexcept {
        return *dld;
    }
};
596
597class Queue {
598public:
599 /// Construct an empty queue handle.
600 constexpr Queue() noexcept = default;
601
602 /// Construct a queue handle.
603 constexpr Queue(VkQueue queue_, const DeviceDispatch& dld_) noexcept
604 : queue{queue_}, dld{&dld_} {}
605
606 VkResult Submit(Span<VkSubmitInfo> submit_infos,
607 VkFence fence = VK_NULL_HANDLE) const noexcept {
608 return dld->vkQueueSubmit(queue, submit_infos.size(), submit_infos.data(), fence);
609 }
610
611 VkResult Present(const VkPresentInfoKHR& present_info) const noexcept {
612 return dld->vkQueuePresentKHR(queue, &present_info);
613 }
614
615private:
616 VkQueue queue = nullptr;
617 const DeviceDispatch* dld = nullptr;
618};
619
/// Owning handle to a VkBuffer.
class Buffer : public Handle<VkBuffer, VkDevice, DeviceDispatch> {
    using Handle<VkBuffer, VkDevice, DeviceDispatch>::Handle;

public:
    /// Attaches a memory allocation at the given offset.
    void BindMemory(VkDeviceMemory memory, VkDeviceSize offset) const;

    /// Set object name.
    void SetObjectNameEXT(const char* name) const;
};
630
/// Owning handle to a VkBufferView.
class BufferView : public Handle<VkBufferView, VkDevice, DeviceDispatch> {
    using Handle<VkBufferView, VkDevice, DeviceDispatch>::Handle;

public:
    /// Set object name.
    void SetObjectNameEXT(const char* name) const;
};
638
/// Owning handle to a VkImage.
class Image : public Handle<VkImage, VkDevice, DeviceDispatch> {
    using Handle<VkImage, VkDevice, DeviceDispatch>::Handle;

public:
    /// Attaches a memory allocation at the given offset.
    void BindMemory(VkDeviceMemory memory, VkDeviceSize offset) const;

    /// Set object name.
    void SetObjectNameEXT(const char* name) const;
};
649
/// Owning handle to a VkImageView.
class ImageView : public Handle<VkImageView, VkDevice, DeviceDispatch> {
    using Handle<VkImageView, VkDevice, DeviceDispatch>::Handle;

public:
    /// Set object name.
    void SetObjectNameEXT(const char* name) const;
};
657
658class DeviceMemory : public Handle<VkDeviceMemory, VkDevice, DeviceDispatch> {
659 using Handle<VkDeviceMemory, VkDevice, DeviceDispatch>::Handle;
660
661public:
662 /// Set object name.
663 void SetObjectNameEXT(const char* name) const;
664
665 u8* Map(VkDeviceSize offset, VkDeviceSize size) const {
666 void* data;
667 Check(dld->vkMapMemory(owner, handle, offset, size, 0, &data));
668 return static_cast<u8*>(data);
669 }
670
671 void Unmap() const noexcept {
672 dld->vkUnmapMemory(owner, handle);
673 }
674};
675
/// Owning handle to a VkFence.
class Fence : public Handle<VkFence, VkDevice, DeviceDispatch> {
    using Handle<VkFence, VkDevice, DeviceDispatch>::Handle;

public:
    /// Set object name.
    void SetObjectNameEXT(const char* name) const;

    /// Waits until the fence is signaled or the timeout (nanoseconds) elapses.
    /// @return Result of vkWaitForFences (e.g. VK_SUCCESS or VK_TIMEOUT).
    VkResult Wait(u64 timeout = std::numeric_limits<u64>::max()) const noexcept {
        return dld->vkWaitForFences(owner, 1, &handle, true, timeout);
    }

    /// Queries the fence status without blocking.
    VkResult GetStatus() const noexcept {
        return dld->vkGetFenceStatus(owner, handle);
    }

    /// Resets the fence to the unsignaled state.
    void Reset() const {
        Check(dld->vkResetFences(owner, 1, &handle));
    }
};
695
/// Owning handle to a VkFramebuffer.
class Framebuffer : public Handle<VkFramebuffer, VkDevice, DeviceDispatch> {
    using Handle<VkFramebuffer, VkDevice, DeviceDispatch>::Handle;

public:
    /// Set object name.
    void SetObjectNameEXT(const char* name) const;
};
703
/// Owning handle to a VkDescriptorPool.
class DescriptorPool : public Handle<VkDescriptorPool, VkDevice, DeviceDispatch> {
    using Handle<VkDescriptorPool, VkDevice, DeviceDispatch>::Handle;

public:
    /// Allocates the descriptor sets described by @p ai from this pool.
    DescriptorSets Allocate(const VkDescriptorSetAllocateInfo& ai) const;

    /// Set object name.
    void SetObjectNameEXT(const char* name) const;
};
713
/// Owning handle to a VkCommandPool.
class CommandPool : public Handle<VkCommandPool, VkDevice, DeviceDispatch> {
    using Handle<VkCommandPool, VkDevice, DeviceDispatch>::Handle;

public:
    /// Allocates @p num_buffers command buffers of the given level from this pool.
    CommandBuffers Allocate(std::size_t num_buffers,
                            VkCommandBufferLevel level = VK_COMMAND_BUFFER_LEVEL_PRIMARY) const;

    /// Set object name.
    void SetObjectNameEXT(const char* name) const;
};
724
/// Owning handle to a VkSwapchainKHR.
class SwapchainKHR : public Handle<VkSwapchainKHR, VkDevice, DeviceDispatch> {
    using Handle<VkSwapchainKHR, VkDevice, DeviceDispatch>::Handle;

public:
    /// Returns the presentable images of the swapchain.
    std::vector<VkImage> GetImages() const;
};
731
/// Owning handle to a VkEvent.
class Event : public Handle<VkEvent, VkDevice, DeviceDispatch> {
    using Handle<VkEvent, VkDevice, DeviceDispatch>::Handle;

public:
    /// Set object name.
    void SetObjectNameEXT(const char* name) const;

    /// Queries the event status without blocking.
    VkResult GetStatus() const noexcept {
        return dld->vkGetEventStatus(owner, handle);
    }
};
743
/// Owning handle to a VkShaderModule.
class ShaderModule : public Handle<VkShaderModule, VkDevice, DeviceDispatch> {
    using Handle<VkShaderModule, VkDevice, DeviceDispatch>::Handle;

public:
    /// Set object name.
    void SetObjectNameEXT(const char* name) const;
};
751
752class Semaphore : public Handle<VkSemaphore, VkDevice, DeviceDispatch> {
753 using Handle<VkSemaphore, VkDevice, DeviceDispatch>::Handle;
754
755public:
756 /// Set object name.
757 void SetObjectNameEXT(const char* name) const;
758
759 [[nodiscard]] u64 GetCounter() const {
760 u64 value;
761 Check(dld->vkGetSemaphoreCounterValueKHR(owner, handle, &value));
762 return value;
763 }
764
765 /**
766 * Waits for a timeline semaphore on the host.
767 *
768 * @param value Value to wait
769 * @param timeout Time in nanoseconds to timeout
770 * @return True on successful wait, false on timeout
771 */
772 bool Wait(u64 value, u64 timeout = std::numeric_limits<u64>::max()) const {
773 const VkSemaphoreWaitInfoKHR wait_info{
774 .sType = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR,
775 .pNext = nullptr,
776 .flags = 0,
777 .semaphoreCount = 1,
778 .pSemaphores = &handle,
779 .pValues = &value,
780 };
781 const VkResult result = dld->vkWaitSemaphoresKHR(owner, &wait_info, timeout);
782 switch (result) {
783 case VK_SUCCESS:
784 return true;
785 case VK_TIMEOUT:
786 return false;
787 default:
788 throw Exception(result);
789 }
790 }
791};
792
/// Vulkan logical device owning handle. The Create* factories return owning
/// wrappers for the corresponding vkCreate*/vkAllocate* calls; presumably they
/// throw vk::Exception on failure (matching the Check pattern used elsewhere
/// in this header) -- confirm in the implementation file.
class Device : public Handle<VkDevice, NoOwner, DeviceDispatch> {
    using Handle<VkDevice, NoOwner, DeviceDispatch>::Handle;

public:
    /// Creates a logical device from a physical device.
    /// @param next Optional pNext chain forwarded to device creation.
    static Device Create(VkPhysicalDevice physical_device, Span<VkDeviceQueueCreateInfo> queues_ci,
                         Span<const char*> enabled_extensions, const void* next,
                         DeviceDispatch& dispatch);

    /// Returns a queue from the given family index.
    Queue GetQueue(u32 family_index) const noexcept;

    Buffer CreateBuffer(const VkBufferCreateInfo& ci) const;

    BufferView CreateBufferView(const VkBufferViewCreateInfo& ci) const;

    Image CreateImage(const VkImageCreateInfo& ci) const;

    ImageView CreateImageView(const VkImageViewCreateInfo& ci) const;

    Semaphore CreateSemaphore() const;

    Semaphore CreateSemaphore(const VkSemaphoreCreateInfo& ci) const;

    Fence CreateFence(const VkFenceCreateInfo& ci) const;

    DescriptorPool CreateDescriptorPool(const VkDescriptorPoolCreateInfo& ci) const;

    RenderPass CreateRenderPass(const VkRenderPassCreateInfo& ci) const;

    DescriptorSetLayout CreateDescriptorSetLayout(const VkDescriptorSetLayoutCreateInfo& ci) const;

    PipelineLayout CreatePipelineLayout(const VkPipelineLayoutCreateInfo& ci) const;

    Pipeline CreateGraphicsPipeline(const VkGraphicsPipelineCreateInfo& ci) const;

    Pipeline CreateComputePipeline(const VkComputePipelineCreateInfo& ci) const;

    Sampler CreateSampler(const VkSamplerCreateInfo& ci) const;

    Framebuffer CreateFramebuffer(const VkFramebufferCreateInfo& ci) const;

    CommandPool CreateCommandPool(const VkCommandPoolCreateInfo& ci) const;

    DescriptorUpdateTemplateKHR CreateDescriptorUpdateTemplateKHR(
        const VkDescriptorUpdateTemplateCreateInfoKHR& ci) const;

    QueryPool CreateQueryPool(const VkQueryPoolCreateInfo& ci) const;

    ShaderModule CreateShaderModule(const VkShaderModuleCreateInfo& ci) const;

    Event CreateEvent() const;

    SwapchainKHR CreateSwapchainKHR(const VkSwapchainCreateInfoKHR& ci) const;

    /// Allocates device memory; never throws (see the noexcept qualifier),
    /// unlike AllocateMemory below.
    DeviceMemory TryAllocateMemory(const VkMemoryAllocateInfo& ai) const noexcept;

    DeviceMemory AllocateMemory(const VkMemoryAllocateInfo& ai) const;

    VkMemoryRequirements GetBufferMemoryRequirements(VkBuffer buffer) const noexcept;

    VkMemoryRequirements GetImageMemoryRequirements(VkImage image) const noexcept;

    /// Performs the given descriptor write and copy operations.
    void UpdateDescriptorSets(Span<VkWriteDescriptorSet> writes,
                              Span<VkCopyDescriptorSet> copies) const noexcept;

    /// Updates a descriptor set through a descriptor update template.
    void UpdateDescriptorSet(VkDescriptorSet set, VkDescriptorUpdateTemplateKHR update_template,
                             const void* data) const noexcept {
        dld->vkUpdateDescriptorSetWithTemplateKHR(handle, set, update_template, data);
    }

    /// Acquires the next presentable image of a swapchain.
    VkResult AcquireNextImageKHR(VkSwapchainKHR swapchain, u64 timeout, VkSemaphore semaphore,
                                 VkFence fence, u32* image_index) const noexcept {
        return dld->vkAcquireNextImageKHR(handle, swapchain, timeout, semaphore, fence,
                                          image_index);
    }

    /// Blocks until all queues of the device are idle.
    VkResult WaitIdle() const noexcept {
        return dld->vkDeviceWaitIdle(handle);
    }

    /// Resets a range of queries from the host (VK_EXT_host_query_reset).
    void ResetQueryPoolEXT(VkQueryPool query_pool, u32 first, u32 count) const noexcept {
        dld->vkResetQueryPoolEXT(handle, query_pool, first, count);
    }

    /// Copies query results into @p data.
    VkResult GetQueryResults(VkQueryPool query_pool, u32 first, u32 count, std::size_t data_size,
                             void* data, VkDeviceSize stride,
                             VkQueryResultFlags flags) const noexcept {
        return dld->vkGetQueryPoolResults(handle, query_pool, first, count, data_size, data, stride,
                                          flags);
    }
};
883
/// Non-owning wrapper over a VkPhysicalDevice, dispatching through the
/// instance dispatch table.
class PhysicalDevice {
public:
    constexpr PhysicalDevice() noexcept = default;

    constexpr PhysicalDevice(VkPhysicalDevice physical_device_,
                             const InstanceDispatch& dld_) noexcept
        : physical_device{physical_device_}, dld{&dld_} {}

    /// Implicit conversion to the raw Vulkan handle.
    constexpr operator VkPhysicalDevice() const noexcept {
        return physical_device;
    }

    VkPhysicalDeviceProperties GetProperties() const noexcept;

    void GetProperties2KHR(VkPhysicalDeviceProperties2KHR&) const noexcept;

    VkPhysicalDeviceFeatures GetFeatures() const noexcept;

    void GetFeatures2KHR(VkPhysicalDeviceFeatures2KHR&) const noexcept;

    VkFormatProperties GetFormatProperties(VkFormat) const noexcept;

    std::vector<VkExtensionProperties> EnumerateDeviceExtensionProperties() const;

    std::vector<VkQueueFamilyProperties> GetQueueFamilyProperties() const;

    /// Returns true when the given queue family can present to the surface.
    bool GetSurfaceSupportKHR(u32 queue_family_index, VkSurfaceKHR) const;

    VkSurfaceCapabilitiesKHR GetSurfaceCapabilitiesKHR(VkSurfaceKHR) const;

    std::vector<VkSurfaceFormatKHR> GetSurfaceFormatsKHR(VkSurfaceKHR) const;

    std::vector<VkPresentModeKHR> GetSurfacePresentModesKHR(VkSurfaceKHR) const;

    VkPhysicalDeviceMemoryProperties GetMemoryProperties() const noexcept;

private:
    VkPhysicalDevice physical_device = nullptr;
    const InstanceDispatch* dld = nullptr;
};
924
925class CommandBuffer {
926public:
927 CommandBuffer() noexcept = default;
928
929 explicit CommandBuffer(VkCommandBuffer handle_, const DeviceDispatch& dld_) noexcept
930 : handle{handle_}, dld{&dld_} {}
931
932 const VkCommandBuffer* address() const noexcept {
933 return &handle;
934 }
935
936 void Begin(const VkCommandBufferBeginInfo& begin_info) const {
937 Check(dld->vkBeginCommandBuffer(handle, &begin_info));
938 }
939
940 void End() const {
941 Check(dld->vkEndCommandBuffer(handle));
942 }
943
944 void BeginRenderPass(const VkRenderPassBeginInfo& renderpass_bi,
945 VkSubpassContents contents) const noexcept {
946 dld->vkCmdBeginRenderPass(handle, &renderpass_bi, contents);
947 }
948
949 void EndRenderPass() const noexcept {
950 dld->vkCmdEndRenderPass(handle);
951 }
952
953 void BeginQuery(VkQueryPool query_pool, u32 query, VkQueryControlFlags flags) const noexcept {
954 dld->vkCmdBeginQuery(handle, query_pool, query, flags);
955 }
956
957 void EndQuery(VkQueryPool query_pool, u32 query) const noexcept {
958 dld->vkCmdEndQuery(handle, query_pool, query);
959 }
960
961 void BindDescriptorSets(VkPipelineBindPoint bind_point, VkPipelineLayout layout, u32 first,
962 Span<VkDescriptorSet> sets, Span<u32> dynamic_offsets) const noexcept {
963 dld->vkCmdBindDescriptorSets(handle, bind_point, layout, first, sets.size(), sets.data(),
964 dynamic_offsets.size(), dynamic_offsets.data());
965 }
966
967 void BindPipeline(VkPipelineBindPoint bind_point, VkPipeline pipeline) const noexcept {
968 dld->vkCmdBindPipeline(handle, bind_point, pipeline);
969 }
970
971 void BindIndexBuffer(VkBuffer buffer, VkDeviceSize offset,
972 VkIndexType index_type) const noexcept {
973 dld->vkCmdBindIndexBuffer(handle, buffer, offset, index_type);
974 }
975
976 void BindVertexBuffers(u32 first, u32 count, const VkBuffer* buffers,
977 const VkDeviceSize* offsets) const noexcept {
978 dld->vkCmdBindVertexBuffers(handle, first, count, buffers, offsets);
979 }
980
981 void BindVertexBuffer(u32 binding, VkBuffer buffer, VkDeviceSize offset) const noexcept {
982 BindVertexBuffers(binding, 1, &buffer, &offset);
983 }
984
985 void Draw(u32 vertex_count, u32 instance_count, u32 first_vertex,
986 u32 first_instance) const noexcept {
987 dld->vkCmdDraw(handle, vertex_count, instance_count, first_vertex, first_instance);
988 }
989
990 void DrawIndexed(u32 index_count, u32 instance_count, u32 first_index, u32 vertex_offset,
991 u32 first_instance) const noexcept {
992 dld->vkCmdDrawIndexed(handle, index_count, instance_count, first_index, vertex_offset,
993 first_instance);
994 }
995
996 void ClearAttachments(Span<VkClearAttachment> attachments,
997 Span<VkClearRect> rects) const noexcept {
998 dld->vkCmdClearAttachments(handle, attachments.size(), attachments.data(), rects.size(),
999 rects.data());
1000 }
1001
1002 void BlitImage(VkImage src_image, VkImageLayout src_layout, VkImage dst_image,
1003 VkImageLayout dst_layout, Span<VkImageBlit> regions,
1004 VkFilter filter) const noexcept {
1005 dld->vkCmdBlitImage(handle, src_image, src_layout, dst_image, dst_layout, regions.size(),
1006 regions.data(), filter);
1007 }
1008
1009 void ResolveImage(VkImage src_image, VkImageLayout src_layout, VkImage dst_image,
1010 VkImageLayout dst_layout, Span<VkImageResolve> regions) {
1011 dld->vkCmdResolveImage(handle, src_image, src_layout, dst_image, dst_layout, regions.size(),
1012 regions.data());
1013 }
1014
1015 void Dispatch(u32 x, u32 y, u32 z) const noexcept {
1016 dld->vkCmdDispatch(handle, x, y, z);
1017 }
1018
1019 void PipelineBarrier(VkPipelineStageFlags src_stage_mask, VkPipelineStageFlags dst_stage_mask,
1020 VkDependencyFlags dependency_flags, Span<VkMemoryBarrier> memory_barriers,
1021 Span<VkBufferMemoryBarrier> buffer_barriers,
1022 Span<VkImageMemoryBarrier> image_barriers) const noexcept {
1023 dld->vkCmdPipelineBarrier(handle, src_stage_mask, dst_stage_mask, dependency_flags,
1024 memory_barriers.size(), memory_barriers.data(),
1025 buffer_barriers.size(), buffer_barriers.data(),
1026 image_barriers.size(), image_barriers.data());
1027 }
1028
1029 void PipelineBarrier(VkPipelineStageFlags src_stage_mask, VkPipelineStageFlags dst_stage_mask,
1030 VkDependencyFlags dependency_flags = 0) const noexcept {
1031 PipelineBarrier(src_stage_mask, dst_stage_mask, dependency_flags, {}, {}, {});
1032 }
1033
1034 void PipelineBarrier(VkPipelineStageFlags src_stage_mask, VkPipelineStageFlags dst_stage_mask,
1035 VkDependencyFlags dependency_flags,
1036 const VkBufferMemoryBarrier& buffer_barrier) const noexcept {
1037 PipelineBarrier(src_stage_mask, dst_stage_mask, dependency_flags, {}, buffer_barrier, {});
1038 }
1039
1040 void PipelineBarrier(VkPipelineStageFlags src_stage_mask, VkPipelineStageFlags dst_stage_mask,
1041 VkDependencyFlags dependency_flags,
1042 const VkImageMemoryBarrier& image_barrier) const noexcept {
1043 PipelineBarrier(src_stage_mask, dst_stage_mask, dependency_flags, {}, {}, image_barrier);
1044 }
1045
1046 void CopyBufferToImage(VkBuffer src_buffer, VkImage dst_image, VkImageLayout dst_image_layout,
1047 Span<VkBufferImageCopy> regions) const noexcept {
1048 dld->vkCmdCopyBufferToImage(handle, src_buffer, dst_image, dst_image_layout, regions.size(),
1049 regions.data());
1050 }
1051
1052 void CopyBuffer(VkBuffer src_buffer, VkBuffer dst_buffer,
1053 Span<VkBufferCopy> regions) const noexcept {
1054 dld->vkCmdCopyBuffer(handle, src_buffer, dst_buffer, regions.size(), regions.data());
1055 }
1056
1057 void CopyImage(VkImage src_image, VkImageLayout src_layout, VkImage dst_image,
1058 VkImageLayout dst_layout, Span<VkImageCopy> regions) const noexcept {
1059 dld->vkCmdCopyImage(handle, src_image, src_layout, dst_image, dst_layout, regions.size(),
1060 regions.data());
1061 }
1062
1063 void CopyImageToBuffer(VkImage src_image, VkImageLayout src_layout, VkBuffer dst_buffer,
1064 Span<VkBufferImageCopy> regions) const noexcept {
1065 dld->vkCmdCopyImageToBuffer(handle, src_image, src_layout, dst_buffer, regions.size(),
1066 regions.data());
1067 }
1068
1069 void FillBuffer(VkBuffer dst_buffer, VkDeviceSize dst_offset, VkDeviceSize size,
1070 u32 data) const noexcept {
1071 dld->vkCmdFillBuffer(handle, dst_buffer, dst_offset, size, data);
1072 }
1073
1074 void PushConstants(VkPipelineLayout layout, VkShaderStageFlags flags, u32 offset, u32 size,
1075 const void* values) const noexcept {
1076 dld->vkCmdPushConstants(handle, layout, flags, offset, size, values);
1077 }
1078
1079 template <typename T>
1080 void PushConstants(VkPipelineLayout layout, VkShaderStageFlags flags,
1081 const T& data) const noexcept {
1082 static_assert(std::is_trivially_copyable_v<T>, "<data> is not trivially copyable");
1083 dld->vkCmdPushConstants(handle, layout, flags, 0, static_cast<u32>(sizeof(T)), &data);
1084 }
1085
1086 void SetViewport(u32 first, Span<VkViewport> viewports) const noexcept {
1087 dld->vkCmdSetViewport(handle, first, viewports.size(), viewports.data());
1088 }
1089
1090 void SetScissor(u32 first, Span<VkRect2D> scissors) const noexcept {
1091 dld->vkCmdSetScissor(handle, first, scissors.size(), scissors.data());
1092 }
1093
1094 void SetBlendConstants(const float blend_constants[4]) const noexcept {
1095 dld->vkCmdSetBlendConstants(handle, blend_constants);
1096 }
1097
1098 void SetStencilCompareMask(VkStencilFaceFlags face_mask, u32 compare_mask) const noexcept {
1099 dld->vkCmdSetStencilCompareMask(handle, face_mask, compare_mask);
1100 }
1101
1102 void SetStencilReference(VkStencilFaceFlags face_mask, u32 reference) const noexcept {
1103 dld->vkCmdSetStencilReference(handle, face_mask, reference);
1104 }
1105
1106 void SetStencilWriteMask(VkStencilFaceFlags face_mask, u32 write_mask) const noexcept {
1107 dld->vkCmdSetStencilWriteMask(handle, face_mask, write_mask);
1108 }
1109
1110 void SetDepthBias(float constant_factor, float clamp, float slope_factor) const noexcept {
1111 dld->vkCmdSetDepthBias(handle, constant_factor, clamp, slope_factor);
1112 }
1113
1114 void SetDepthBounds(float min_depth_bounds, float max_depth_bounds) const noexcept {
1115 dld->vkCmdSetDepthBounds(handle, min_depth_bounds, max_depth_bounds);
1116 }
1117
1118 void SetEvent(VkEvent event, VkPipelineStageFlags stage_flags) const noexcept {
1119 dld->vkCmdSetEvent(handle, event, stage_flags);
1120 }
1121
1122 void WaitEvents(Span<VkEvent> events, VkPipelineStageFlags src_stage_mask,
1123 VkPipelineStageFlags dst_stage_mask, Span<VkMemoryBarrier> memory_barriers,
1124 Span<VkBufferMemoryBarrier> buffer_barriers,
1125 Span<VkImageMemoryBarrier> image_barriers) const noexcept {
1126 dld->vkCmdWaitEvents(handle, events.size(), events.data(), src_stage_mask, dst_stage_mask,
1127 memory_barriers.size(), memory_barriers.data(), buffer_barriers.size(),
1128 buffer_barriers.data(), image_barriers.size(), image_barriers.data());
1129 }
1130
1131 void BindVertexBuffers2EXT(u32 first_binding, u32 binding_count, const VkBuffer* buffers,
1132 const VkDeviceSize* offsets, const VkDeviceSize* sizes,
1133 const VkDeviceSize* strides) const noexcept {
1134 dld->vkCmdBindVertexBuffers2EXT(handle, first_binding, binding_count, buffers, offsets,
1135 sizes, strides);
1136 }
1137
1138 void SetCullModeEXT(VkCullModeFlags cull_mode) const noexcept {
1139 dld->vkCmdSetCullModeEXT(handle, cull_mode);
1140 }
1141
1142 void SetDepthBoundsTestEnableEXT(bool enable) const noexcept {
1143 dld->vkCmdSetDepthBoundsTestEnableEXT(handle, enable ? VK_TRUE : VK_FALSE);
1144 }
1145
1146 void SetDepthCompareOpEXT(VkCompareOp compare_op) const noexcept {
1147 dld->vkCmdSetDepthCompareOpEXT(handle, compare_op);
1148 }
1149
1150 void SetDepthTestEnableEXT(bool enable) const noexcept {
1151 dld->vkCmdSetDepthTestEnableEXT(handle, enable ? VK_TRUE : VK_FALSE);
1152 }
1153
1154 void SetDepthWriteEnableEXT(bool enable) const noexcept {
1155 dld->vkCmdSetDepthWriteEnableEXT(handle, enable ? VK_TRUE : VK_FALSE);
1156 }
1157
1158 void SetFrontFaceEXT(VkFrontFace front_face) const noexcept {
1159 dld->vkCmdSetFrontFaceEXT(handle, front_face);
1160 }
1161
1162 void SetPrimitiveTopologyEXT(VkPrimitiveTopology primitive_topology) const noexcept {
1163 dld->vkCmdSetPrimitiveTopologyEXT(handle, primitive_topology);
1164 }
1165
1166 void SetStencilOpEXT(VkStencilFaceFlags face_mask, VkStencilOp fail_op, VkStencilOp pass_op,
1167 VkStencilOp depth_fail_op, VkCompareOp compare_op) const noexcept {
1168 dld->vkCmdSetStencilOpEXT(handle, face_mask, fail_op, pass_op, depth_fail_op, compare_op);
1169 }
1170
1171 void SetStencilTestEnableEXT(bool enable) const noexcept {
1172 dld->vkCmdSetStencilTestEnableEXT(handle, enable ? VK_TRUE : VK_FALSE);
1173 }
1174
1175 void BindTransformFeedbackBuffersEXT(u32 first, u32 count, const VkBuffer* buffers,
1176 const VkDeviceSize* offsets,
1177 const VkDeviceSize* sizes) const noexcept {
1178 dld->vkCmdBindTransformFeedbackBuffersEXT(handle, first, count, buffers, offsets, sizes);
1179 }
1180
1181 void BeginTransformFeedbackEXT(u32 first_counter_buffer, u32 counter_buffers_count,
1182 const VkBuffer* counter_buffers,
1183 const VkDeviceSize* counter_buffer_offsets) const noexcept {
1184 dld->vkCmdBeginTransformFeedbackEXT(handle, first_counter_buffer, counter_buffers_count,
1185 counter_buffers, counter_buffer_offsets);
1186 }
1187
1188 void EndTransformFeedbackEXT(u32 first_counter_buffer, u32 counter_buffers_count,
1189 const VkBuffer* counter_buffers,
1190 const VkDeviceSize* counter_buffer_offsets) const noexcept {
1191 dld->vkCmdEndTransformFeedbackEXT(handle, first_counter_buffer, counter_buffers_count,
1192 counter_buffers, counter_buffer_offsets);
1193 }
1194
1195 void BeginDebugUtilsLabelEXT(const char* label, std::span<float, 4> color) const noexcept {
1196 const VkDebugUtilsLabelEXT label_info{
1197 .sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT,
1198 .pNext = nullptr,
1199 .pLabelName = label,
1200 .color{color[0], color[1], color[2], color[3]},
1201 };
1202 dld->vkCmdBeginDebugUtilsLabelEXT(handle, &label_info);
1203 }
1204
1205 void EndDebugUtilsLabelEXT() const noexcept {
1206 dld->vkCmdEndDebugUtilsLabelEXT(handle);
1207 }
1208
1209private:
1210 VkCommandBuffer handle;
1211 const DeviceDispatch* dld;
1212};
1213
/// Returns the available Vulkan instance API version.
u32 AvailableVersion(const InstanceDispatch& dld) noexcept;

/// Enumerates instance extension properties.
/// An empty optional presumably signals query failure -- confirm in the
/// implementation file.
std::optional<std::vector<VkExtensionProperties>> EnumerateInstanceExtensionProperties(
    const InstanceDispatch& dld);

/// Enumerates instance layer properties.
/// An empty optional presumably signals query failure -- confirm in the
/// implementation file.
std::optional<std::vector<VkLayerProperties>> EnumerateInstanceLayerProperties(
    const InstanceDispatch& dld);
1221
1222} // namespace Vulkan::vk