author    Rodrigo Locatti  2021-01-03 17:38:29 -0300
committer GitHub           2021-01-03 17:38:29 -0300
commit    7265e80c12de2d24d759ee6956a2445bf9ac8992 (patch)
tree      3236d949fdf894b985a8dcafc580cd233ebd9a68 /src/video_core/vulkan_common
parent    Merge pull request #5278 from MerryMage/cpuopt_unsafe_inaccurate_nan (diff)
parent    vulkan_instance: Allow different Vulkan versions and enforce 1.1 (diff)
download  yuzu-7265e80c12de2d24d759ee6956a2445bf9ac8992.tar.gz
          yuzu-7265e80c12de2d24d759ee6956a2445bf9ac8992.tar.xz
          yuzu-7265e80c12de2d24d759ee6956a2445bf9ac8992.zip
Merge pull request #5230 from ReinUsesLisp/vulkan-common
vulkan_common: Move reusable Vulkan abstractions to a separate directory
Diffstat (limited to 'src/video_core/vulkan_common')
-rw-r--r-- src/video_core/vulkan_common/vulkan_debug_callback.cpp |   45
-rw-r--r-- src/video_core/vulkan_common/vulkan_debug_callback.h   |   11
-rw-r--r-- src/video_core/vulkan_common/vulkan_instance.cpp       |  151
-rw-r--r-- src/video_core/vulkan_common/vulkan_instance.h         |   32
-rw-r--r-- src/video_core/vulkan_common/vulkan_library.cpp        |   36
-rw-r--r-- src/video_core/vulkan_common/vulkan_library.h          |   13
-rw-r--r-- src/video_core/vulkan_common/vulkan_surface.cpp        |   81
-rw-r--r-- src/video_core/vulkan_common/vulkan_surface.h          |   18
-rw-r--r-- src/video_core/vulkan_common/vulkan_wrapper.cpp        |  900
-rw-r--r-- src/video_core/vulkan_common/vulkan_wrapper.h          | 1222
10 files changed, 2509 insertions, 0 deletions
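
The files below introduce self-contained helpers for loading the Vulkan library, creating an instance, registering a debug callback, and creating a presentation surface. As a rough orientation, here is a minimal wiring sketch; it is not part of this commit, and the function name plus the choice to enable debug utils and validation layers are assumptions for illustration only:

#include "common/dynamic_library.h"
#include "core/frontend/emu_window.h"
#include "video_core/vulkan_common/vulkan_debug_callback.h"
#include "video_core/vulkan_common/vulkan_instance.h"
#include "video_core/vulkan_common/vulkan_library.h"
#include "video_core/vulkan_common/vulkan_surface.h"
#include "video_core/vulkan_common/vulkan_wrapper.h"

namespace Vulkan {

// Hypothetical helper for illustration; a real renderer keeps these objects alive as members.
void InitializeVulkanSketch(const Core::Frontend::EmuWindow& emu_window) {
    Common::DynamicLibrary library = OpenLibrary();
    vk::InstanceDispatch dld{};
    // Throws vk::Exception if the loader, a required extension, or the requested version is missing.
    vk::Instance instance = CreateInstance(library, dld, VK_API_VERSION_1_1,
                                           emu_window.GetWindowInfo().type,
                                           /*enable_debug_utils=*/true, /*enable_layers=*/true);
    [[maybe_unused]] vk::DebugUtilsMessenger messenger = CreateDebugCallback(instance);
    [[maybe_unused]] vk::SurfaceKHR surface = CreateSurface(instance, emu_window);
    // surface, messenger and instance are destroyed here in reverse declaration order.
}

} // namespace Vulkan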
diff --git a/src/video_core/vulkan_common/vulkan_debug_callback.cpp b/src/video_core/vulkan_common/vulkan_debug_callback.cpp
new file mode 100644
index 000000000..ea7af8ad4
--- /dev/null
+++ b/src/video_core/vulkan_common/vulkan_debug_callback.cpp
@@ -0,0 +1,45 @@
1// Copyright 2020 yuzu Emulator Project
2// Licensed under GPLv2 or any later version
3// Refer to the license.txt file included.
4
5#include <string_view>
6#include "common/logging/log.h"
7#include "video_core/vulkan_common/vulkan_debug_callback.h"
8
9namespace Vulkan {
10namespace {
11VkBool32 Callback(VkDebugUtilsMessageSeverityFlagBitsEXT severity,
12 VkDebugUtilsMessageTypeFlagsEXT type,
13 const VkDebugUtilsMessengerCallbackDataEXT* data,
14 [[maybe_unused]] void* user_data) {
15 const std::string_view message{data->pMessage};
16 if (severity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT) {
17 LOG_CRITICAL(Render_Vulkan, "{}", message);
18 } else if (severity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT) {
19 LOG_WARNING(Render_Vulkan, "{}", message);
20 } else if (severity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT) {
21 LOG_INFO(Render_Vulkan, "{}", message);
22 } else if (severity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT) {
23 LOG_DEBUG(Render_Vulkan, "{}", message);
24 }
25 return VK_FALSE;
26}
27} // Anonymous namespace
28
29vk::DebugUtilsMessenger CreateDebugCallback(const vk::Instance& instance) {
30 return instance.CreateDebugUtilsMessenger(VkDebugUtilsMessengerCreateInfoEXT{
31 .sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT,
32 .pNext = nullptr,
33 .flags = 0,
34 .messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT |
35 VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT |
36 VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT |
37 VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT,
38 .messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT |
39 VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT |
40 VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT,
41 .pfnUserCallback = Callback,
42 });
43}
44
45} // namespace Vulkan
diff --git a/src/video_core/vulkan_common/vulkan_debug_callback.h b/src/video_core/vulkan_common/vulkan_debug_callback.h
new file mode 100644
index 000000000..2efcd244c
--- /dev/null
+++ b/src/video_core/vulkan_common/vulkan_debug_callback.h
@@ -0,0 +1,11 @@
1// Copyright 2020 yuzu Emulator Project
2// Licensed under GPLv2 or any later version
3// Refer to the license.txt file included.
4
5#include "video_core/vulkan_common/vulkan_wrapper.h"
6
7namespace Vulkan {
8
9vk::DebugUtilsMessenger CreateDebugCallback(const vk::Instance& instance);
10
11} // namespace Vulkan
diff --git a/src/video_core/vulkan_common/vulkan_instance.cpp b/src/video_core/vulkan_common/vulkan_instance.cpp
new file mode 100644
index 000000000..889ecda0c
--- /dev/null
+++ b/src/video_core/vulkan_common/vulkan_instance.cpp
@@ -0,0 +1,151 @@
1// Copyright 2020 yuzu Emulator Project
2// Licensed under GPLv2 or any later version
3// Refer to the license.txt file included.
4
5#include <algorithm>
6#include <optional>
7#include <span>
8#include <utility>
9#include <vector>
10
11#include "common/common_types.h"
12#include "common/dynamic_library.h"
13#include "common/logging/log.h"
14#include "core/frontend/emu_window.h"
15#include "video_core/vulkan_common/vulkan_instance.h"
16#include "video_core/vulkan_common/vulkan_wrapper.h"
17
18// Include these late to avoid polluting previous headers
19#ifdef _WIN32
20#include <windows.h>
21// ensure include order
22#include <vulkan/vulkan_win32.h>
23#endif
24
25#if !defined(_WIN32) && !defined(__APPLE__)
26#include <X11/Xlib.h>
27#include <vulkan/vulkan_wayland.h>
28#include <vulkan/vulkan_xlib.h>
29#endif
30
31namespace Vulkan {
32namespace {
33[[nodiscard]] std::vector<const char*> RequiredExtensions(
34 Core::Frontend::WindowSystemType window_type, bool enable_debug_utils) {
35 std::vector<const char*> extensions;
36 extensions.reserve(6);
37 switch (window_type) {
38 case Core::Frontend::WindowSystemType::Headless:
39 break;
40#ifdef _WIN32
41 case Core::Frontend::WindowSystemType::Windows:
42 extensions.push_back(VK_KHR_WIN32_SURFACE_EXTENSION_NAME);
43 break;
44#endif
45#if !defined(_WIN32) && !defined(__APPLE__)
46 case Core::Frontend::WindowSystemType::X11:
47 extensions.push_back(VK_KHR_XLIB_SURFACE_EXTENSION_NAME);
48 break;
49 case Core::Frontend::WindowSystemType::Wayland:
50 extensions.push_back(VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME);
51 break;
52#endif
53 default:
54 LOG_ERROR(Render_Vulkan, "Presentation not supported on this platform");
55 break;
56 }
57 if (window_type != Core::Frontend::WindowSystemType::Headless) {
58 extensions.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
59 }
60 if (enable_debug_utils) {
61 extensions.push_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
62 }
63 extensions.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
64 return extensions;
65}
66
67[[nodiscard]] bool AreExtensionsSupported(const vk::InstanceDispatch& dld,
68 std::span<const char* const> extensions) {
69 const std::optional properties = vk::EnumerateInstanceExtensionProperties(dld);
70 if (!properties) {
71 LOG_ERROR(Render_Vulkan, "Failed to query extension properties");
72 return false;
73 }
74 for (const char* extension : extensions) {
75 const auto it = std::ranges::find_if(*properties, [extension](const auto& prop) {
76 return std::strcmp(extension, prop.extensionName) == 0;
77 });
78 if (it == properties->end()) {
79 LOG_ERROR(Render_Vulkan, "Required instance extension {} is not available", extension);
80 return false;
81 }
82 }
83 return true;
84}
85
86[[nodiscard]] std::vector<const char*> Layers(bool enable_layers) {
87 std::vector<const char*> layers;
88 if (enable_layers) {
89 layers.push_back("VK_LAYER_KHRONOS_validation");
90 }
91 return layers;
92}
93
94void RemoveUnavailableLayers(const vk::InstanceDispatch& dld, std::vector<const char*>& layers) {
95 const std::optional layer_properties = vk::EnumerateInstanceLayerProperties(dld);
96 if (!layer_properties) {
97 LOG_ERROR(Render_Vulkan, "Failed to query layer properties, disabling layers");
98 layers.clear();
99 }
100 std::erase_if(layers, [&layer_properties](const char* layer) {
101 const auto comp = [layer](const VkLayerProperties& layer_property) {
102 return std::strcmp(layer, layer_property.layerName) == 0;
103 };
104 const auto it = std::ranges::find_if(*layer_properties, comp);
105 if (it == layer_properties->end()) {
106 LOG_ERROR(Render_Vulkan, "Layer {} not available, removing it", layer);
107 return true;
108 }
109 return false;
110 });
111}
112} // Anonymous namespace
113
114vk::Instance CreateInstance(const Common::DynamicLibrary& library, vk::InstanceDispatch& dld,
115 u32 required_version, Core::Frontend::WindowSystemType window_type,
116 bool enable_debug_utils, bool enable_layers) {
117 if (!library.IsOpen()) {
118 LOG_ERROR(Render_Vulkan, "Vulkan library not available");
119 throw vk::Exception(VK_ERROR_INITIALIZATION_FAILED);
120 }
121 if (!library.GetSymbol("vkGetInstanceProcAddr", &dld.vkGetInstanceProcAddr)) {
122 LOG_ERROR(Render_Vulkan, "vkGetInstanceProcAddr not present in Vulkan");
123 throw vk::Exception(VK_ERROR_INITIALIZATION_FAILED);
124 }
125 if (!vk::Load(dld)) {
126 LOG_ERROR(Render_Vulkan, "Failed to load Vulkan function pointers");
127 throw vk::Exception(VK_ERROR_INITIALIZATION_FAILED);
128 }
129 const std::vector<const char*> extensions = RequiredExtensions(window_type, enable_debug_utils);
130 if (!AreExtensionsSupported(dld, extensions)) {
131 throw vk::Exception(VK_ERROR_EXTENSION_NOT_PRESENT);
132 }
133 std::vector<const char*> layers = Layers(enable_layers);
134 RemoveUnavailableLayers(dld, layers);
135
136 const u32 available_version = vk::AvailableVersion(dld);
137 if (available_version < required_version) {
138 LOG_ERROR(Render_Vulkan, "Vulkan {}.{} is not supported, {}.{} is required",
139 VK_VERSION_MAJOR(available_version), VK_VERSION_MINOR(available_version),
140 VK_VERSION_MAJOR(required_version), VK_VERSION_MINOR(required_version));
141 throw vk::Exception(VK_ERROR_INCOMPATIBLE_DRIVER);
142 }
143 vk::Instance instance = vk::Instance::Create(required_version, layers, extensions, dld);
144 if (!vk::Load(*instance, dld)) {
145 LOG_ERROR(Render_Vulkan, "Failed to load Vulkan instance function pointers");
146 throw vk::Exception(VK_ERROR_INITIALIZATION_FAILED);
147 }
148 return instance;
149}
150
151} // namespace Vulkan
diff --git a/src/video_core/vulkan_common/vulkan_instance.h b/src/video_core/vulkan_common/vulkan_instance.h
new file mode 100644
index 000000000..e5e3a7144
--- /dev/null
+++ b/src/video_core/vulkan_common/vulkan_instance.h
@@ -0,0 +1,32 @@
1// Copyright 2020 yuzu Emulator Project
2// Licensed under GPLv2 or any later version
3// Refer to the license.txt file included.
4
5#pragma once
6
7#include "common/common_types.h"
8#include "common/dynamic_library.h"
9#include "core/frontend/emu_window.h"
10#include "video_core/vulkan_common/vulkan_wrapper.h"
11
12namespace Vulkan {
13
14/**
15 * Create a Vulkan instance
16 *
17 * @param library Dynamic library to load the Vulkan instance from
18 * @param dld Dispatch table to load function pointers into
19 * @param required_version Required Vulkan version (for example, VK_API_VERSION_1_1)
20 * @param window_type Window system type whose corresponding surface extension will be enabled
21 * @param enable_debug_utils Whether to enable the VK_EXT_debug_utils extension or not
22 * @param enable_layers Whether to enable Vulkan validation layers or not
23 *
24 * @return A new Vulkan instance
25 * @throw vk::Exception on failure
26 */
27[[nodiscard]] vk::Instance CreateInstance(
28 const Common::DynamicLibrary& library, vk::InstanceDispatch& dld, u32 required_version,
29 Core::Frontend::WindowSystemType window_type = Core::Frontend::WindowSystemType::Headless,
30 bool enable_debug_utils = false, bool enable_layers = false);
31
32} // namespace Vulkan
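
For completeness, a short usage sketch relying on the headless defaults documented above and handling the documented vk::Exception; this is an assumption for illustration, not code from this commit:

#include "common/logging/log.h"
#include "video_core/vulkan_common/vulkan_instance.h"
#include "video_core/vulkan_common/vulkan_library.h"

// Hypothetical headless setup; the defaults skip surface extensions, debug utils and layers.
void CreateHeadlessInstanceSketch() {
    Common::DynamicLibrary library = Vulkan::OpenLibrary();
    Vulkan::vk::InstanceDispatch dld{};
    try {
        Vulkan::vk::Instance instance = Vulkan::CreateInstance(library, dld, VK_API_VERSION_1_1);
        // In this sketch the instance is destroyed at the end of the try block.
    } catch (const Vulkan::vk::Exception& exception) {
        LOG_ERROR(Render_Vulkan, "Vulkan instance creation failed: {}", exception.what());
    }
}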
diff --git a/src/video_core/vulkan_common/vulkan_library.cpp b/src/video_core/vulkan_common/vulkan_library.cpp
new file mode 100644
index 000000000..27c958221
--- /dev/null
+++ b/src/video_core/vulkan_common/vulkan_library.cpp
@@ -0,0 +1,36 @@
1// Copyright 2020 yuzu Emulator Project
2// Licensed under GPLv2 or any later version
3// Refer to the license.txt file included.
4
5#include <cstdlib>
6#include <string>
7
8#include "common/dynamic_library.h"
9#include "common/file_util.h"
10#include "video_core/vulkan_common/vulkan_library.h"
11
12namespace Vulkan {
13
14Common::DynamicLibrary OpenLibrary() {
15 Common::DynamicLibrary library;
16#ifdef __APPLE__
17 // Check if a path to a specific Vulkan library has been specified.
18 char* const libvulkan_env = std::getenv("LIBVULKAN_PATH");
19 if (!libvulkan_env || !library.Open(libvulkan_env)) {
20 // Use the libvulkan.dylib from the application bundle.
21 const std::string filename =
22 Common::FS::GetBundleDirectory() + "/Contents/Frameworks/libvulkan.dylib";
23 library.Open(filename.c_str());
24 }
25#else
26 std::string filename = Common::DynamicLibrary::GetVersionedFilename("vulkan", 1);
27 if (!library.Open(filename.c_str())) {
28 // Android devices may not have libvulkan.so.1, only libvulkan.so.
29 filename = Common::DynamicLibrary::GetVersionedFilename("vulkan");
30 void(library.Open(filename.c_str()));
31 }
32#endif
33 return library;
34}
35
36} // namespace Vulkan
diff --git a/src/video_core/vulkan_common/vulkan_library.h b/src/video_core/vulkan_common/vulkan_library.h
new file mode 100644
index 000000000..8b28b0e17
--- /dev/null
+++ b/src/video_core/vulkan_common/vulkan_library.h
@@ -0,0 +1,13 @@
1// Copyright 2020 yuzu Emulator Project
2// Licensed under GPLv2 or any later version
3// Refer to the license.txt file included.
4
5#pragma once
6
7#include "common/dynamic_library.h"
8
9namespace Vulkan {
10
11Common::DynamicLibrary OpenLibrary();
12
13} // namespace Vulkan
diff --git a/src/video_core/vulkan_common/vulkan_surface.cpp b/src/video_core/vulkan_common/vulkan_surface.cpp
new file mode 100644
index 000000000..3c3238f96
--- /dev/null
+++ b/src/video_core/vulkan_common/vulkan_surface.cpp
@@ -0,0 +1,81 @@
1// Copyright 2020 yuzu Emulator Project
2// Licensed under GPLv2 or any later version
3// Refer to the license.txt file included.
4
5#include "common/logging/log.h"
6#include "core/frontend/emu_window.h"
7#include "video_core/vulkan_common/vulkan_surface.h"
8#include "video_core/vulkan_common/vulkan_wrapper.h"
9
10// Include these late to avoid polluting previous headers
11#ifdef _WIN32
12#include <windows.h>
13// ensure include order
14#include <vulkan/vulkan_win32.h>
15#endif
16
17#if !defined(_WIN32) && !defined(__APPLE__)
18#include <X11/Xlib.h>
19#include <vulkan/vulkan_wayland.h>
20#include <vulkan/vulkan_xlib.h>
21#endif
22
23namespace Vulkan {
24
25vk::SurfaceKHR CreateSurface(const vk::Instance& instance,
26 const Core::Frontend::EmuWindow& emu_window) {
27 [[maybe_unused]] const vk::InstanceDispatch& dld = instance.Dispatch();
28 [[maybe_unused]] const auto& window_info = emu_window.GetWindowInfo();
29 VkSurfaceKHR unsafe_surface = nullptr;
30
31#ifdef _WIN32
32 if (window_info.type == Core::Frontend::WindowSystemType::Windows) {
33 const HWND hWnd = static_cast<HWND>(window_info.render_surface);
34 const VkWin32SurfaceCreateInfoKHR win32_ci{VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR,
35 nullptr, 0, nullptr, hWnd};
36 const auto vkCreateWin32SurfaceKHR = reinterpret_cast<PFN_vkCreateWin32SurfaceKHR>(
37 dld.vkGetInstanceProcAddr(*instance, "vkCreateWin32SurfaceKHR"));
38 if (!vkCreateWin32SurfaceKHR ||
39 vkCreateWin32SurfaceKHR(*instance, &win32_ci, nullptr, &unsafe_surface) != VK_SUCCESS) {
40 LOG_ERROR(Render_Vulkan, "Failed to initialize Win32 surface");
41 throw vk::Exception(VK_ERROR_INITIALIZATION_FAILED);
42 }
43 }
44#endif
45#if !defined(_WIN32) && !defined(__APPLE__)
46 if (window_info.type == Core::Frontend::WindowSystemType::X11) {
47 const VkXlibSurfaceCreateInfoKHR xlib_ci{
48 VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR, nullptr, 0,
49 static_cast<Display*>(window_info.display_connection),
50 reinterpret_cast<Window>(window_info.render_surface)};
51 const auto vkCreateXlibSurfaceKHR = reinterpret_cast<PFN_vkCreateXlibSurfaceKHR>(
52 dld.vkGetInstanceProcAddr(*instance, "vkCreateXlibSurfaceKHR"));
53 if (!vkCreateXlibSurfaceKHR ||
54 vkCreateXlibSurfaceKHR(*instance, &xlib_ci, nullptr, &unsafe_surface) != VK_SUCCESS) {
55 LOG_ERROR(Render_Vulkan, "Failed to initialize Xlib surface");
56 throw vk::Exception(VK_ERROR_INITIALIZATION_FAILED);
57 }
58 }
59 if (window_info.type == Core::Frontend::WindowSystemType::Wayland) {
60 const VkWaylandSurfaceCreateInfoKHR wayland_ci{
61 VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR, nullptr, 0,
62 static_cast<wl_display*>(window_info.display_connection),
63 static_cast<wl_surface*>(window_info.render_surface)};
64 const auto vkCreateWaylandSurfaceKHR = reinterpret_cast<PFN_vkCreateWaylandSurfaceKHR>(
65 dld.vkGetInstanceProcAddr(*instance, "vkCreateWaylandSurfaceKHR"));
66 if (!vkCreateWaylandSurfaceKHR ||
67 vkCreateWaylandSurfaceKHR(*instance, &wayland_ci, nullptr, &unsafe_surface) !=
68 VK_SUCCESS) {
69 LOG_ERROR(Render_Vulkan, "Failed to initialize Wayland surface");
70 throw vk::Exception(VK_ERROR_INITIALIZATION_FAILED);
71 }
72 }
73#endif
74 if (!unsafe_surface) {
75 LOG_ERROR(Render_Vulkan, "Presentation not supported on this platform");
76 throw vk::Exception(VK_ERROR_INITIALIZATION_FAILED);
77 }
78 return vk::SurfaceKHR(unsafe_surface, *instance, dld);
79}
80
81} // namespace Vulkan
diff --git a/src/video_core/vulkan_common/vulkan_surface.h b/src/video_core/vulkan_common/vulkan_surface.h
new file mode 100644
index 000000000..05a169e32
--- /dev/null
+++ b/src/video_core/vulkan_common/vulkan_surface.h
@@ -0,0 +1,18 @@
1// Copyright 2020 yuzu Emulator Project
2// Licensed under GPLv2 or any later version
3// Refer to the license.txt file included.
4
5#pragma once
6
7#include "video_core/vulkan_common/vulkan_wrapper.h"
8
9namespace Core::Frontend {
10class EmuWindow;
11}
12
13namespace Vulkan {
14
15[[nodiscard]] vk::SurfaceKHR CreateSurface(const vk::Instance& instance,
16 const Core::Frontend::EmuWindow& emu_window);
17
18} // namespace Vulkan
diff --git a/src/video_core/vulkan_common/vulkan_wrapper.cpp b/src/video_core/vulkan_common/vulkan_wrapper.cpp
new file mode 100644
index 000000000..5e15ad607
--- /dev/null
+++ b/src/video_core/vulkan_common/vulkan_wrapper.cpp
@@ -0,0 +1,900 @@
1// Copyright 2020 yuzu Emulator Project
2// Licensed under GPLv2 or any later version
3// Refer to the license.txt file included.
4
5#include <algorithm>
6#include <exception>
7#include <memory>
8#include <optional>
9#include <string_view>
10#include <utility>
11#include <vector>
12
13#include "common/common_types.h"
14#include "common/logging/log.h"
15
16#include "video_core/vulkan_common/vulkan_wrapper.h"
17
18namespace Vulkan::vk {
19
20namespace {
21
22template <typename Func>
23void SortPhysicalDevices(std::vector<VkPhysicalDevice>& devices, const InstanceDispatch& dld,
24 Func&& func) {
25 // Calling GetProperties invokes Vulkan more often than strictly needed, but these calls are
26 // expected to be cheap.
27 std::stable_sort(devices.begin(), devices.end(),
28 [&dld, &func](VkPhysicalDevice lhs, VkPhysicalDevice rhs) {
29 return func(vk::PhysicalDevice(lhs, dld).GetProperties(),
30 vk::PhysicalDevice(rhs, dld).GetProperties());
31 });
32}
33
34void SortPhysicalDevicesPerVendor(std::vector<VkPhysicalDevice>& devices,
35 const InstanceDispatch& dld,
36 std::initializer_list<u32> vendor_ids) {
37 for (auto it = vendor_ids.end(); it != vendor_ids.begin();) {
38 --it;
39 SortPhysicalDevices(devices, dld, [id = *it](const auto& lhs, const auto& rhs) {
40 return lhs.vendorID == id && rhs.vendorID != id;
41 });
42 }
43}
44
45void SortPhysicalDevices(std::vector<VkPhysicalDevice>& devices, const InstanceDispatch& dld) {
46 // Sort by name, this will set a base and make GPUs with higher numbers appear first
47 // (e.g. GTX 1650 will intentionally be listed before a GTX 1080).
48 SortPhysicalDevices(devices, dld, [](const auto& lhs, const auto& rhs) {
49 return std::string_view{lhs.deviceName} > std::string_view{rhs.deviceName};
50 });
51 // Prefer discrete over non-discrete
52 SortPhysicalDevices(devices, dld, [](const auto& lhs, const auto& rhs) {
53 return lhs.deviceType == VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU &&
54 rhs.deviceType != VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU;
55 });
56 // Prefer Nvidia over AMD, AMD over Intel, Intel over the rest.
57 SortPhysicalDevicesPerVendor(devices, dld, {0x10DE, 0x1002, 0x8086});
58}
59
60template <typename T>
61bool Proc(T& result, const InstanceDispatch& dld, const char* proc_name,
62 VkInstance instance = nullptr) noexcept {
63 result = reinterpret_cast<T>(dld.vkGetInstanceProcAddr(instance, proc_name));
64 return result != nullptr;
65}
66
67template <typename T>
68void Proc(T& result, const DeviceDispatch& dld, const char* proc_name, VkDevice device) noexcept {
69 result = reinterpret_cast<T>(dld.vkGetDeviceProcAddr(device, proc_name));
70}
71
72void Load(VkDevice device, DeviceDispatch& dld) noexcept {
73#define X(name) Proc(dld.name, dld, #name, device)
74 X(vkAcquireNextImageKHR);
75 X(vkAllocateCommandBuffers);
76 X(vkAllocateDescriptorSets);
77 X(vkAllocateMemory);
78 X(vkBeginCommandBuffer);
79 X(vkBindBufferMemory);
80 X(vkBindImageMemory);
81 X(vkCmdBeginQuery);
82 X(vkCmdBeginRenderPass);
83 X(vkCmdBeginTransformFeedbackEXT);
84 X(vkCmdBeginDebugUtilsLabelEXT);
85 X(vkCmdBindDescriptorSets);
86 X(vkCmdBindIndexBuffer);
87 X(vkCmdBindPipeline);
88 X(vkCmdBindTransformFeedbackBuffersEXT);
89 X(vkCmdBindVertexBuffers);
90 X(vkCmdBlitImage);
91 X(vkCmdClearAttachments);
92 X(vkCmdCopyBuffer);
93 X(vkCmdCopyBufferToImage);
94 X(vkCmdCopyImage);
95 X(vkCmdCopyImageToBuffer);
96 X(vkCmdDispatch);
97 X(vkCmdDraw);
98 X(vkCmdDrawIndexed);
99 X(vkCmdEndQuery);
100 X(vkCmdEndRenderPass);
101 X(vkCmdEndTransformFeedbackEXT);
102 X(vkCmdEndDebugUtilsLabelEXT);
103 X(vkCmdFillBuffer);
104 X(vkCmdPipelineBarrier);
105 X(vkCmdPushConstants);
106 X(vkCmdSetBlendConstants);
107 X(vkCmdSetDepthBias);
108 X(vkCmdSetDepthBounds);
109 X(vkCmdSetEvent);
110 X(vkCmdSetScissor);
111 X(vkCmdSetStencilCompareMask);
112 X(vkCmdSetStencilReference);
113 X(vkCmdSetStencilWriteMask);
114 X(vkCmdSetViewport);
115 X(vkCmdWaitEvents);
116 X(vkCmdBindVertexBuffers2EXT);
117 X(vkCmdSetCullModeEXT);
118 X(vkCmdSetDepthBoundsTestEnableEXT);
119 X(vkCmdSetDepthCompareOpEXT);
120 X(vkCmdSetDepthTestEnableEXT);
121 X(vkCmdSetDepthWriteEnableEXT);
122 X(vkCmdSetFrontFaceEXT);
123 X(vkCmdSetPrimitiveTopologyEXT);
124 X(vkCmdSetStencilOpEXT);
125 X(vkCmdSetStencilTestEnableEXT);
126 X(vkCmdResolveImage);
127 X(vkCreateBuffer);
128 X(vkCreateBufferView);
129 X(vkCreateCommandPool);
130 X(vkCreateComputePipelines);
131 X(vkCreateDescriptorPool);
132 X(vkCreateDescriptorSetLayout);
133 X(vkCreateDescriptorUpdateTemplateKHR);
134 X(vkCreateEvent);
135 X(vkCreateFence);
136 X(vkCreateFramebuffer);
137 X(vkCreateGraphicsPipelines);
138 X(vkCreateImage);
139 X(vkCreateImageView);
140 X(vkCreatePipelineLayout);
141 X(vkCreateQueryPool);
142 X(vkCreateRenderPass);
143 X(vkCreateSampler);
144 X(vkCreateSemaphore);
145 X(vkCreateShaderModule);
146 X(vkCreateSwapchainKHR);
147 X(vkDestroyBuffer);
148 X(vkDestroyBufferView);
149 X(vkDestroyCommandPool);
150 X(vkDestroyDescriptorPool);
151 X(vkDestroyDescriptorSetLayout);
152 X(vkDestroyDescriptorUpdateTemplateKHR);
153 X(vkDestroyEvent);
154 X(vkDestroyFence);
155 X(vkDestroyFramebuffer);
156 X(vkDestroyImage);
157 X(vkDestroyImageView);
158 X(vkDestroyPipeline);
159 X(vkDestroyPipelineLayout);
160 X(vkDestroyQueryPool);
161 X(vkDestroyRenderPass);
162 X(vkDestroySampler);
163 X(vkDestroySemaphore);
164 X(vkDestroyShaderModule);
165 X(vkDestroySwapchainKHR);
166 X(vkDeviceWaitIdle);
167 X(vkEndCommandBuffer);
168 X(vkFreeCommandBuffers);
169 X(vkFreeDescriptorSets);
170 X(vkFreeMemory);
171 X(vkGetBufferMemoryRequirements);
172 X(vkGetDeviceQueue);
173 X(vkGetEventStatus);
174 X(vkGetFenceStatus);
175 X(vkGetImageMemoryRequirements);
176 X(vkGetQueryPoolResults);
177 X(vkGetSemaphoreCounterValueKHR);
178 X(vkMapMemory);
179 X(vkQueueSubmit);
180 X(vkResetFences);
181 X(vkResetQueryPoolEXT);
182 X(vkSetDebugUtilsObjectNameEXT);
183 X(vkSetDebugUtilsObjectTagEXT);
184 X(vkUnmapMemory);
185 X(vkUpdateDescriptorSetWithTemplateKHR);
186 X(vkUpdateDescriptorSets);
187 X(vkWaitForFences);
188 X(vkWaitSemaphoresKHR);
189#undef X
190}
191
192template <typename T>
193void SetObjectName(const DeviceDispatch* dld, VkDevice device, T handle, VkObjectType type,
194 const char* name) {
195 const VkDebugUtilsObjectNameInfoEXT name_info{
196 .sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT,
197 .pNext = nullptr,
198 .objectType = type,
199 .objectHandle = reinterpret_cast<u64>(handle),
200 .pObjectName = name,
201 };
202 Check(dld->vkSetDebugUtilsObjectNameEXT(device, &name_info));
203}
204
205} // Anonymous namespace
206
207bool Load(InstanceDispatch& dld) noexcept {
208#define X(name) Proc(dld.name, dld, #name)
209 return X(vkCreateInstance) && X(vkEnumerateInstanceExtensionProperties) &&
210 X(vkEnumerateInstanceLayerProperties);
211#undef X
212}
213
214bool Load(VkInstance instance, InstanceDispatch& dld) noexcept {
215#define X(name) Proc(dld.name, dld, #name, instance)
216 // These functions may fail to load depending on the enabled extensions.
217 // Don't return a failure on these.
218 X(vkCreateDebugUtilsMessengerEXT);
219 X(vkDestroyDebugUtilsMessengerEXT);
220 X(vkDestroySurfaceKHR);
221 X(vkGetPhysicalDeviceFeatures2KHR);
222 X(vkGetPhysicalDeviceProperties2KHR);
223 X(vkGetPhysicalDeviceSurfaceCapabilitiesKHR);
224 X(vkGetPhysicalDeviceSurfaceFormatsKHR);
225 X(vkGetPhysicalDeviceSurfacePresentModesKHR);
226 X(vkGetPhysicalDeviceSurfaceSupportKHR);
227 X(vkGetSwapchainImagesKHR);
228 X(vkQueuePresentKHR);
229
230 return X(vkCreateDevice) && X(vkDestroyDevice) &&
231 X(vkEnumerateDeviceExtensionProperties) && X(vkEnumeratePhysicalDevices) &&
232 X(vkGetDeviceProcAddr) && X(vkGetPhysicalDeviceFormatProperties) &&
233 X(vkGetPhysicalDeviceMemoryProperties) && X(vkGetPhysicalDeviceProperties) &&
234 X(vkGetPhysicalDeviceQueueFamilyProperties);
235#undef X
236}
237
238const char* Exception::what() const noexcept {
239 return ToString(result);
240}
241
242const char* ToString(VkResult result) noexcept {
243 switch (result) {
244 case VkResult::VK_SUCCESS:
245 return "VK_SUCCESS";
246 case VkResult::VK_NOT_READY:
247 return "VK_NOT_READY";
248 case VkResult::VK_TIMEOUT:
249 return "VK_TIMEOUT";
250 case VkResult::VK_EVENT_SET:
251 return "VK_EVENT_SET";
252 case VkResult::VK_EVENT_RESET:
253 return "VK_EVENT_RESET";
254 case VkResult::VK_INCOMPLETE:
255 return "VK_INCOMPLETE";
256 case VkResult::VK_ERROR_OUT_OF_HOST_MEMORY:
257 return "VK_ERROR_OUT_OF_HOST_MEMORY";
258 case VkResult::VK_ERROR_OUT_OF_DEVICE_MEMORY:
259 return "VK_ERROR_OUT_OF_DEVICE_MEMORY";
260 case VkResult::VK_ERROR_INITIALIZATION_FAILED:
261 return "VK_ERROR_INITIALIZATION_FAILED";
262 case VkResult::VK_ERROR_DEVICE_LOST:
263 return "VK_ERROR_DEVICE_LOST";
264 case VkResult::VK_ERROR_MEMORY_MAP_FAILED:
265 return "VK_ERROR_MEMORY_MAP_FAILED";
266 case VkResult::VK_ERROR_LAYER_NOT_PRESENT:
267 return "VK_ERROR_LAYER_NOT_PRESENT";
268 case VkResult::VK_ERROR_EXTENSION_NOT_PRESENT:
269 return "VK_ERROR_EXTENSION_NOT_PRESENT";
270 case VkResult::VK_ERROR_FEATURE_NOT_PRESENT:
271 return "VK_ERROR_FEATURE_NOT_PRESENT";
272 case VkResult::VK_ERROR_INCOMPATIBLE_DRIVER:
273 return "VK_ERROR_INCOMPATIBLE_DRIVER";
274 case VkResult::VK_ERROR_TOO_MANY_OBJECTS:
275 return "VK_ERROR_TOO_MANY_OBJECTS";
276 case VkResult::VK_ERROR_FORMAT_NOT_SUPPORTED:
277 return "VK_ERROR_FORMAT_NOT_SUPPORTED";
278 case VkResult::VK_ERROR_FRAGMENTED_POOL:
279 return "VK_ERROR_FRAGMENTED_POOL";
280 case VkResult::VK_ERROR_OUT_OF_POOL_MEMORY:
281 return "VK_ERROR_OUT_OF_POOL_MEMORY";
282 case VkResult::VK_ERROR_INVALID_EXTERNAL_HANDLE:
283 return "VK_ERROR_INVALID_EXTERNAL_HANDLE";
284 case VkResult::VK_ERROR_SURFACE_LOST_KHR:
285 return "VK_ERROR_SURFACE_LOST_KHR";
286 case VkResult::VK_ERROR_NATIVE_WINDOW_IN_USE_KHR:
287 return "VK_ERROR_NATIVE_WINDOW_IN_USE_KHR";
288 case VkResult::VK_SUBOPTIMAL_KHR:
289 return "VK_SUBOPTIMAL_KHR";
290 case VkResult::VK_ERROR_OUT_OF_DATE_KHR:
291 return "VK_ERROR_OUT_OF_DATE_KHR";
292 case VkResult::VK_ERROR_INCOMPATIBLE_DISPLAY_KHR:
293 return "VK_ERROR_INCOMPATIBLE_DISPLAY_KHR";
294 case VkResult::VK_ERROR_VALIDATION_FAILED_EXT:
295 return "VK_ERROR_VALIDATION_FAILED_EXT";
296 case VkResult::VK_ERROR_INVALID_SHADER_NV:
297 return "VK_ERROR_INVALID_SHADER_NV";
298 case VkResult::VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT:
299 return "VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT";
300 case VkResult::VK_ERROR_FRAGMENTATION_EXT:
301 return "VK_ERROR_FRAGMENTATION_EXT";
302 case VkResult::VK_ERROR_NOT_PERMITTED_EXT:
303 return "VK_ERROR_NOT_PERMITTED_EXT";
304 case VkResult::VK_ERROR_INVALID_DEVICE_ADDRESS_EXT:
305 return "VK_ERROR_INVALID_DEVICE_ADDRESS_EXT";
306 case VkResult::VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT:
307 return "VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT";
308 case VkResult::VK_ERROR_UNKNOWN:
309 return "VK_ERROR_UNKNOWN";
310 case VkResult::VK_ERROR_INCOMPATIBLE_VERSION_KHR:
311 return "VK_ERROR_INCOMPATIBLE_VERSION_KHR";
312 case VkResult::VK_THREAD_IDLE_KHR:
313 return "VK_THREAD_IDLE_KHR";
314 case VkResult::VK_THREAD_DONE_KHR:
315 return "VK_THREAD_DONE_KHR";
316 case VkResult::VK_OPERATION_DEFERRED_KHR:
317 return "VK_OPERATION_DEFERRED_KHR";
318 case VkResult::VK_OPERATION_NOT_DEFERRED_KHR:
319 return "VK_OPERATION_NOT_DEFERRED_KHR";
320 case VkResult::VK_PIPELINE_COMPILE_REQUIRED_EXT:
321 return "VK_PIPELINE_COMPILE_REQUIRED_EXT";
322 case VkResult::VK_RESULT_MAX_ENUM:
323 return "VK_RESULT_MAX_ENUM";
324 }
325 return "Unknown";
326}
327
328void Destroy(VkInstance instance, const InstanceDispatch& dld) noexcept {
329 dld.vkDestroyInstance(instance, nullptr);
330}
331
332void Destroy(VkDevice device, const InstanceDispatch& dld) noexcept {
333 dld.vkDestroyDevice(device, nullptr);
334}
335
336void Destroy(VkDevice device, VkBuffer handle, const DeviceDispatch& dld) noexcept {
337 dld.vkDestroyBuffer(device, handle, nullptr);
338}
339
340void Destroy(VkDevice device, VkBufferView handle, const DeviceDispatch& dld) noexcept {
341 dld.vkDestroyBufferView(device, handle, nullptr);
342}
343
344void Destroy(VkDevice device, VkCommandPool handle, const DeviceDispatch& dld) noexcept {
345 dld.vkDestroyCommandPool(device, handle, nullptr);
346}
347
348void Destroy(VkDevice device, VkDescriptorPool handle, const DeviceDispatch& dld) noexcept {
349 dld.vkDestroyDescriptorPool(device, handle, nullptr);
350}
351
352void Destroy(VkDevice device, VkDescriptorSetLayout handle, const DeviceDispatch& dld) noexcept {
353 dld.vkDestroyDescriptorSetLayout(device, handle, nullptr);
354}
355
356void Destroy(VkDevice device, VkDescriptorUpdateTemplateKHR handle,
357 const DeviceDispatch& dld) noexcept {
358 dld.vkDestroyDescriptorUpdateTemplateKHR(device, handle, nullptr);
359}
360
361void Destroy(VkDevice device, VkDeviceMemory handle, const DeviceDispatch& dld) noexcept {
362 dld.vkFreeMemory(device, handle, nullptr);
363}
364
365void Destroy(VkDevice device, VkEvent handle, const DeviceDispatch& dld) noexcept {
366 dld.vkDestroyEvent(device, handle, nullptr);
367}
368
369void Destroy(VkDevice device, VkFence handle, const DeviceDispatch& dld) noexcept {
370 dld.vkDestroyFence(device, handle, nullptr);
371}
372
373void Destroy(VkDevice device, VkFramebuffer handle, const DeviceDispatch& dld) noexcept {
374 dld.vkDestroyFramebuffer(device, handle, nullptr);
375}
376
377void Destroy(VkDevice device, VkImage handle, const DeviceDispatch& dld) noexcept {
378 dld.vkDestroyImage(device, handle, nullptr);
379}
380
381void Destroy(VkDevice device, VkImageView handle, const DeviceDispatch& dld) noexcept {
382 dld.vkDestroyImageView(device, handle, nullptr);
383}
384
385void Destroy(VkDevice device, VkPipeline handle, const DeviceDispatch& dld) noexcept {
386 dld.vkDestroyPipeline(device, handle, nullptr);
387}
388
389void Destroy(VkDevice device, VkPipelineLayout handle, const DeviceDispatch& dld) noexcept {
390 dld.vkDestroyPipelineLayout(device, handle, nullptr);
391}
392
393void Destroy(VkDevice device, VkQueryPool handle, const DeviceDispatch& dld) noexcept {
394 dld.vkDestroyQueryPool(device, handle, nullptr);
395}
396
397void Destroy(VkDevice device, VkRenderPass handle, const DeviceDispatch& dld) noexcept {
398 dld.vkDestroyRenderPass(device, handle, nullptr);
399}
400
401void Destroy(VkDevice device, VkSampler handle, const DeviceDispatch& dld) noexcept {
402 dld.vkDestroySampler(device, handle, nullptr);
403}
404
405void Destroy(VkDevice device, VkSwapchainKHR handle, const DeviceDispatch& dld) noexcept {
406 dld.vkDestroySwapchainKHR(device, handle, nullptr);
407}
408
409void Destroy(VkDevice device, VkSemaphore handle, const DeviceDispatch& dld) noexcept {
410 dld.vkDestroySemaphore(device, handle, nullptr);
411}
412
413void Destroy(VkDevice device, VkShaderModule handle, const DeviceDispatch& dld) noexcept {
414 dld.vkDestroyShaderModule(device, handle, nullptr);
415}
416
417void Destroy(VkInstance instance, VkDebugUtilsMessengerEXT handle,
418 const InstanceDispatch& dld) noexcept {
419 dld.vkDestroyDebugUtilsMessengerEXT(instance, handle, nullptr);
420}
421
422void Destroy(VkInstance instance, VkSurfaceKHR handle, const InstanceDispatch& dld) noexcept {
423 dld.vkDestroySurfaceKHR(instance, handle, nullptr);
424}
425
426VkResult Free(VkDevice device, VkDescriptorPool handle, Span<VkDescriptorSet> sets,
427 const DeviceDispatch& dld) noexcept {
428 return dld.vkFreeDescriptorSets(device, handle, sets.size(), sets.data());
429}
430
431VkResult Free(VkDevice device, VkCommandPool handle, Span<VkCommandBuffer> buffers,
432 const DeviceDispatch& dld) noexcept {
433 dld.vkFreeCommandBuffers(device, handle, buffers.size(), buffers.data());
434 return VK_SUCCESS;
435}
436
437Instance Instance::Create(u32 version, Span<const char*> layers, Span<const char*> extensions,
438 InstanceDispatch& dispatch) {
439 const VkApplicationInfo application_info{
440 .sType = VK_STRUCTURE_TYPE_APPLICATION_INFO,
441 .pNext = nullptr,
442 .pApplicationName = "yuzu Emulator",
443 .applicationVersion = VK_MAKE_VERSION(0, 1, 0),
444 .pEngineName = "yuzu Emulator",
445 .engineVersion = VK_MAKE_VERSION(0, 1, 0),
446 .apiVersion = version,
447 };
448 const VkInstanceCreateInfo ci{
449 .sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
450 .pNext = nullptr,
451 .flags = 0,
452 .pApplicationInfo = &application_info,
453 .enabledLayerCount = layers.size(),
454 .ppEnabledLayerNames = layers.data(),
455 .enabledExtensionCount = extensions.size(),
456 .ppEnabledExtensionNames = extensions.data(),
457 };
458 VkInstance instance;
459 Check(dispatch.vkCreateInstance(&ci, nullptr, &instance));
460 if (!Proc(dispatch.vkDestroyInstance, dispatch, "vkDestroyInstance", instance)) {
461 // We successfully created an instance but the destroy function couldn't be loaded.
462 // This is a good moment to panic.
463 throw vk::Exception(VK_ERROR_INITIALIZATION_FAILED);
464 }
465 return Instance(instance, dispatch);
466}
467
468std::vector<VkPhysicalDevice> Instance::EnumeratePhysicalDevices() const {
469 u32 num;
470 Check(dld->vkEnumeratePhysicalDevices(handle, &num, nullptr));
471 std::vector<VkPhysicalDevice> physical_devices(num);
472 Check(dld->vkEnumeratePhysicalDevices(handle, &num, physical_devices.data()));
473 SortPhysicalDevices(physical_devices, *dld);
474 return physical_devices;
475}
476
477DebugUtilsMessenger Instance::CreateDebugUtilsMessenger(
478 const VkDebugUtilsMessengerCreateInfoEXT& create_info) const {
479 VkDebugUtilsMessengerEXT object;
480 Check(dld->vkCreateDebugUtilsMessengerEXT(handle, &create_info, nullptr, &object));
481 return DebugUtilsMessenger(object, handle, *dld);
482}
483
484void Buffer::BindMemory(VkDeviceMemory memory, VkDeviceSize offset) const {
485 Check(dld->vkBindBufferMemory(owner, handle, memory, offset));
486}
487
488void Buffer::SetObjectNameEXT(const char* name) const {
489 SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_BUFFER, name);
490}
491
492void BufferView::SetObjectNameEXT(const char* name) const {
493 SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_BUFFER_VIEW, name);
494}
495
496void Image::BindMemory(VkDeviceMemory memory, VkDeviceSize offset) const {
497 Check(dld->vkBindImageMemory(owner, handle, memory, offset));
498}
499
500void Image::SetObjectNameEXT(const char* name) const {
501 SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_IMAGE, name);
502}
503
504void ImageView::SetObjectNameEXT(const char* name) const {
505 SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_IMAGE_VIEW, name);
506}
507
508void DeviceMemory::SetObjectNameEXT(const char* name) const {
509 SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_DEVICE_MEMORY, name);
510}
511
512void Fence::SetObjectNameEXT(const char* name) const {
513 SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_FENCE, name);
514}
515
516void Framebuffer::SetObjectNameEXT(const char* name) const {
517 SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_FRAMEBUFFER, name);
518}
519
520DescriptorSets DescriptorPool::Allocate(const VkDescriptorSetAllocateInfo& ai) const {
521 const std::size_t num = ai.descriptorSetCount;
522 std::unique_ptr sets = std::make_unique<VkDescriptorSet[]>(num);
523 switch (const VkResult result = dld->vkAllocateDescriptorSets(owner, &ai, sets.get())) {
524 case VK_SUCCESS:
525 return DescriptorSets(std::move(sets), num, owner, handle, *dld);
526 case VK_ERROR_OUT_OF_POOL_MEMORY:
527 return {};
528 default:
529 throw Exception(result);
530 }
531}
532
533void DescriptorPool::SetObjectNameEXT(const char* name) const {
534 SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_DESCRIPTOR_POOL, name);
535}
536
537CommandBuffers CommandPool::Allocate(std::size_t num_buffers, VkCommandBufferLevel level) const {
538 const VkCommandBufferAllocateInfo ai{
539 .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
540 .pNext = nullptr,
541 .commandPool = handle,
542 .level = level,
543 .commandBufferCount = static_cast<u32>(num_buffers),
544 };
545
546 std::unique_ptr buffers = std::make_unique<VkCommandBuffer[]>(num_buffers);
547 switch (const VkResult result = dld->vkAllocateCommandBuffers(owner, &ai, buffers.get())) {
548 case VK_SUCCESS:
549 return CommandBuffers(std::move(buffers), num_buffers, owner, handle, *dld);
550 case VK_ERROR_OUT_OF_POOL_MEMORY:
551 return {};
552 default:
553 throw Exception(result);
554 }
555}
556
557void CommandPool::SetObjectNameEXT(const char* name) const {
558 SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_COMMAND_POOL, name);
559}
560
561std::vector<VkImage> SwapchainKHR::GetImages() const {
562 u32 num;
563 Check(dld->vkGetSwapchainImagesKHR(owner, handle, &num, nullptr));
564 std::vector<VkImage> images(num);
565 Check(dld->vkGetSwapchainImagesKHR(owner, handle, &num, images.data()));
566 return images;
567}
568
569void Event::SetObjectNameEXT(const char* name) const {
570 SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_EVENT, name);
571}
572
573void ShaderModule::SetObjectNameEXT(const char* name) const {
574 SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_SHADER_MODULE, name);
575}
576
577void Semaphore::SetObjectNameEXT(const char* name) const {
578 SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_SEMAPHORE, name);
579}
580
581Device Device::Create(VkPhysicalDevice physical_device, Span<VkDeviceQueueCreateInfo> queues_ci,
582 Span<const char*> enabled_extensions, const void* next,
583 DeviceDispatch& dispatch) {
584 const VkDeviceCreateInfo ci{
585 .sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
586 .pNext = next,
587 .flags = 0,
588 .queueCreateInfoCount = queues_ci.size(),
589 .pQueueCreateInfos = queues_ci.data(),
590 .enabledLayerCount = 0,
591 .ppEnabledLayerNames = nullptr,
592 .enabledExtensionCount = enabled_extensions.size(),
593 .ppEnabledExtensionNames = enabled_extensions.data(),
594 .pEnabledFeatures = nullptr,
595 };
596 VkDevice device;
597 Check(dispatch.vkCreateDevice(physical_device, &ci, nullptr, &device));
598 Load(device, dispatch);
599 return Device(device, dispatch);
600}
601
602Queue Device::GetQueue(u32 family_index) const noexcept {
603 VkQueue queue;
604 dld->vkGetDeviceQueue(handle, family_index, 0, &queue);
605 return Queue(queue, *dld);
606}
607
608Buffer Device::CreateBuffer(const VkBufferCreateInfo& ci) const {
609 VkBuffer object;
610 Check(dld->vkCreateBuffer(handle, &ci, nullptr, &object));
611 return Buffer(object, handle, *dld);
612}
613
614BufferView Device::CreateBufferView(const VkBufferViewCreateInfo& ci) const {
615 VkBufferView object;
616 Check(dld->vkCreateBufferView(handle, &ci, nullptr, &object));
617 return BufferView(object, handle, *dld);
618}
619
620Image Device::CreateImage(const VkImageCreateInfo& ci) const {
621 VkImage object;
622 Check(dld->vkCreateImage(handle, &ci, nullptr, &object));
623 return Image(object, handle, *dld);
624}
625
626ImageView Device::CreateImageView(const VkImageViewCreateInfo& ci) const {
627 VkImageView object;
628 Check(dld->vkCreateImageView(handle, &ci, nullptr, &object));
629 return ImageView(object, handle, *dld);
630}
631
632Semaphore Device::CreateSemaphore() const {
633 static constexpr VkSemaphoreCreateInfo ci{
634 .sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
635 .pNext = nullptr,
636 .flags = 0,
637 };
638 return CreateSemaphore(ci);
639}
640
641Semaphore Device::CreateSemaphore(const VkSemaphoreCreateInfo& ci) const {
642 VkSemaphore object;
643 Check(dld->vkCreateSemaphore(handle, &ci, nullptr, &object));
644 return Semaphore(object, handle, *dld);
645}
646
647Fence Device::CreateFence(const VkFenceCreateInfo& ci) const {
648 VkFence object;
649 Check(dld->vkCreateFence(handle, &ci, nullptr, &object));
650 return Fence(object, handle, *dld);
651}
652
653DescriptorPool Device::CreateDescriptorPool(const VkDescriptorPoolCreateInfo& ci) const {
654 VkDescriptorPool object;
655 Check(dld->vkCreateDescriptorPool(handle, &ci, nullptr, &object));
656 return DescriptorPool(object, handle, *dld);
657}
658
659RenderPass Device::CreateRenderPass(const VkRenderPassCreateInfo& ci) const {
660 VkRenderPass object;
661 Check(dld->vkCreateRenderPass(handle, &ci, nullptr, &object));
662 return RenderPass(object, handle, *dld);
663}
664
665DescriptorSetLayout Device::CreateDescriptorSetLayout(
666 const VkDescriptorSetLayoutCreateInfo& ci) const {
667 VkDescriptorSetLayout object;
668 Check(dld->vkCreateDescriptorSetLayout(handle, &ci, nullptr, &object));
669 return DescriptorSetLayout(object, handle, *dld);
670}
671
672PipelineLayout Device::CreatePipelineLayout(const VkPipelineLayoutCreateInfo& ci) const {
673 VkPipelineLayout object;
674 Check(dld->vkCreatePipelineLayout(handle, &ci, nullptr, &object));
675 return PipelineLayout(object, handle, *dld);
676}
677
678Pipeline Device::CreateGraphicsPipeline(const VkGraphicsPipelineCreateInfo& ci) const {
679 VkPipeline object;
680 Check(dld->vkCreateGraphicsPipelines(handle, nullptr, 1, &ci, nullptr, &object));
681 return Pipeline(object, handle, *dld);
682}
683
684Pipeline Device::CreateComputePipeline(const VkComputePipelineCreateInfo& ci) const {
685 VkPipeline object;
686 Check(dld->vkCreateComputePipelines(handle, nullptr, 1, &ci, nullptr, &object));
687 return Pipeline(object, handle, *dld);
688}
689
690Sampler Device::CreateSampler(const VkSamplerCreateInfo& ci) const {
691 VkSampler object;
692 Check(dld->vkCreateSampler(handle, &ci, nullptr, &object));
693 return Sampler(object, handle, *dld);
694}
695
696Framebuffer Device::CreateFramebuffer(const VkFramebufferCreateInfo& ci) const {
697 VkFramebuffer object;
698 Check(dld->vkCreateFramebuffer(handle, &ci, nullptr, &object));
699 return Framebuffer(object, handle, *dld);
700}
701
702CommandPool Device::CreateCommandPool(const VkCommandPoolCreateInfo& ci) const {
703 VkCommandPool object;
704 Check(dld->vkCreateCommandPool(handle, &ci, nullptr, &object));
705 return CommandPool(object, handle, *dld);
706}
707
708DescriptorUpdateTemplateKHR Device::CreateDescriptorUpdateTemplateKHR(
709 const VkDescriptorUpdateTemplateCreateInfoKHR& ci) const {
710 VkDescriptorUpdateTemplateKHR object;
711 Check(dld->vkCreateDescriptorUpdateTemplateKHR(handle, &ci, nullptr, &object));
712 return DescriptorUpdateTemplateKHR(object, handle, *dld);
713}
714
715QueryPool Device::CreateQueryPool(const VkQueryPoolCreateInfo& ci) const {
716 VkQueryPool object;
717 Check(dld->vkCreateQueryPool(handle, &ci, nullptr, &object));
718 return QueryPool(object, handle, *dld);
719}
720
721ShaderModule Device::CreateShaderModule(const VkShaderModuleCreateInfo& ci) const {
722 VkShaderModule object;
723 Check(dld->vkCreateShaderModule(handle, &ci, nullptr, &object));
724 return ShaderModule(object, handle, *dld);
725}
726
727Event Device::CreateEvent() const {
728 static constexpr VkEventCreateInfo ci{
729 .sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO,
730 .pNext = nullptr,
731 .flags = 0,
732 };
733
734 VkEvent object;
735 Check(dld->vkCreateEvent(handle, &ci, nullptr, &object));
736 return Event(object, handle, *dld);
737}
738
739SwapchainKHR Device::CreateSwapchainKHR(const VkSwapchainCreateInfoKHR& ci) const {
740 VkSwapchainKHR object;
741 Check(dld->vkCreateSwapchainKHR(handle, &ci, nullptr, &object));
742 return SwapchainKHR(object, handle, *dld);
743}
744
745DeviceMemory Device::TryAllocateMemory(const VkMemoryAllocateInfo& ai) const noexcept {
746 VkDeviceMemory memory;
747 if (dld->vkAllocateMemory(handle, &ai, nullptr, &memory) != VK_SUCCESS) {
748 return {};
749 }
750 return DeviceMemory(memory, handle, *dld);
751}
752
753DeviceMemory Device::AllocateMemory(const VkMemoryAllocateInfo& ai) const {
754 VkDeviceMemory memory;
755 Check(dld->vkAllocateMemory(handle, &ai, nullptr, &memory));
756 return DeviceMemory(memory, handle, *dld);
757}
758
759VkMemoryRequirements Device::GetBufferMemoryRequirements(VkBuffer buffer) const noexcept {
760 VkMemoryRequirements requirements;
761 dld->vkGetBufferMemoryRequirements(handle, buffer, &requirements);
762 return requirements;
763}
764
765VkMemoryRequirements Device::GetImageMemoryRequirements(VkImage image) const noexcept {
766 VkMemoryRequirements requirements;
767 dld->vkGetImageMemoryRequirements(handle, image, &requirements);
768 return requirements;
769}
770
771void Device::UpdateDescriptorSets(Span<VkWriteDescriptorSet> writes,
772 Span<VkCopyDescriptorSet> copies) const noexcept {
773 dld->vkUpdateDescriptorSets(handle, writes.size(), writes.data(), copies.size(), copies.data());
774}
775
776VkPhysicalDeviceProperties PhysicalDevice::GetProperties() const noexcept {
777 VkPhysicalDeviceProperties properties;
778 dld->vkGetPhysicalDeviceProperties(physical_device, &properties);
779 return properties;
780}
781
782void PhysicalDevice::GetProperties2KHR(VkPhysicalDeviceProperties2KHR& properties) const noexcept {
783 dld->vkGetPhysicalDeviceProperties2KHR(physical_device, &properties);
784}
785
786VkPhysicalDeviceFeatures PhysicalDevice::GetFeatures() const noexcept {
787 VkPhysicalDeviceFeatures2KHR features2;
788 features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR;
789 features2.pNext = nullptr;
790 dld->vkGetPhysicalDeviceFeatures2KHR(physical_device, &features2);
791 return features2.features;
792}
793
794void PhysicalDevice::GetFeatures2KHR(VkPhysicalDeviceFeatures2KHR& features) const noexcept {
795 dld->vkGetPhysicalDeviceFeatures2KHR(physical_device, &features);
796}
797
798VkFormatProperties PhysicalDevice::GetFormatProperties(VkFormat format) const noexcept {
799 VkFormatProperties properties;
800 dld->vkGetPhysicalDeviceFormatProperties(physical_device, format, &properties);
801 return properties;
802}
803
804std::vector<VkExtensionProperties> PhysicalDevice::EnumerateDeviceExtensionProperties() const {
805 u32 num;
806 dld->vkEnumerateDeviceExtensionProperties(physical_device, nullptr, &num, nullptr);
807 std::vector<VkExtensionProperties> properties(num);
808 dld->vkEnumerateDeviceExtensionProperties(physical_device, nullptr, &num, properties.data());
809 return properties;
810}
811
812std::vector<VkQueueFamilyProperties> PhysicalDevice::GetQueueFamilyProperties() const {
813 u32 num;
814 dld->vkGetPhysicalDeviceQueueFamilyProperties(physical_device, &num, nullptr);
815 std::vector<VkQueueFamilyProperties> properties(num);
816 dld->vkGetPhysicalDeviceQueueFamilyProperties(physical_device, &num, properties.data());
817 return properties;
818}
819
820bool PhysicalDevice::GetSurfaceSupportKHR(u32 queue_family_index, VkSurfaceKHR surface) const {
821 VkBool32 supported;
822 Check(dld->vkGetPhysicalDeviceSurfaceSupportKHR(physical_device, queue_family_index, surface,
823 &supported));
824 return supported == VK_TRUE;
825}
826
827VkSurfaceCapabilitiesKHR PhysicalDevice::GetSurfaceCapabilitiesKHR(VkSurfaceKHR surface) const {
828 VkSurfaceCapabilitiesKHR capabilities;
829 Check(dld->vkGetPhysicalDeviceSurfaceCapabilitiesKHR(physical_device, surface, &capabilities));
830 return capabilities;
831}
832
833std::vector<VkSurfaceFormatKHR> PhysicalDevice::GetSurfaceFormatsKHR(VkSurfaceKHR surface) const {
834 u32 num;
835 Check(dld->vkGetPhysicalDeviceSurfaceFormatsKHR(physical_device, surface, &num, nullptr));
836 std::vector<VkSurfaceFormatKHR> formats(num);
837 Check(
838 dld->vkGetPhysicalDeviceSurfaceFormatsKHR(physical_device, surface, &num, formats.data()));
839 return formats;
840}
841
842std::vector<VkPresentModeKHR> PhysicalDevice::GetSurfacePresentModesKHR(
843 VkSurfaceKHR surface) const {
844 u32 num;
845 Check(dld->vkGetPhysicalDeviceSurfacePresentModesKHR(physical_device, surface, &num, nullptr));
846 std::vector<VkPresentModeKHR> modes(num);
847 Check(dld->vkGetPhysicalDeviceSurfacePresentModesKHR(physical_device, surface, &num,
848 modes.data()));
849 return modes;
850}
851
852VkPhysicalDeviceMemoryProperties PhysicalDevice::GetMemoryProperties() const noexcept {
853 VkPhysicalDeviceMemoryProperties properties;
854 dld->vkGetPhysicalDeviceMemoryProperties(physical_device, &properties);
855 return properties;
856}
857
858u32 AvailableVersion(const InstanceDispatch& dld) noexcept {
859 PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion;
860 if (!Proc(vkEnumerateInstanceVersion, dld, "vkEnumerateInstanceVersion")) {
861 // If the procedure is not found, Vulkan 1.0 is assumed
862 return VK_API_VERSION_1_0;
863 }
864 u32 version;
865 if (const VkResult result = vkEnumerateInstanceVersion(&version); result != VK_SUCCESS) {
866 LOG_ERROR(Render_Vulkan, "vkEnumerateInstanceVersion returned {}, assuming Vulkan 1.1",
867 ToString(result));
868 return VK_API_VERSION_1_1;
869 }
870 return version;
871}
872
873std::optional<std::vector<VkExtensionProperties>> EnumerateInstanceExtensionProperties(
874 const InstanceDispatch& dld) {
875 u32 num;
876 if (dld.vkEnumerateInstanceExtensionProperties(nullptr, &num, nullptr) != VK_SUCCESS) {
877 return std::nullopt;
878 }
879 std::vector<VkExtensionProperties> properties(num);
880 if (dld.vkEnumerateInstanceExtensionProperties(nullptr, &num, properties.data()) !=
881 VK_SUCCESS) {
882 return std::nullopt;
883 }
884 return properties;
885}
886
887std::optional<std::vector<VkLayerProperties>> EnumerateInstanceLayerProperties(
888 const InstanceDispatch& dld) {
889 u32 num;
890 if (dld.vkEnumerateInstanceLayerProperties(&num, nullptr) != VK_SUCCESS) {
891 return std::nullopt;
892 }
893 std::vector<VkLayerProperties> properties(num);
894 if (dld.vkEnumerateInstanceLayerProperties(&num, properties.data()) != VK_SUCCESS) {
895 return std::nullopt;
896 }
897 return properties;
898}
899
900} // namespace Vulkan::vk
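
As a brief illustration of the RAII and Check/Exception pattern implemented above, here is a caller-side sketch; it assumes `device` is an already-created vk::Device, and the buffer parameters are placeholders rather than values used by yuzu:

#include "common/logging/log.h"
#include "video_core/vulkan_common/vulkan_wrapper.h"

void CreateExampleBuffer(const Vulkan::vk::Device& device) {
    using namespace Vulkan;
    const VkBufferCreateInfo buffer_ci{
        .sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
        .pNext = nullptr,
        .flags = 0,
        .size = 64 * 1024,
        .usage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,
        .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
        .queueFamilyIndexCount = 0,
        .pQueueFamilyIndices = nullptr,
    };
    try {
        vk::Buffer buffer = device.CreateBuffer(buffer_ci); // Check() throws vk::Exception on error
        buffer.SetObjectNameEXT("example buffer");          // assumes VK_EXT_debug_utils was loaded
        // The VkBuffer handle is released automatically when `buffer` leaves scope.
    } catch (const vk::Exception& exception) {
        LOG_ERROR(Render_Vulkan, "Buffer creation failed: {}", exception.what());
    }
}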
diff --git a/src/video_core/vulkan_common/vulkan_wrapper.h b/src/video_core/vulkan_common/vulkan_wrapper.h
new file mode 100644
index 000000000..912cab46c
--- /dev/null
+++ b/src/video_core/vulkan_common/vulkan_wrapper.h
@@ -0,0 +1,1222 @@
1// Copyright 2020 yuzu Emulator Project
2// Licensed under GPLv2 or any later version
3// Refer to the license.txt file included.
4
5#pragma once
6
7#include <exception>
8#include <iterator>
9#include <limits>
10#include <memory>
11#include <optional>
12#include <span>
13#include <type_traits>
14#include <utility>
15#include <vector>
16
17#define VK_NO_PROTOTYPES
18#include <vulkan/vulkan.h>
19
20#include "common/common_types.h"
21
22#ifdef _MSC_VER
23#pragma warning(disable : 26812) // Disable prefer enum class over enum
24#endif
25
26namespace Vulkan::vk {
27
28/**
29 * Span for Vulkan arrays.
30 * Based on std::span but optimized for array access instead of iterators.
31 * Size returns uint32_t instead of size_t to ease interaction with Vulkan functions.
32 */
33template <typename T>
34class Span {
35public:
36 using value_type = T;
37 using size_type = u32;
38 using difference_type = std::ptrdiff_t;
39 using reference = const T&;
40 using const_reference = const T&;
41 using pointer = const T*;
42 using const_pointer = const T*;
43 using iterator = const T*;
44 using const_iterator = const T*;
45
46 /// Construct an empty span.
47 constexpr Span() noexcept = default;
48
49 /// Construct an empty span.
50 constexpr Span(std::nullptr_t) noexcept {}
51
52 /// Construct a span from a single element.
53 constexpr Span(const T& value) noexcept : ptr{&value}, num{1} {}
54
55 /// Construct a span from a range.
56 template <typename Range>
57 // requires std::data(const Range&)
58 // requires std::size(const Range&)
59 constexpr Span(const Range& range) : ptr{std::data(range)}, num{std::size(range)} {}
60
61 /// Construct a span from a pointer and a size.
62 /// This is intended for subranges.
63 constexpr Span(const T* ptr_, std::size_t num_) noexcept : ptr{ptr_}, num{num_} {}
64
65 /// Returns the pointer to the data held by the span.
66 constexpr const T* data() const noexcept {
67 return ptr;
68 }
69
70 /// Returns the number of elements in the span.
71 /// @note Returns a 32-bit integer because most Vulkan functions expect this type.
72 constexpr u32 size() const noexcept {
73 return static_cast<u32>(num);
74 }
75
76 /// Returns true when the span is empty.
77 constexpr bool empty() const noexcept {
78 return num == 0;
79 }
80
81 /// Returns a reference to the element in the passed index.
82 /// @pre index < size()
83 constexpr const T& operator[](std::size_t index) const noexcept {
84 return ptr[index];
85 }
86
87 /// Returns an iterator to the beginning of the span.
88 constexpr const T* begin() const noexcept {
89 return ptr;
90 }
91
92 /// Returns an iterator to the end of the span.
93 constexpr const T* end() const noexcept {
94 return ptr + num;
95 }
96
97 /// Returns an iterator to the beginning of the span.
98 constexpr const T* cbegin() const noexcept {
99 return ptr;
100 }
101
102 /// Returns an iterator to the end of the span.
103 constexpr const T* cend() const noexcept {
104 return ptr + num;
105 }
106
107private:
108 const T* ptr = nullptr;
109 std::size_t num = 0;
110};
111
112/// Vulkan exception generated from a VkResult.
113class Exception final : public std::exception {
114public:
115 /// Construct the exception with a result.
116 /// @pre result != VK_SUCCESS
117 explicit Exception(VkResult result_) : result{result_} {}
118 virtual ~Exception() = default;
119
120 const char* what() const noexcept override;
121
122private:
123 VkResult result;
124};
125
126/// Converts a VkResult enum into a rodata string
127const char* ToString(VkResult) noexcept;
128
129/// Throws a Vulkan exception if result is not success.
130inline void Check(VkResult result) {
131 if (result != VK_SUCCESS) {
132 throw Exception(result);
133 }
134}
135
136/// Throws a Vulkan exception if result is an error.
137/// @return result
138inline VkResult Filter(VkResult result) {
139 if (result < 0) {
140 throw Exception(result);
141 }
142 return result;
143}
144
145/// Table holding Vulkan instance function pointers.
146struct InstanceDispatch {
147 PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;
148
149 PFN_vkCreateInstance vkCreateInstance;
150 PFN_vkDestroyInstance vkDestroyInstance;
151 PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties;
152 PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties;
153
154 PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT;
155 PFN_vkCreateDevice vkCreateDevice;
156 PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT;
157 PFN_vkDestroyDevice vkDestroyDevice;
158 PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR;
159 PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties;
160 PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices;
161 PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr;
162 PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR;
163 PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties;
164 PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
165 PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
166 PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR;
167 PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties;
168 PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR;
169 PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR;
170 PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR;
171 PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR;
172 PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR;
173 PFN_vkQueuePresentKHR vkQueuePresentKHR;
174};
175
176/// Table holding Vulkan device function pointers.
177struct DeviceDispatch : public InstanceDispatch {
178 PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR;
179 PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers;
180 PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets;
181 PFN_vkAllocateMemory vkAllocateMemory;
182 PFN_vkBeginCommandBuffer vkBeginCommandBuffer;
183 PFN_vkBindBufferMemory vkBindBufferMemory;
184 PFN_vkBindImageMemory vkBindImageMemory;
185 PFN_vkCmdBeginQuery vkCmdBeginQuery;
186 PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass;
187 PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT;
188 PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT;
189 PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets;
190 PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer;
191 PFN_vkCmdBindPipeline vkCmdBindPipeline;
192 PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT;
193 PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers;
194 PFN_vkCmdBlitImage vkCmdBlitImage;
195 PFN_vkCmdClearAttachments vkCmdClearAttachments;
196 PFN_vkCmdCopyBuffer vkCmdCopyBuffer;
197 PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage;
198 PFN_vkCmdCopyImage vkCmdCopyImage;
199 PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer;
200 PFN_vkCmdDispatch vkCmdDispatch;
201 PFN_vkCmdDraw vkCmdDraw;
202 PFN_vkCmdDrawIndexed vkCmdDrawIndexed;
203 PFN_vkCmdEndQuery vkCmdEndQuery;
204 PFN_vkCmdEndRenderPass vkCmdEndRenderPass;
205 PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT;
206 PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT;
207 PFN_vkCmdFillBuffer vkCmdFillBuffer;
208 PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier;
209 PFN_vkCmdPushConstants vkCmdPushConstants;
210 PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants;
211 PFN_vkCmdSetDepthBias vkCmdSetDepthBias;
212 PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds;
213 PFN_vkCmdSetEvent vkCmdSetEvent;
214 PFN_vkCmdSetScissor vkCmdSetScissor;
215 PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask;
216 PFN_vkCmdSetStencilReference vkCmdSetStencilReference;
217 PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask;
218 PFN_vkCmdSetViewport vkCmdSetViewport;
219 PFN_vkCmdWaitEvents vkCmdWaitEvents;
220 PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT;
221 PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT;
222 PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT;
223 PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT;
224 PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT;
225 PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT;
226 PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT;
227 PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT;
228 PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT;
229 PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT;
230 PFN_vkCmdResolveImage vkCmdResolveImage;
231 PFN_vkCreateBuffer vkCreateBuffer;
232 PFN_vkCreateBufferView vkCreateBufferView;
233 PFN_vkCreateCommandPool vkCreateCommandPool;
234 PFN_vkCreateComputePipelines vkCreateComputePipelines;
235 PFN_vkCreateDescriptorPool vkCreateDescriptorPool;
236 PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout;
237 PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR;
238 PFN_vkCreateEvent vkCreateEvent;
239 PFN_vkCreateFence vkCreateFence;
240 PFN_vkCreateFramebuffer vkCreateFramebuffer;
241 PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines;
242 PFN_vkCreateImage vkCreateImage;
243 PFN_vkCreateImageView vkCreateImageView;
244 PFN_vkCreatePipelineLayout vkCreatePipelineLayout;
245 PFN_vkCreateQueryPool vkCreateQueryPool;
246 PFN_vkCreateRenderPass vkCreateRenderPass;
247 PFN_vkCreateSampler vkCreateSampler;
248 PFN_vkCreateSemaphore vkCreateSemaphore;
249 PFN_vkCreateShaderModule vkCreateShaderModule;
250 PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR;
251 PFN_vkDestroyBuffer vkDestroyBuffer;
252 PFN_vkDestroyBufferView vkDestroyBufferView;
253 PFN_vkDestroyCommandPool vkDestroyCommandPool;
254 PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool;
255 PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout;
256 PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR;
257 PFN_vkDestroyEvent vkDestroyEvent;
258 PFN_vkDestroyFence vkDestroyFence;
259 PFN_vkDestroyFramebuffer vkDestroyFramebuffer;
260 PFN_vkDestroyImage vkDestroyImage;
261 PFN_vkDestroyImageView vkDestroyImageView;
262 PFN_vkDestroyPipeline vkDestroyPipeline;
263 PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout;
264 PFN_vkDestroyQueryPool vkDestroyQueryPool;
265 PFN_vkDestroyRenderPass vkDestroyRenderPass;
266 PFN_vkDestroySampler vkDestroySampler;
267 PFN_vkDestroySemaphore vkDestroySemaphore;
268 PFN_vkDestroyShaderModule vkDestroyShaderModule;
269 PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR;
270 PFN_vkDeviceWaitIdle vkDeviceWaitIdle;
271 PFN_vkEndCommandBuffer vkEndCommandBuffer;
272 PFN_vkFreeCommandBuffers vkFreeCommandBuffers;
273 PFN_vkFreeDescriptorSets vkFreeDescriptorSets;
274 PFN_vkFreeMemory vkFreeMemory;
275 PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
276 PFN_vkGetDeviceQueue vkGetDeviceQueue;
277 PFN_vkGetEventStatus vkGetEventStatus;
278 PFN_vkGetFenceStatus vkGetFenceStatus;
279 PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
280 PFN_vkGetQueryPoolResults vkGetQueryPoolResults;
281 PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR;
282 PFN_vkMapMemory vkMapMemory;
283 PFN_vkQueueSubmit vkQueueSubmit;
284 PFN_vkResetFences vkResetFences;
285 PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT;
286 PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT;
287 PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT;
288 PFN_vkUnmapMemory vkUnmapMemory;
289 PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR;
290 PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets;
291 PFN_vkWaitForFences vkWaitForFences;
292 PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR;
293};
294
295/// Loads instance-agnostic function pointers.
296/// @return True on success, false on error.
297bool Load(InstanceDispatch&) noexcept;
298
299/// Loads instance function pointers.
300/// @return True on success, false on error.
301bool Load(VkInstance, InstanceDispatch&) noexcept;
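
// Bootstrap sketch (hypothetical, error handling elided): the dispatch tables are filled in two
// stages, first with loader-level entry points and then with instance-level ones.
//
//     vk::InstanceDispatch dld{};
//     dld.vkGetInstanceProcAddr = /* resolved from the Vulkan shared library */;
//     if (!vk::Load(dld)) { /* missing global entry points */ }
//     // ... create a VkInstance through dld.vkCreateInstance ...
//     if (!vk::Load(instance, dld)) { /* missing instance entry points */ }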
302
303void Destroy(VkInstance, const InstanceDispatch&) noexcept;
304void Destroy(VkDevice, const InstanceDispatch&) noexcept;
305
306void Destroy(VkDevice, VkBuffer, const DeviceDispatch&) noexcept;
307void Destroy(VkDevice, VkBufferView, const DeviceDispatch&) noexcept;
308void Destroy(VkDevice, VkCommandPool, const DeviceDispatch&) noexcept;
309void Destroy(VkDevice, VkDescriptorPool, const DeviceDispatch&) noexcept;
310void Destroy(VkDevice, VkDescriptorSetLayout, const DeviceDispatch&) noexcept;
311void Destroy(VkDevice, VkDescriptorUpdateTemplateKHR, const DeviceDispatch&) noexcept;
312void Destroy(VkDevice, VkDeviceMemory, const DeviceDispatch&) noexcept;
313void Destroy(VkDevice, VkEvent, const DeviceDispatch&) noexcept;
314void Destroy(VkDevice, VkFence, const DeviceDispatch&) noexcept;
315void Destroy(VkDevice, VkFramebuffer, const DeviceDispatch&) noexcept;
316void Destroy(VkDevice, VkImage, const DeviceDispatch&) noexcept;
317void Destroy(VkDevice, VkImageView, const DeviceDispatch&) noexcept;
318void Destroy(VkDevice, VkPipeline, const DeviceDispatch&) noexcept;
319void Destroy(VkDevice, VkPipelineLayout, const DeviceDispatch&) noexcept;
320void Destroy(VkDevice, VkQueryPool, const DeviceDispatch&) noexcept;
321void Destroy(VkDevice, VkRenderPass, const DeviceDispatch&) noexcept;
322void Destroy(VkDevice, VkSampler, const DeviceDispatch&) noexcept;
323void Destroy(VkDevice, VkSwapchainKHR, const DeviceDispatch&) noexcept;
324void Destroy(VkDevice, VkSemaphore, const DeviceDispatch&) noexcept;
325void Destroy(VkDevice, VkShaderModule, const DeviceDispatch&) noexcept;
326void Destroy(VkInstance, VkDebugUtilsMessengerEXT, const InstanceDispatch&) noexcept;
327void Destroy(VkInstance, VkSurfaceKHR, const InstanceDispatch&) noexcept;
328
329VkResult Free(VkDevice, VkDescriptorPool, Span<VkDescriptorSet>, const DeviceDispatch&) noexcept;
330VkResult Free(VkDevice, VkCommandPool, Span<VkCommandBuffer>, const DeviceDispatch&) noexcept;
331
332template <typename Type, typename OwnerType, typename Dispatch>
333class Handle;
334
335/// Handle with an owning type.
336/// Analogue to std::unique_ptr.
337template <typename Type, typename OwnerType, typename Dispatch>
338class Handle {
339public:
340 /// Construct a handle and hold its ownership.
341 explicit Handle(Type handle_, OwnerType owner_, const Dispatch& dld_) noexcept
342 : handle{handle_}, owner{owner_}, dld{&dld_} {}
343
344 /// Construct an empty handle.
345 Handle() = default;
346
347 /// Copying Vulkan objects is not supported and will never be.
348 Handle(const Handle&) = delete;
349 Handle& operator=(const Handle&) = delete;
350
351 /// Construct a handle transferring ownership from another handle.
352 Handle(Handle&& rhs) noexcept
353 : handle{std::exchange(rhs.handle, nullptr)}, owner{rhs.owner}, dld{rhs.dld} {}
354
355 /// Assign the current handle transferring ownership from another handle.
356 /// Destroys any previously held object.
357 Handle& operator=(Handle&& rhs) noexcept {
358 Release();
359 handle = std::exchange(rhs.handle, nullptr);
360 owner = rhs.owner;
361 dld = rhs.dld;
362 return *this;
363 }
364
365 /// Destroys the current handle if it existed.
366 ~Handle() noexcept {
367 Release();
368 }
369
370 /// Destroys any held object.
371 void reset() noexcept {
372 Release();
373 handle = nullptr;
374 }
375
376 /// Returns the address of the held object.
377 /// Intended for Vulkan structures that expect a pointer to an array.
378 const Type* address() const noexcept {
379 return std::addressof(handle);
380 }
381
382 /// Returns the held Vulkan handle.
383 Type operator*() const noexcept {
384 return handle;
385 }
386
387 /// Returns true when there's a held object.
388 explicit operator bool() const noexcept {
389 return handle != nullptr;
390 }
391
392protected:
393 Type handle = nullptr;
394 OwnerType owner = nullptr;
395 const Dispatch* dld = nullptr;
396
397private:
398 /// Destroys the held object if it exists.
399 void Release() noexcept {
400 if (handle) {
401 Destroy(owner, handle, *dld);
402 }
403 }
404};
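
// Semantics sketch (hypothetical objects): Handle is move-only, like std::unique_ptr. Moving
// empties the source handle, and the destination destroys any object it previously held.
//
//     vk::Sampler sampler = device.CreateSampler(sampler_ci); // owning handle (declared below)
//     vk::Sampler other = std::move(sampler);                  // ownership transferred
//     const VkSampler raw = *other;                             // borrow the raw Vulkan handle
//     other.reset();                                             // explicit early destruction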
405
406/// Dummy type used to specify that a handle has no owner.
407struct NoOwner {};
408
409/// Handle without an owning type.
410/// Analogue to std::unique_ptr.
411template <typename Type, typename Dispatch>
412class Handle<Type, NoOwner, Dispatch> {
413public:
414 /// Construct a handle and hold its ownership.
415 explicit Handle(Type handle_, const Dispatch& dld_) noexcept : handle{handle_}, dld{&dld_} {}
416
417 /// Construct an empty handle.
418 Handle() noexcept = default;
419
420 /// Copying Vulkan objects is not supported and will never be.
421 Handle(const Handle&) = delete;
422 Handle& operator=(const Handle&) = delete;
423
424 /// Construct a handle transferring ownership from another handle.
425 Handle(Handle&& rhs) noexcept : handle{std::exchange(rhs.handle, nullptr)}, dld{rhs.dld} {}
426
427 /// Assign the current handle transferring ownership from another handle.
428 /// Destroys any previously held object.
429 Handle& operator=(Handle&& rhs) noexcept {
430 Release();
431 handle = std::exchange(rhs.handle, nullptr);
432 dld = rhs.dld;
433 return *this;
434 }
435
436 /// Destroys the current handle if it existed.
437 ~Handle() noexcept {
438 Release();
439 }
440
441 /// Destroys any held object.
442 void reset() noexcept {
443 Release();
444 handle = nullptr;
445 }
446
447 /// Returns the address of the held object.
448 /// Intended for Vulkan structures that expect a pointer to an array.
449 const Type* address() const noexcept {
450 return std::addressof(handle);
451 }
452
453 /// Returns the held Vulkan handle.
454 Type operator*() const noexcept {
455 return handle;
456 }
457
458 /// Returns true when there's a held object.
459 explicit operator bool() const noexcept {
460 return handle != nullptr;
461 }
462
463protected:
464 Type handle = nullptr;
465 const Dispatch* dld = nullptr;
466
467private:
468 /// Destroys the held object if it exists.
469 void Release() noexcept {
470 if (handle) {
471 Destroy(handle, *dld);
472 }
473 }
474};
475
476/// Array of a pool allocation.
477/// Analogue to std::vector.
478template <typename AllocationType, typename PoolType>
479class PoolAllocations {
480public:
481 /// Construct an empty allocation.
482 PoolAllocations() = default;
483
484 /// Construct an allocation. Errors are reported through IsOutOfPoolMemory().
485 explicit PoolAllocations(std::unique_ptr<AllocationType[]> allocations_, std::size_t num_,
486 VkDevice device_, PoolType pool_, const DeviceDispatch& dld_) noexcept
487 : allocations{std::move(allocations_)}, num{num_}, device{device_}, pool{pool_},
488 dld{&dld_} {}
489
490 /// Copying Vulkan allocations is not supported and will never be.
491 PoolAllocations(const PoolAllocations&) = delete;
492 PoolAllocations& operator=(const PoolAllocations&) = delete;
493
494 /// Construct an allocation transferring ownership from another allocation.
495 PoolAllocations(PoolAllocations&& rhs) noexcept
496 : allocations{std::move(rhs.allocations)}, num{rhs.num}, device{rhs.device}, pool{rhs.pool},
497 dld{rhs.dld} {}
498
499 /// Assign an allocation transferring ownership from another allocation.
500 /// Releases any previously held allocation.
501 PoolAllocations& operator=(PoolAllocations&& rhs) noexcept {
502 Release();
503 allocations = std::move(rhs.allocations);
504 num = rhs.num;
505 device = rhs.device;
506 pool = rhs.pool;
507 dld = rhs.dld;
508 return *this;
509 }
510
511 /// Destroys any held allocation.
512 ~PoolAllocations() {
513 Release();
514 }
515
516 /// Returns the number of allocations.
517 std::size_t size() const noexcept {
518 return num;
519 }
520
521 /// Returns a pointer to the array of allocations.
522 const AllocationType* data() const noexcept {
523 return allocations.get();
524 }
525
526 /// Returns the allocation at the specified index.
527 /// @pre index < size()
528 AllocationType operator[](std::size_t index) const noexcept {
529 return allocations[index];
530 }
531
532 /// Returns true when the allocation failed because the pool ran out of memory.
533 bool IsOutOfPoolMemory() const noexcept {
534 return !device;
535 }
536
537private:
538 /// Destroys the held allocations if they exist.
539 void Release() noexcept {
540 if (!allocations) {
541 return;
542 }
543 const Span<AllocationType> span(allocations.get(), num);
544 const VkResult result = Free(device, pool, span, *dld);
545 // There's no way to report errors from a destructor.
546 if (result != VK_SUCCESS) {
547 std::terminate();
548 }
549 }
550
551 std::unique_ptr<AllocationType[]> allocations;
552 std::size_t num = 0;
553 VkDevice device = nullptr;
554 PoolType pool = nullptr;
555 const DeviceDispatch* dld = nullptr;
556};
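
// Usage sketch (hypothetical pool): pool allocations report exhaustion through
// IsOutOfPoolMemory() instead of throwing, so callers can create a fresh pool and retry.
//
//     vk::DescriptorSets sets = descriptor_pool.Allocate(allocate_info); // DescriptorPool below
//     if (sets.IsOutOfPoolMemory()) { /* grow or replace the pool */ }
//     const VkDescriptorSet first = sets[0];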
557
558using DebugUtilsMessenger = Handle<VkDebugUtilsMessengerEXT, VkInstance, InstanceDispatch>;
559using DescriptorSetLayout = Handle<VkDescriptorSetLayout, VkDevice, DeviceDispatch>;
560using DescriptorUpdateTemplateKHR = Handle<VkDescriptorUpdateTemplateKHR, VkDevice, DeviceDispatch>;
561using Pipeline = Handle<VkPipeline, VkDevice, DeviceDispatch>;
562using PipelineLayout = Handle<VkPipelineLayout, VkDevice, DeviceDispatch>;
563using QueryPool = Handle<VkQueryPool, VkDevice, DeviceDispatch>;
564using RenderPass = Handle<VkRenderPass, VkDevice, DeviceDispatch>;
565using Sampler = Handle<VkSampler, VkDevice, DeviceDispatch>;
566using SurfaceKHR = Handle<VkSurfaceKHR, VkInstance, InstanceDispatch>;
567
568using DescriptorSets = PoolAllocations<VkDescriptorSet, VkDescriptorPool>;
569using CommandBuffers = PoolAllocations<VkCommandBuffer, VkCommandPool>;
570
571/// Vulkan instance owning handle.
572class Instance : public Handle<VkInstance, NoOwner, InstanceDispatch> {
573 using Handle<VkInstance, NoOwner, InstanceDispatch>::Handle;
574
575public:
576 /// Creates a Vulkan instance.
577 /// @throw Exception on initialization error.
578 static Instance Create(u32 version, Span<const char*> layers, Span<const char*> extensions,
579 InstanceDispatch& dispatch);
580
581 /// Enumerates physical devices.
583 /// @return Vector of available physical devices.
583 /// @throw Exception on Vulkan error.
584 std::vector<VkPhysicalDevice> EnumeratePhysicalDevices() const;
585
586 /// Creates a debug callback messenger.
587 /// @throw Exception on creation failure.
588 DebugUtilsMessenger CreateDebugUtilsMessenger(
589 const VkDebugUtilsMessengerCreateInfoEXT& create_info) const;
590
591 /// Returns dispatch table.
592 const InstanceDispatch& Dispatch() const noexcept {
593 return *dld;
594 }
595};
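
// Creation sketch (hypothetical arguments): a minimal bootstrap path assuming the dispatch table
// was already populated with vk::Load(); real layer/extension selection lives in
// vulkan_instance.cpp.
//
//     vk::Instance instance = vk::Instance::Create(VK_API_VERSION_1_1, layers, extensions, dld);
//     const std::vector<VkPhysicalDevice> gpus = instance.EnumeratePhysicalDevices();
//     vk::DebugUtilsMessenger messenger = instance.CreateDebugUtilsMessenger(messenger_ci);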
596
597class Queue {
598public:
599 /// Construct an empty queue handle.
600 constexpr Queue() noexcept = default;
601
602 /// Construct a queue handle.
603 constexpr Queue(VkQueue queue_, const DeviceDispatch& dld_) noexcept
604 : queue{queue_}, dld{&dld_} {}
605
606 VkResult Submit(Span<VkSubmitInfo> submit_infos,
607 VkFence fence = VK_NULL_HANDLE) const noexcept {
608 return dld->vkQueueSubmit(queue, submit_infos.size(), submit_infos.data(), fence);
609 }
610
611 VkResult Present(const VkPresentInfoKHR& present_info) const noexcept {
612 return dld->vkQueuePresentKHR(queue, &present_info);
613 }
614
615private:
616 VkQueue queue = nullptr;
617 const DeviceDispatch* dld = nullptr;
618};
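
// Submission sketch (hypothetical objects): Queue is a non-owning view over a VkQueue; Submit()
// returns the raw VkResult so the caller decides whether to Check() or Filter() it.
//
//     const vk::Queue queue = device.GetQueue(family_index);
//     vk::Check(queue.Submit(submit_info, *fence)); // single VkSubmitInfo, optional fence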
619
620class Buffer : public Handle<VkBuffer, VkDevice, DeviceDispatch> {
621 using Handle<VkBuffer, VkDevice, DeviceDispatch>::Handle;
622
623public:
624 /// Attaches a memory allocation.
625 void BindMemory(VkDeviceMemory memory, VkDeviceSize offset) const;
626
627 /// Set object name.
628 void SetObjectNameEXT(const char* name) const;
629};
630
631class BufferView : public Handle<VkBufferView, VkDevice, DeviceDispatch> {
632 using Handle<VkBufferView, VkDevice, DeviceDispatch>::Handle;
633
634public:
635 /// Set object name.
636 void SetObjectNameEXT(const char* name) const;
637};
638
639class Image : public Handle<VkImage, VkDevice, DeviceDispatch> {
640 using Handle<VkImage, VkDevice, DeviceDispatch>::Handle;
641
642public:
643 /// Attaches a memory allocation.
644 void BindMemory(VkDeviceMemory memory, VkDeviceSize offset) const;
645
646 /// Set object name.
647 void SetObjectNameEXT(const char* name) const;
648};
649
650class ImageView : public Handle<VkImageView, VkDevice, DeviceDispatch> {
651 using Handle<VkImageView, VkDevice, DeviceDispatch>::Handle;
652
653public:
654 /// Set object name.
655 void SetObjectNameEXT(const char* name) const;
656};
657
658class DeviceMemory : public Handle<VkDeviceMemory, VkDevice, DeviceDispatch> {
659 using Handle<VkDeviceMemory, VkDevice, DeviceDispatch>::Handle;
660
661public:
662 /// Set object name.
663 void SetObjectNameEXT(const char* name) const;
664
665 u8* Map(VkDeviceSize offset, VkDeviceSize size) const {
666 void* data;
667 Check(dld->vkMapMemory(owner, handle, offset, size, 0, &data));
668 return static_cast<u8*>(data);
669 }
670
671 void Unmap() const noexcept {
672 dld->vkUnmapMemory(owner, handle);
673 }
674};
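
// Mapping sketch (hypothetical sizes): Map() wraps vkMapMemory and throws on failure; the
// returned pointer stays valid until Unmap() is called on the same allocation.
//
//     vk::DeviceMemory memory = device.AllocateMemory(allocate_info);
//     u8* const data = memory.Map(0, upload.size());
//     std::memcpy(data, upload.data(), upload.size());
//     memory.Unmap();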
675
676class Fence : public Handle<VkFence, VkDevice, DeviceDispatch> {
677 using Handle<VkFence, VkDevice, DeviceDispatch>::Handle;
678
679public:
680 /// Set object name.
681 void SetObjectNameEXT(const char* name) const;
682
683 VkResult Wait(u64 timeout = std::numeric_limits<u64>::max()) const noexcept {
684 return dld->vkWaitForFences(owner, 1, &handle, true, timeout);
685 }
686
687 VkResult GetStatus() const noexcept {
688 return dld->vkGetFenceStatus(owner, handle);
689 }
690
691 void Reset() const {
692 Check(dld->vkResetFences(owner, 1, &handle));
693 }
694};
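
// Synchronization sketch (hypothetical fence): GetStatus() polls without blocking, Wait() blocks
// with an optional timeout, and Reset() rearms the fence for reuse.
//
//     if (fence.GetStatus() == VK_NOT_READY) {
//         vk::Check(fence.Wait()); // default timeout waits indefinitely
//     }
//     fence.Reset();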
695
696class Framebuffer : public Handle<VkFramebuffer, VkDevice, DeviceDispatch> {
697 using Handle<VkFramebuffer, VkDevice, DeviceDispatch>::Handle;
698
699public:
700 /// Set object name.
701 void SetObjectNameEXT(const char* name) const;
702};
703
704class DescriptorPool : public Handle<VkDescriptorPool, VkDevice, DeviceDispatch> {
705 using Handle<VkDescriptorPool, VkDevice, DeviceDispatch>::Handle;
706
707public:
708 DescriptorSets Allocate(const VkDescriptorSetAllocateInfo& ai) const;
709
710 /// Set object name.
711 void SetObjectNameEXT(const char* name) const;
712};
713
714class CommandPool : public Handle<VkCommandPool, VkDevice, DeviceDispatch> {
715 using Handle<VkCommandPool, VkDevice, DeviceDispatch>::Handle;
716
717public:
718 CommandBuffers Allocate(std::size_t num_buffers,
719 VkCommandBufferLevel level = VK_COMMAND_BUFFER_LEVEL_PRIMARY) const;
720
721 /// Set object name.
722 void SetObjectNameEXT(const char* name) const;
723};
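
// Allocation sketch (hypothetical counts): command buffers are allocated in bulk and returned to
// the pool when the CommandBuffers object is destroyed; recording goes through the non-owning
// CommandBuffer wrapper declared further below.
//
//     vk::CommandBuffers cmdbufs = command_pool.Allocate(num_frames);
//     vk::CommandBuffer cmdbuf(cmdbufs[0], dld); // dld is the DeviceDispatch used at creation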
724
725class SwapchainKHR : public Handle<VkSwapchainKHR, VkDevice, DeviceDispatch> {
726 using Handle<VkSwapchainKHR, VkDevice, DeviceDispatch>::Handle;
727
728public:
729 std::vector<VkImage> GetImages() const;
730};
731
732class Event : public Handle<VkEvent, VkDevice, DeviceDispatch> {
733 using Handle<VkEvent, VkDevice, DeviceDispatch>::Handle;
734
735public:
736 /// Set object name.
737 void SetObjectNameEXT(const char* name) const;
738
739 VkResult GetStatus() const noexcept {
740 return dld->vkGetEventStatus(owner, handle);
741 }
742};
743
744class ShaderModule : public Handle<VkShaderModule, VkDevice, DeviceDispatch> {
745 using Handle<VkShaderModule, VkDevice, DeviceDispatch>::Handle;
746
747public:
748 /// Set object name.
749 void SetObjectNameEXT(const char* name) const;
750};
751
752class Semaphore : public Handle<VkSemaphore, VkDevice, DeviceDispatch> {
753 using Handle<VkSemaphore, VkDevice, DeviceDispatch>::Handle;
754
755public:
756 /// Set object name.
757 void SetObjectNameEXT(const char* name) const;
758
759 [[nodiscard]] u64 GetCounter() const {
760 u64 value;
761 Check(dld->vkGetSemaphoreCounterValueKHR(owner, handle, &value));
762 return value;
763 }
764
765 /**
766 * Waits for a timeline semaphore on the host.
767 *
768 * @param value Value to wait for
769 * @param timeout Timeout in nanoseconds
770 * @return True on successful wait, false on timeout
771 */
772 bool Wait(u64 value, u64 timeout = std::numeric_limits<u64>::max()) const {
773 const VkSemaphoreWaitInfoKHR wait_info{
774 .sType = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR,
775 .pNext = nullptr,
776 .flags = 0,
777 .semaphoreCount = 1,
778 .pSemaphores = &handle,
779 .pValues = &value,
780 };
781 const VkResult result = dld->vkWaitSemaphoresKHR(owner, &wait_info, timeout);
782 switch (result) {
783 case VK_SUCCESS:
784 return true;
785 case VK_TIMEOUT:
786 return false;
787 default:
788 throw Exception(result);
789 }
790 }
791};
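
// Timeline sketch (hypothetical values): with VK_KHR_timeline_semaphore the host can read and
// wait on a monotonically increasing counter instead of a binary signal.
//
//     const u64 current = semaphore.GetCounter();
//     if (!semaphore.Wait(current + 1, 5'000'000)) { // 5 ms timeout in nanoseconds
//         /* GPU has not reached the next tick yet */
//     }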
792
793class Device : public Handle<VkDevice, NoOwner, DeviceDispatch> {
794 using Handle<VkDevice, NoOwner, DeviceDispatch>::Handle;
795
796public:
797 static Device Create(VkPhysicalDevice physical_device, Span<VkDeviceQueueCreateInfo> queues_ci,
798 Span<const char*> enabled_extensions, const void* next,
799 DeviceDispatch& dispatch);
800
801 Queue GetQueue(u32 family_index) const noexcept;
802
803 Buffer CreateBuffer(const VkBufferCreateInfo& ci) const;
804
805 BufferView CreateBufferView(const VkBufferViewCreateInfo& ci) const;
806
807 Image CreateImage(const VkImageCreateInfo& ci) const;
808
809 ImageView CreateImageView(const VkImageViewCreateInfo& ci) const;
810
811 Semaphore CreateSemaphore() const;
812
813 Semaphore CreateSemaphore(const VkSemaphoreCreateInfo& ci) const;
814
815 Fence CreateFence(const VkFenceCreateInfo& ci) const;
816
817 DescriptorPool CreateDescriptorPool(const VkDescriptorPoolCreateInfo& ci) const;
818
819 RenderPass CreateRenderPass(const VkRenderPassCreateInfo& ci) const;
820
821 DescriptorSetLayout CreateDescriptorSetLayout(const VkDescriptorSetLayoutCreateInfo& ci) const;
822
823 PipelineLayout CreatePipelineLayout(const VkPipelineLayoutCreateInfo& ci) const;
824
825 Pipeline CreateGraphicsPipeline(const VkGraphicsPipelineCreateInfo& ci) const;
826
827 Pipeline CreateComputePipeline(const VkComputePipelineCreateInfo& ci) const;
828
829 Sampler CreateSampler(const VkSamplerCreateInfo& ci) const;
830
831 Framebuffer CreateFramebuffer(const VkFramebufferCreateInfo& ci) const;
832
833 CommandPool CreateCommandPool(const VkCommandPoolCreateInfo& ci) const;
834
835 DescriptorUpdateTemplateKHR CreateDescriptorUpdateTemplateKHR(
836 const VkDescriptorUpdateTemplateCreateInfoKHR& ci) const;
837
838 QueryPool CreateQueryPool(const VkQueryPoolCreateInfo& ci) const;
839
840 ShaderModule CreateShaderModule(const VkShaderModuleCreateInfo& ci) const;
841
842 Event CreateEvent() const;
843
844 SwapchainKHR CreateSwapchainKHR(const VkSwapchainCreateInfoKHR& ci) const;
845
846 DeviceMemory TryAllocateMemory(const VkMemoryAllocateInfo& ai) const noexcept;
847
848 DeviceMemory AllocateMemory(const VkMemoryAllocateInfo& ai) const;
849
850 VkMemoryRequirements GetBufferMemoryRequirements(VkBuffer buffer) const noexcept;
851
852 VkMemoryRequirements GetImageMemoryRequirements(VkImage image) const noexcept;
853
854 void UpdateDescriptorSets(Span<VkWriteDescriptorSet> writes,
855 Span<VkCopyDescriptorSet> copies) const noexcept;
856
857 void UpdateDescriptorSet(VkDescriptorSet set, VkDescriptorUpdateTemplateKHR update_template,
858 const void* data) const noexcept {
859 dld->vkUpdateDescriptorSetWithTemplateKHR(handle, set, update_template, data);
860 }
861
862 VkResult AcquireNextImageKHR(VkSwapchainKHR swapchain, u64 timeout, VkSemaphore semaphore,
863 VkFence fence, u32* image_index) const noexcept {
864 return dld->vkAcquireNextImageKHR(handle, swapchain, timeout, semaphore, fence,
865 image_index);
866 }
867
868 VkResult WaitIdle() const noexcept {
869 return dld->vkDeviceWaitIdle(handle);
870 }
871
872 void ResetQueryPoolEXT(VkQueryPool query_pool, u32 first, u32 count) const noexcept {
873 dld->vkResetQueryPoolEXT(handle, query_pool, first, count);
874 }
875
876 VkResult GetQueryResults(VkQueryPool query_pool, u32 first, u32 count, std::size_t data_size,
877 void* data, VkDeviceSize stride,
878 VkQueryResultFlags flags) const noexcept {
879 return dld->vkGetQueryPoolResults(handle, query_pool, first, count, data_size, data, stride,
880 flags);
881 }
882};
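
// Device sketch (hypothetical create infos): Device::Create fills the device-level dispatch
// table, after which the factory methods above return owning handles that destroy themselves
// through it.
//
//     vk::DeviceDispatch dld{};
//     vk::Device device = vk::Device::Create(gpu, queue_cis, extensions, nullptr, dld);
//     vk::Buffer buffer = device.CreateBuffer(buffer_ci);
//     buffer.BindMemory(*memory, 0); // bind a previously allocated vk::DeviceMemory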
883
884class PhysicalDevice {
885public:
886 constexpr PhysicalDevice() noexcept = default;
887
888 constexpr PhysicalDevice(VkPhysicalDevice physical_device_,
889 const InstanceDispatch& dld_) noexcept
890 : physical_device{physical_device_}, dld{&dld_} {}
891
892 constexpr operator VkPhysicalDevice() const noexcept {
893 return physical_device;
894 }
895
896 VkPhysicalDeviceProperties GetProperties() const noexcept;
897
898 void GetProperties2KHR(VkPhysicalDeviceProperties2KHR&) const noexcept;
899
900 VkPhysicalDeviceFeatures GetFeatures() const noexcept;
901
902 void GetFeatures2KHR(VkPhysicalDeviceFeatures2KHR&) const noexcept;
903
904 VkFormatProperties GetFormatProperties(VkFormat) const noexcept;
905
906 std::vector<VkExtensionProperties> EnumerateDeviceExtensionProperties() const;
907
908 std::vector<VkQueueFamilyProperties> GetQueueFamilyProperties() const;
909
910 bool GetSurfaceSupportKHR(u32 queue_family_index, VkSurfaceKHR) const;
911
912 VkSurfaceCapabilitiesKHR GetSurfaceCapabilitiesKHR(VkSurfaceKHR) const;
913
914 std::vector<VkSurfaceFormatKHR> GetSurfaceFormatsKHR(VkSurfaceKHR) const;
915
916 std::vector<VkPresentModeKHR> GetSurfacePresentModesKHR(VkSurfaceKHR) const;
917
918 VkPhysicalDeviceMemoryProperties GetMemoryProperties() const noexcept;
919
920private:
921 VkPhysicalDevice physical_device = nullptr;
922 const InstanceDispatch* dld = nullptr;
923};
924
925class CommandBuffer {
926public:
927 CommandBuffer() noexcept = default;
928
929 explicit CommandBuffer(VkCommandBuffer handle_, const DeviceDispatch& dld_) noexcept
930 : handle{handle_}, dld{&dld_} {}
931
932 const VkCommandBuffer* address() const noexcept {
933 return &handle;
934 }
935
936 void Begin(const VkCommandBufferBeginInfo& begin_info) const {
937 Check(dld->vkBeginCommandBuffer(handle, &begin_info));
938 }
939
940 void End() const {
941 Check(dld->vkEndCommandBuffer(handle));
942 }
943
944 void BeginRenderPass(const VkRenderPassBeginInfo& renderpass_bi,
945 VkSubpassContents contents) const noexcept {
946 dld->vkCmdBeginRenderPass(handle, &renderpass_bi, contents);
947 }
948
949 void EndRenderPass() const noexcept {
950 dld->vkCmdEndRenderPass(handle);
951 }
952
953 void BeginQuery(VkQueryPool query_pool, u32 query, VkQueryControlFlags flags) const noexcept {
954 dld->vkCmdBeginQuery(handle, query_pool, query, flags);
955 }
956
957 void EndQuery(VkQueryPool query_pool, u32 query) const noexcept {
958 dld->vkCmdEndQuery(handle, query_pool, query);
959 }
960
961 void BindDescriptorSets(VkPipelineBindPoint bind_point, VkPipelineLayout layout, u32 first,
962 Span<VkDescriptorSet> sets, Span<u32> dynamic_offsets) const noexcept {
963 dld->vkCmdBindDescriptorSets(handle, bind_point, layout, first, sets.size(), sets.data(),
964 dynamic_offsets.size(), dynamic_offsets.data());
965 }
966
967 void BindPipeline(VkPipelineBindPoint bind_point, VkPipeline pipeline) const noexcept {
968 dld->vkCmdBindPipeline(handle, bind_point, pipeline);
969 }
970
971 void BindIndexBuffer(VkBuffer buffer, VkDeviceSize offset,
972 VkIndexType index_type) const noexcept {
973 dld->vkCmdBindIndexBuffer(handle, buffer, offset, index_type);
974 }
975
976 void BindVertexBuffers(u32 first, u32 count, const VkBuffer* buffers,
977 const VkDeviceSize* offsets) const noexcept {
978 dld->vkCmdBindVertexBuffers(handle, first, count, buffers, offsets);
979 }
980
981 void BindVertexBuffer(u32 binding, VkBuffer buffer, VkDeviceSize offset) const noexcept {
982 BindVertexBuffers(binding, 1, &buffer, &offset);
983 }
984
985 void Draw(u32 vertex_count, u32 instance_count, u32 first_vertex,
986 u32 first_instance) const noexcept {
987 dld->vkCmdDraw(handle, vertex_count, instance_count, first_vertex, first_instance);
988 }
989
990 void DrawIndexed(u32 index_count, u32 instance_count, u32 first_index, u32 vertex_offset,
991 u32 first_instance) const noexcept {
992 dld->vkCmdDrawIndexed(handle, index_count, instance_count, first_index, vertex_offset,
993 first_instance);
994 }
995
996 void ClearAttachments(Span<VkClearAttachment> attachments,
997 Span<VkClearRect> rects) const noexcept {
998 dld->vkCmdClearAttachments(handle, attachments.size(), attachments.data(), rects.size(),
999 rects.data());
1000 }
1001
1002 void BlitImage(VkImage src_image, VkImageLayout src_layout, VkImage dst_image,
1003 VkImageLayout dst_layout, Span<VkImageBlit> regions,
1004 VkFilter filter) const noexcept {
1005 dld->vkCmdBlitImage(handle, src_image, src_layout, dst_image, dst_layout, regions.size(),
1006 regions.data(), filter);
1007 }
1008
1009 void ResolveImage(VkImage src_image, VkImageLayout src_layout, VkImage dst_image,
1010 VkImageLayout dst_layout, Span<VkImageResolve> regions) const noexcept {
1011 dld->vkCmdResolveImage(handle, src_image, src_layout, dst_image, dst_layout, regions.size(),
1012 regions.data());
1013 }
1014
1015 void Dispatch(u32 x, u32 y, u32 z) const noexcept {
1016 dld->vkCmdDispatch(handle, x, y, z);
1017 }
1018
1019 void PipelineBarrier(VkPipelineStageFlags src_stage_mask, VkPipelineStageFlags dst_stage_mask,
1020 VkDependencyFlags dependency_flags, Span<VkMemoryBarrier> memory_barriers,
1021 Span<VkBufferMemoryBarrier> buffer_barriers,
1022 Span<VkImageMemoryBarrier> image_barriers) const noexcept {
1023 dld->vkCmdPipelineBarrier(handle, src_stage_mask, dst_stage_mask, dependency_flags,
1024 memory_barriers.size(), memory_barriers.data(),
1025 buffer_barriers.size(), buffer_barriers.data(),
1026 image_barriers.size(), image_barriers.data());
1027 }
1028
1029 void PipelineBarrier(VkPipelineStageFlags src_stage_mask, VkPipelineStageFlags dst_stage_mask,
1030 VkDependencyFlags dependency_flags = 0) const noexcept {
1031 PipelineBarrier(src_stage_mask, dst_stage_mask, dependency_flags, {}, {}, {});
1032 }
1033
1034 void PipelineBarrier(VkPipelineStageFlags src_stage_mask, VkPipelineStageFlags dst_stage_mask,
1035 VkDependencyFlags dependency_flags,
1036 const VkBufferMemoryBarrier& buffer_barrier) const noexcept {
1037 PipelineBarrier(src_stage_mask, dst_stage_mask, dependency_flags, {}, buffer_barrier, {});
1038 }
1039
1040 void PipelineBarrier(VkPipelineStageFlags src_stage_mask, VkPipelineStageFlags dst_stage_mask,
1041 VkDependencyFlags dependency_flags,
1042 const VkImageMemoryBarrier& image_barrier) const noexcept {
1043 PipelineBarrier(src_stage_mask, dst_stage_mask, dependency_flags, {}, {}, image_barrier);
1044 }
1045
1046 void CopyBufferToImage(VkBuffer src_buffer, VkImage dst_image, VkImageLayout dst_image_layout,
1047 Span<VkBufferImageCopy> regions) const noexcept {
1048 dld->vkCmdCopyBufferToImage(handle, src_buffer, dst_image, dst_image_layout, regions.size(),
1049 regions.data());
1050 }
1051
1052 void CopyBuffer(VkBuffer src_buffer, VkBuffer dst_buffer,
1053 Span<VkBufferCopy> regions) const noexcept {
1054 dld->vkCmdCopyBuffer(handle, src_buffer, dst_buffer, regions.size(), regions.data());
1055 }
1056
1057 void CopyImage(VkImage src_image, VkImageLayout src_layout, VkImage dst_image,
1058 VkImageLayout dst_layout, Span<VkImageCopy> regions) const noexcept {
1059 dld->vkCmdCopyImage(handle, src_image, src_layout, dst_image, dst_layout, regions.size(),
1060 regions.data());
1061 }
1062
1063 void CopyImageToBuffer(VkImage src_image, VkImageLayout src_layout, VkBuffer dst_buffer,
1064 Span<VkBufferImageCopy> regions) const noexcept {
1065 dld->vkCmdCopyImageToBuffer(handle, src_image, src_layout, dst_buffer, regions.size(),
1066 regions.data());
1067 }
1068
1069 void FillBuffer(VkBuffer dst_buffer, VkDeviceSize dst_offset, VkDeviceSize size,
1070 u32 data) const noexcept {
1071 dld->vkCmdFillBuffer(handle, dst_buffer, dst_offset, size, data);
1072 }
1073
1074 void PushConstants(VkPipelineLayout layout, VkShaderStageFlags flags, u32 offset, u32 size,
1075 const void* values) const noexcept {
1076 dld->vkCmdPushConstants(handle, layout, flags, offset, size, values);
1077 }
1078
1079 template <typename T>
1080 void PushConstants(VkPipelineLayout layout, VkShaderStageFlags flags,
1081 const T& data) const noexcept {
1082 static_assert(std::is_trivially_copyable_v<T>, "<data> is not trivially copyable");
1083 dld->vkCmdPushConstants(handle, layout, flags, 0, static_cast<u32>(sizeof(T)), &data);
1084 }
1085
1086 void SetViewport(u32 first, Span<VkViewport> viewports) const noexcept {
1087 dld->vkCmdSetViewport(handle, first, viewports.size(), viewports.data());
1088 }
1089
1090 void SetScissor(u32 first, Span<VkRect2D> scissors) const noexcept {
1091 dld->vkCmdSetScissor(handle, first, scissors.size(), scissors.data());
1092 }
1093
1094 void SetBlendConstants(const float blend_constants[4]) const noexcept {
1095 dld->vkCmdSetBlendConstants(handle, blend_constants);
1096 }
1097
1098 void SetStencilCompareMask(VkStencilFaceFlags face_mask, u32 compare_mask) const noexcept {
1099 dld->vkCmdSetStencilCompareMask(handle, face_mask, compare_mask);
1100 }
1101
1102 void SetStencilReference(VkStencilFaceFlags face_mask, u32 reference) const noexcept {
1103 dld->vkCmdSetStencilReference(handle, face_mask, reference);
1104 }
1105
1106 void SetStencilWriteMask(VkStencilFaceFlags face_mask, u32 write_mask) const noexcept {
1107 dld->vkCmdSetStencilWriteMask(handle, face_mask, write_mask);
1108 }
1109
1110 void SetDepthBias(float constant_factor, float clamp, float slope_factor) const noexcept {
1111 dld->vkCmdSetDepthBias(handle, constant_factor, clamp, slope_factor);
1112 }
1113
1114 void SetDepthBounds(float min_depth_bounds, float max_depth_bounds) const noexcept {
1115 dld->vkCmdSetDepthBounds(handle, min_depth_bounds, max_depth_bounds);
1116 }
1117
1118 void SetEvent(VkEvent event, VkPipelineStageFlags stage_flags) const noexcept {
1119 dld->vkCmdSetEvent(handle, event, stage_flags);
1120 }
1121
1122 void WaitEvents(Span<VkEvent> events, VkPipelineStageFlags src_stage_mask,
1123 VkPipelineStageFlags dst_stage_mask, Span<VkMemoryBarrier> memory_barriers,
1124 Span<VkBufferMemoryBarrier> buffer_barriers,
1125 Span<VkImageMemoryBarrier> image_barriers) const noexcept {
1126 dld->vkCmdWaitEvents(handle, events.size(), events.data(), src_stage_mask, dst_stage_mask,
1127 memory_barriers.size(), memory_barriers.data(), buffer_barriers.size(),
1128 buffer_barriers.data(), image_barriers.size(), image_barriers.data());
1129 }
1130
1131 void BindVertexBuffers2EXT(u32 first_binding, u32 binding_count, const VkBuffer* buffers,
1132 const VkDeviceSize* offsets, const VkDeviceSize* sizes,
1133 const VkDeviceSize* strides) const noexcept {
1134 dld->vkCmdBindVertexBuffers2EXT(handle, first_binding, binding_count, buffers, offsets,
1135 sizes, strides);
1136 }
1137
1138 void SetCullModeEXT(VkCullModeFlags cull_mode) const noexcept {
1139 dld->vkCmdSetCullModeEXT(handle, cull_mode);
1140 }
1141
1142 void SetDepthBoundsTestEnableEXT(bool enable) const noexcept {
1143 dld->vkCmdSetDepthBoundsTestEnableEXT(handle, enable ? VK_TRUE : VK_FALSE);
1144 }
1145
1146 void SetDepthCompareOpEXT(VkCompareOp compare_op) const noexcept {
1147 dld->vkCmdSetDepthCompareOpEXT(handle, compare_op);
1148 }
1149
1150 void SetDepthTestEnableEXT(bool enable) const noexcept {
1151 dld->vkCmdSetDepthTestEnableEXT(handle, enable ? VK_TRUE : VK_FALSE);
1152 }
1153
1154 void SetDepthWriteEnableEXT(bool enable) const noexcept {
1155 dld->vkCmdSetDepthWriteEnableEXT(handle, enable ? VK_TRUE : VK_FALSE);
1156 }
1157
1158 void SetFrontFaceEXT(VkFrontFace front_face) const noexcept {
1159 dld->vkCmdSetFrontFaceEXT(handle, front_face);
1160 }
1161
1162 void SetPrimitiveTopologyEXT(VkPrimitiveTopology primitive_topology) const noexcept {
1163 dld->vkCmdSetPrimitiveTopologyEXT(handle, primitive_topology);
1164 }
1165
1166 void SetStencilOpEXT(VkStencilFaceFlags face_mask, VkStencilOp fail_op, VkStencilOp pass_op,
1167 VkStencilOp depth_fail_op, VkCompareOp compare_op) const noexcept {
1168 dld->vkCmdSetStencilOpEXT(handle, face_mask, fail_op, pass_op, depth_fail_op, compare_op);
1169 }
1170
1171 void SetStencilTestEnableEXT(bool enable) const noexcept {
1172 dld->vkCmdSetStencilTestEnableEXT(handle, enable ? VK_TRUE : VK_FALSE);
1173 }
1174
1175 void BindTransformFeedbackBuffersEXT(u32 first, u32 count, const VkBuffer* buffers,
1176 const VkDeviceSize* offsets,
1177 const VkDeviceSize* sizes) const noexcept {
1178 dld->vkCmdBindTransformFeedbackBuffersEXT(handle, first, count, buffers, offsets, sizes);
1179 }
1180
1181 void BeginTransformFeedbackEXT(u32 first_counter_buffer, u32 counter_buffers_count,
1182 const VkBuffer* counter_buffers,
1183 const VkDeviceSize* counter_buffer_offsets) const noexcept {
1184 dld->vkCmdBeginTransformFeedbackEXT(handle, first_counter_buffer, counter_buffers_count,
1185 counter_buffers, counter_buffer_offsets);
1186 }
1187
1188 void EndTransformFeedbackEXT(u32 first_counter_buffer, u32 counter_buffers_count,
1189 const VkBuffer* counter_buffers,
1190 const VkDeviceSize* counter_buffer_offsets) const noexcept {
1191 dld->vkCmdEndTransformFeedbackEXT(handle, first_counter_buffer, counter_buffers_count,
1192 counter_buffers, counter_buffer_offsets);
1193 }
1194
1195 void BeginDebugUtilsLabelEXT(const char* label, std::span<float, 4> color) const noexcept {
1196 const VkDebugUtilsLabelEXT label_info{
1197 .sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT,
1198 .pNext = nullptr,
1199 .pLabelName = label,
1200 .color{color[0], color[1], color[2], color[3]},
1201 };
1202 dld->vkCmdBeginDebugUtilsLabelEXT(handle, &label_info);
1203 }
1204
1205 void EndDebugUtilsLabelEXT() const noexcept {
1206 dld->vkCmdEndDebugUtilsLabelEXT(handle);
1207 }
1208
1209private:
1210 VkCommandBuffer handle = nullptr;
1211 const DeviceDispatch* dld = nullptr;
1212};
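
// Recording sketch (hypothetical structures): CommandBuffer is a thin non-owning wrapper; every
// call below forwards straight into the device dispatch table.
//
//     cmdbuf.Begin(begin_info);
//     cmdbuf.BeginRenderPass(renderpass_bi, VK_SUBPASS_CONTENTS_INLINE);
//     cmdbuf.BindPipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
//     cmdbuf.Draw(3, 1, 0, 0); // fullscreen triangle: 3 vertices, 1 instance
//     cmdbuf.EndRenderPass();
//     cmdbuf.End();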
1213
1214u32 AvailableVersion(const InstanceDispatch& dld) noexcept;
1215
1216std::optional<std::vector<VkExtensionProperties>> EnumerateInstanceExtensionProperties(
1217 const InstanceDispatch& dld);
1218
1219std::optional<std::vector<VkLayerProperties>> EnumerateInstanceLayerProperties(
1220 const InstanceDispatch& dld);
1221
1222} // namespace Vulkan::vk