From d93742142243dea1355012b9f0ce7f5ac8a2dc02 Mon Sep 17 00:00:00 2001 From: ReinUsesLisp Date: Thu, 24 Dec 2020 21:24:34 -0300 Subject: vulkan_common: Move dynamic library load to a separate file Allows us to initialize a Vulkan dynamic library from different backends without duplicating code. --- src/video_core/vulkan_common/vulkan_library.cpp | 36 +++++++++++++++++++++++++ src/video_core/vulkan_common/vulkan_library.h | 13 +++++++++ 2 files changed, 49 insertions(+) create mode 100644 src/video_core/vulkan_common/vulkan_library.cpp create mode 100644 src/video_core/vulkan_common/vulkan_library.h (limited to 'src/video_core/vulkan_common') diff --git a/src/video_core/vulkan_common/vulkan_library.cpp b/src/video_core/vulkan_common/vulkan_library.cpp new file mode 100644 index 000000000..27c958221 --- /dev/null +++ b/src/video_core/vulkan_common/vulkan_library.cpp @@ -0,0 +1,36 @@ +// Copyright 2020 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include +#include + +#include "common/dynamic_library.h" +#include "common/file_util.h" +#include "video_core/vulkan_common/vulkan_library.h" + +namespace Vulkan { + +Common::DynamicLibrary OpenLibrary() { + Common::DynamicLibrary library; +#ifdef __APPLE__ + // Check if a path to a specific Vulkan library has been specified. + char* const libvulkan_env = std::getenv("LIBVULKAN_PATH"); + if (!libvulkan_env || !library.Open(libvulkan_env)) { + // Use the libvulkan.dylib from the application bundle. + const std::string filename = + Common::FS::GetBundleDirectory() + "/Contents/Frameworks/libvulkan.dylib"; + library.Open(filename.c_str()); + } +#else + std::string filename = Common::DynamicLibrary::GetVersionedFilename("vulkan", 1); + if (!library.Open(filename.c_str())) { + // Android devices may not have libvulkan.so.1, only libvulkan.so. + filename = Common::DynamicLibrary::GetVersionedFilename("vulkan"); + void(library.Open(filename.c_str())); + } +#endif + return library; +} + +} // namespace Vulkan diff --git a/src/video_core/vulkan_common/vulkan_library.h b/src/video_core/vulkan_common/vulkan_library.h new file mode 100644 index 000000000..8b28b0e17 --- /dev/null +++ b/src/video_core/vulkan_common/vulkan_library.h @@ -0,0 +1,13 @@ +// Copyright 2020 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#pragma once + +#include "common/dynamic_library.h" + +namespace Vulkan { + +Common::DynamicLibrary OpenLibrary(); + +} // namespace Vulkan -- cgit v1.2.3 From d1435009ed914cc50533a71b1e25132376c28586 Mon Sep 17 00:00:00 2001 From: ReinUsesLisp Date: Thu, 24 Dec 2020 21:30:11 -0300 Subject: vulkan_common: Rename renderer_vulkan/wrapper.h to vulkan_common/vulkan_wrapper.h Allows sharing Vulkan wrapper code between different rendering backends. 
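
As an illustrative sketch only (not part of either change), a rendering backend could wire the
two pieces together roughly as below. Vulkan::OpenLibrary, vk::InstanceDispatch, vk::Load,
vk::AvailableVersion and vk::Instance::Create come from these patches; DynamicLibrary::IsOpen
and DynamicLibrary::GetSymbol are assumed helpers from common/dynamic_library.h and are not
shown in this diff:

    // Sketch under the assumptions above; IsOpen/GetSymbol are assumed helpers.
    Common::DynamicLibrary library = Vulkan::OpenLibrary();
    Vulkan::vk::InstanceDispatch dld{};
    const bool ok =
        library.IsOpen() &&
        // Resolve the loader entry point, then load the instance-agnostic pointers.
        library.GetSymbol("vkGetInstanceProcAddr", &dld.vkGetInstanceProcAddr) &&
        Vulkan::vk::Load(dld);
    if (!ok) {
        // Vulkan is unavailable; the caller can fall back to another backend.
    }
    // With the instance-agnostic pointers loaded, an instance could then be created:
    // vk::Instance instance =
    //     vk::Instance::Create(vk::AvailableVersion(dld), {}, {}, dld);
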
--- src/video_core/vulkan_common/vulkan_wrapper.cpp | 928 +++++++++++++++++ src/video_core/vulkan_common/vulkan_wrapper.h | 1213 +++++++++++++++++++++++ 2 files changed, 2141 insertions(+) create mode 100644 src/video_core/vulkan_common/vulkan_wrapper.cpp create mode 100644 src/video_core/vulkan_common/vulkan_wrapper.h (limited to 'src/video_core/vulkan_common') diff --git a/src/video_core/vulkan_common/vulkan_wrapper.cpp b/src/video_core/vulkan_common/vulkan_wrapper.cpp new file mode 100644 index 000000000..478402bbd --- /dev/null +++ b/src/video_core/vulkan_common/vulkan_wrapper.cpp @@ -0,0 +1,928 @@ +// Copyright 2020 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include +#include +#include +#include +#include +#include +#include + +#include "common/common_types.h" +#include "common/logging/log.h" + +#include "video_core/vulkan_common/vulkan_wrapper.h" + +namespace Vulkan::vk { + +namespace { + +template +void SortPhysicalDevices(std::vector& devices, const InstanceDispatch& dld, + Func&& func) { + // Calling GetProperties calls Vulkan more than needed. But they are supposed to be cheap + // functions. + std::stable_sort(devices.begin(), devices.end(), + [&dld, &func](VkPhysicalDevice lhs, VkPhysicalDevice rhs) { + return func(vk::PhysicalDevice(lhs, dld).GetProperties(), + vk::PhysicalDevice(rhs, dld).GetProperties()); + }); +} + +void SortPhysicalDevicesPerVendor(std::vector& devices, + const InstanceDispatch& dld, + std::initializer_list vendor_ids) { + for (auto it = vendor_ids.end(); it != vendor_ids.begin();) { + --it; + SortPhysicalDevices(devices, dld, [id = *it](const auto& lhs, const auto& rhs) { + return lhs.vendorID == id && rhs.vendorID != id; + }); + } +} + +void SortPhysicalDevices(std::vector& devices, const InstanceDispatch& dld) { + // Sort by name, this will set a base and make GPUs with higher numbers appear first + // (e.g. GTX 1650 will intentionally be listed before a GTX 1080). + SortPhysicalDevices(devices, dld, [](const auto& lhs, const auto& rhs) { + return std::string_view{lhs.deviceName} > std::string_view{rhs.deviceName}; + }); + // Prefer discrete over non-discrete + SortPhysicalDevices(devices, dld, [](const auto& lhs, const auto& rhs) { + return lhs.deviceType == VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU && + rhs.deviceType != VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU; + }); + // Prefer Nvidia over AMD, AMD over Intel, Intel over the rest. 
+ SortPhysicalDevicesPerVendor(devices, dld, {0x10DE, 0x1002, 0x8086}); +} + +template +bool Proc(T& result, const InstanceDispatch& dld, const char* proc_name, + VkInstance instance = nullptr) noexcept { + result = reinterpret_cast(dld.vkGetInstanceProcAddr(instance, proc_name)); + return result != nullptr; +} + +template +void Proc(T& result, const DeviceDispatch& dld, const char* proc_name, VkDevice device) noexcept { + result = reinterpret_cast(dld.vkGetDeviceProcAddr(device, proc_name)); +} + +void Load(VkDevice device, DeviceDispatch& dld) noexcept { +#define X(name) Proc(dld.name, dld, #name, device) + X(vkAcquireNextImageKHR); + X(vkAllocateCommandBuffers); + X(vkAllocateDescriptorSets); + X(vkAllocateMemory); + X(vkBeginCommandBuffer); + X(vkBindBufferMemory); + X(vkBindImageMemory); + X(vkCmdBeginQuery); + X(vkCmdBeginRenderPass); + X(vkCmdBeginTransformFeedbackEXT); + X(vkCmdBeginDebugUtilsLabelEXT); + X(vkCmdBindDescriptorSets); + X(vkCmdBindIndexBuffer); + X(vkCmdBindPipeline); + X(vkCmdBindTransformFeedbackBuffersEXT); + X(vkCmdBindVertexBuffers); + X(vkCmdBlitImage); + X(vkCmdClearAttachments); + X(vkCmdCopyBuffer); + X(vkCmdCopyBufferToImage); + X(vkCmdCopyImage); + X(vkCmdCopyImageToBuffer); + X(vkCmdDispatch); + X(vkCmdDraw); + X(vkCmdDrawIndexed); + X(vkCmdEndQuery); + X(vkCmdEndRenderPass); + X(vkCmdEndTransformFeedbackEXT); + X(vkCmdEndDebugUtilsLabelEXT); + X(vkCmdFillBuffer); + X(vkCmdPipelineBarrier); + X(vkCmdPushConstants); + X(vkCmdSetBlendConstants); + X(vkCmdSetDepthBias); + X(vkCmdSetDepthBounds); + X(vkCmdSetEvent); + X(vkCmdSetScissor); + X(vkCmdSetStencilCompareMask); + X(vkCmdSetStencilReference); + X(vkCmdSetStencilWriteMask); + X(vkCmdSetViewport); + X(vkCmdWaitEvents); + X(vkCmdBindVertexBuffers2EXT); + X(vkCmdSetCullModeEXT); + X(vkCmdSetDepthBoundsTestEnableEXT); + X(vkCmdSetDepthCompareOpEXT); + X(vkCmdSetDepthTestEnableEXT); + X(vkCmdSetDepthWriteEnableEXT); + X(vkCmdSetFrontFaceEXT); + X(vkCmdSetPrimitiveTopologyEXT); + X(vkCmdSetStencilOpEXT); + X(vkCmdSetStencilTestEnableEXT); + X(vkCmdResolveImage); + X(vkCreateBuffer); + X(vkCreateBufferView); + X(vkCreateCommandPool); + X(vkCreateComputePipelines); + X(vkCreateDescriptorPool); + X(vkCreateDescriptorSetLayout); + X(vkCreateDescriptorUpdateTemplateKHR); + X(vkCreateEvent); + X(vkCreateFence); + X(vkCreateFramebuffer); + X(vkCreateGraphicsPipelines); + X(vkCreateImage); + X(vkCreateImageView); + X(vkCreatePipelineLayout); + X(vkCreateQueryPool); + X(vkCreateRenderPass); + X(vkCreateSampler); + X(vkCreateSemaphore); + X(vkCreateShaderModule); + X(vkCreateSwapchainKHR); + X(vkDestroyBuffer); + X(vkDestroyBufferView); + X(vkDestroyCommandPool); + X(vkDestroyDescriptorPool); + X(vkDestroyDescriptorSetLayout); + X(vkDestroyDescriptorUpdateTemplateKHR); + X(vkDestroyEvent); + X(vkDestroyFence); + X(vkDestroyFramebuffer); + X(vkDestroyImage); + X(vkDestroyImageView); + X(vkDestroyPipeline); + X(vkDestroyPipelineLayout); + X(vkDestroyQueryPool); + X(vkDestroyRenderPass); + X(vkDestroySampler); + X(vkDestroySemaphore); + X(vkDestroyShaderModule); + X(vkDestroySwapchainKHR); + X(vkDeviceWaitIdle); + X(vkEndCommandBuffer); + X(vkFreeCommandBuffers); + X(vkFreeDescriptorSets); + X(vkFreeMemory); + X(vkGetBufferMemoryRequirements); + X(vkGetDeviceQueue); + X(vkGetEventStatus); + X(vkGetFenceStatus); + X(vkGetImageMemoryRequirements); + X(vkGetQueryPoolResults); + X(vkGetSemaphoreCounterValueKHR); + X(vkMapMemory); + X(vkQueueSubmit); + X(vkResetFences); + X(vkResetQueryPoolEXT); + 
X(vkSetDebugUtilsObjectNameEXT); + X(vkSetDebugUtilsObjectTagEXT); + X(vkUnmapMemory); + X(vkUpdateDescriptorSetWithTemplateKHR); + X(vkUpdateDescriptorSets); + X(vkWaitForFences); + X(vkWaitSemaphoresKHR); +#undef X +} + +template +void SetObjectName(const DeviceDispatch* dld, VkDevice device, T handle, VkObjectType type, + const char* name) { + const VkDebugUtilsObjectNameInfoEXT name_info{ + .sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT, + .pNext = nullptr, + .objectType = VK_OBJECT_TYPE_IMAGE, + .objectHandle = reinterpret_cast(handle), + .pObjectName = name, + }; + Check(dld->vkSetDebugUtilsObjectNameEXT(device, &name_info)); +} + +} // Anonymous namespace + +bool Load(InstanceDispatch& dld) noexcept { +#define X(name) Proc(dld.name, dld, #name) + return X(vkCreateInstance) && X(vkEnumerateInstanceExtensionProperties) && + X(vkEnumerateInstanceLayerProperties); +#undef X +} + +bool Load(VkInstance instance, InstanceDispatch& dld) noexcept { +#define X(name) Proc(dld.name, dld, #name, instance) + // These functions may fail to load depending on the enabled extensions. + // Don't return a failure on these. + X(vkCreateDebugUtilsMessengerEXT); + X(vkDestroyDebugUtilsMessengerEXT); + X(vkDestroySurfaceKHR); + X(vkGetPhysicalDeviceFeatures2KHR); + X(vkGetPhysicalDeviceProperties2KHR); + X(vkGetPhysicalDeviceSurfaceCapabilitiesKHR); + X(vkGetPhysicalDeviceSurfaceFormatsKHR); + X(vkGetPhysicalDeviceSurfacePresentModesKHR); + X(vkGetPhysicalDeviceSurfaceSupportKHR); + X(vkGetSwapchainImagesKHR); + X(vkQueuePresentKHR); + + return X(vkCreateDevice) && X(vkDestroyDevice) && X(vkDestroyDevice) && + X(vkEnumerateDeviceExtensionProperties) && X(vkEnumeratePhysicalDevices) && + X(vkGetDeviceProcAddr) && X(vkGetPhysicalDeviceFormatProperties) && + X(vkGetPhysicalDeviceMemoryProperties) && X(vkGetPhysicalDeviceProperties) && + X(vkGetPhysicalDeviceQueueFamilyProperties); +#undef X +} + +const char* Exception::what() const noexcept { + return ToString(result); +} + +const char* ToString(VkResult result) noexcept { + switch (result) { + case VkResult::VK_SUCCESS: + return "VK_SUCCESS"; + case VkResult::VK_NOT_READY: + return "VK_NOT_READY"; + case VkResult::VK_TIMEOUT: + return "VK_TIMEOUT"; + case VkResult::VK_EVENT_SET: + return "VK_EVENT_SET"; + case VkResult::VK_EVENT_RESET: + return "VK_EVENT_RESET"; + case VkResult::VK_INCOMPLETE: + return "VK_INCOMPLETE"; + case VkResult::VK_ERROR_OUT_OF_HOST_MEMORY: + return "VK_ERROR_OUT_OF_HOST_MEMORY"; + case VkResult::VK_ERROR_OUT_OF_DEVICE_MEMORY: + return "VK_ERROR_OUT_OF_DEVICE_MEMORY"; + case VkResult::VK_ERROR_INITIALIZATION_FAILED: + return "VK_ERROR_INITIALIZATION_FAILED"; + case VkResult::VK_ERROR_DEVICE_LOST: + return "VK_ERROR_DEVICE_LOST"; + case VkResult::VK_ERROR_MEMORY_MAP_FAILED: + return "VK_ERROR_MEMORY_MAP_FAILED"; + case VkResult::VK_ERROR_LAYER_NOT_PRESENT: + return "VK_ERROR_LAYER_NOT_PRESENT"; + case VkResult::VK_ERROR_EXTENSION_NOT_PRESENT: + return "VK_ERROR_EXTENSION_NOT_PRESENT"; + case VkResult::VK_ERROR_FEATURE_NOT_PRESENT: + return "VK_ERROR_FEATURE_NOT_PRESENT"; + case VkResult::VK_ERROR_INCOMPATIBLE_DRIVER: + return "VK_ERROR_INCOMPATIBLE_DRIVER"; + case VkResult::VK_ERROR_TOO_MANY_OBJECTS: + return "VK_ERROR_TOO_MANY_OBJECTS"; + case VkResult::VK_ERROR_FORMAT_NOT_SUPPORTED: + return "VK_ERROR_FORMAT_NOT_SUPPORTED"; + case VkResult::VK_ERROR_FRAGMENTED_POOL: + return "VK_ERROR_FRAGMENTED_POOL"; + case VkResult::VK_ERROR_OUT_OF_POOL_MEMORY: + return "VK_ERROR_OUT_OF_POOL_MEMORY"; + case 
VkResult::VK_ERROR_INVALID_EXTERNAL_HANDLE: + return "VK_ERROR_INVALID_EXTERNAL_HANDLE"; + case VkResult::VK_ERROR_SURFACE_LOST_KHR: + return "VK_ERROR_SURFACE_LOST_KHR"; + case VkResult::VK_ERROR_NATIVE_WINDOW_IN_USE_KHR: + return "VK_ERROR_NATIVE_WINDOW_IN_USE_KHR"; + case VkResult::VK_SUBOPTIMAL_KHR: + return "VK_SUBOPTIMAL_KHR"; + case VkResult::VK_ERROR_OUT_OF_DATE_KHR: + return "VK_ERROR_OUT_OF_DATE_KHR"; + case VkResult::VK_ERROR_INCOMPATIBLE_DISPLAY_KHR: + return "VK_ERROR_INCOMPATIBLE_DISPLAY_KHR"; + case VkResult::VK_ERROR_VALIDATION_FAILED_EXT: + return "VK_ERROR_VALIDATION_FAILED_EXT"; + case VkResult::VK_ERROR_INVALID_SHADER_NV: + return "VK_ERROR_INVALID_SHADER_NV"; + case VkResult::VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT: + return "VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT"; + case VkResult::VK_ERROR_FRAGMENTATION_EXT: + return "VK_ERROR_FRAGMENTATION_EXT"; + case VkResult::VK_ERROR_NOT_PERMITTED_EXT: + return "VK_ERROR_NOT_PERMITTED_EXT"; + case VkResult::VK_ERROR_INVALID_DEVICE_ADDRESS_EXT: + return "VK_ERROR_INVALID_DEVICE_ADDRESS_EXT"; + case VkResult::VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT: + return "VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT"; + case VkResult::VK_ERROR_UNKNOWN: + return "VK_ERROR_UNKNOWN"; + case VkResult::VK_ERROR_INCOMPATIBLE_VERSION_KHR: + return "VK_ERROR_INCOMPATIBLE_VERSION_KHR"; + case VkResult::VK_THREAD_IDLE_KHR: + return "VK_THREAD_IDLE_KHR"; + case VkResult::VK_THREAD_DONE_KHR: + return "VK_THREAD_DONE_KHR"; + case VkResult::VK_OPERATION_DEFERRED_KHR: + return "VK_OPERATION_DEFERRED_KHR"; + case VkResult::VK_OPERATION_NOT_DEFERRED_KHR: + return "VK_OPERATION_NOT_DEFERRED_KHR"; + case VkResult::VK_PIPELINE_COMPILE_REQUIRED_EXT: + return "VK_PIPELINE_COMPILE_REQUIRED_EXT"; + case VkResult::VK_RESULT_MAX_ENUM: + return "VK_RESULT_MAX_ENUM"; + } + return "Unknown"; +} + +void Destroy(VkInstance instance, const InstanceDispatch& dld) noexcept { + dld.vkDestroyInstance(instance, nullptr); +} + +void Destroy(VkDevice device, const InstanceDispatch& dld) noexcept { + dld.vkDestroyDevice(device, nullptr); +} + +void Destroy(VkDevice device, VkBuffer handle, const DeviceDispatch& dld) noexcept { + dld.vkDestroyBuffer(device, handle, nullptr); +} + +void Destroy(VkDevice device, VkBufferView handle, const DeviceDispatch& dld) noexcept { + dld.vkDestroyBufferView(device, handle, nullptr); +} + +void Destroy(VkDevice device, VkCommandPool handle, const DeviceDispatch& dld) noexcept { + dld.vkDestroyCommandPool(device, handle, nullptr); +} + +void Destroy(VkDevice device, VkDescriptorPool handle, const DeviceDispatch& dld) noexcept { + dld.vkDestroyDescriptorPool(device, handle, nullptr); +} + +void Destroy(VkDevice device, VkDescriptorSetLayout handle, const DeviceDispatch& dld) noexcept { + dld.vkDestroyDescriptorSetLayout(device, handle, nullptr); +} + +void Destroy(VkDevice device, VkDescriptorUpdateTemplateKHR handle, + const DeviceDispatch& dld) noexcept { + dld.vkDestroyDescriptorUpdateTemplateKHR(device, handle, nullptr); +} + +void Destroy(VkDevice device, VkDeviceMemory handle, const DeviceDispatch& dld) noexcept { + dld.vkFreeMemory(device, handle, nullptr); +} + +void Destroy(VkDevice device, VkEvent handle, const DeviceDispatch& dld) noexcept { + dld.vkDestroyEvent(device, handle, nullptr); +} + +void Destroy(VkDevice device, VkFence handle, const DeviceDispatch& dld) noexcept { + dld.vkDestroyFence(device, handle, nullptr); +} + +void Destroy(VkDevice device, VkFramebuffer handle, const DeviceDispatch& dld) 
noexcept { + dld.vkDestroyFramebuffer(device, handle, nullptr); +} + +void Destroy(VkDevice device, VkImage handle, const DeviceDispatch& dld) noexcept { + dld.vkDestroyImage(device, handle, nullptr); +} + +void Destroy(VkDevice device, VkImageView handle, const DeviceDispatch& dld) noexcept { + dld.vkDestroyImageView(device, handle, nullptr); +} + +void Destroy(VkDevice device, VkPipeline handle, const DeviceDispatch& dld) noexcept { + dld.vkDestroyPipeline(device, handle, nullptr); +} + +void Destroy(VkDevice device, VkPipelineLayout handle, const DeviceDispatch& dld) noexcept { + dld.vkDestroyPipelineLayout(device, handle, nullptr); +} + +void Destroy(VkDevice device, VkQueryPool handle, const DeviceDispatch& dld) noexcept { + dld.vkDestroyQueryPool(device, handle, nullptr); +} + +void Destroy(VkDevice device, VkRenderPass handle, const DeviceDispatch& dld) noexcept { + dld.vkDestroyRenderPass(device, handle, nullptr); +} + +void Destroy(VkDevice device, VkSampler handle, const DeviceDispatch& dld) noexcept { + dld.vkDestroySampler(device, handle, nullptr); +} + +void Destroy(VkDevice device, VkSwapchainKHR handle, const DeviceDispatch& dld) noexcept { + dld.vkDestroySwapchainKHR(device, handle, nullptr); +} + +void Destroy(VkDevice device, VkSemaphore handle, const DeviceDispatch& dld) noexcept { + dld.vkDestroySemaphore(device, handle, nullptr); +} + +void Destroy(VkDevice device, VkShaderModule handle, const DeviceDispatch& dld) noexcept { + dld.vkDestroyShaderModule(device, handle, nullptr); +} + +void Destroy(VkInstance instance, VkDebugUtilsMessengerEXT handle, + const InstanceDispatch& dld) noexcept { + dld.vkDestroyDebugUtilsMessengerEXT(instance, handle, nullptr); +} + +void Destroy(VkInstance instance, VkSurfaceKHR handle, const InstanceDispatch& dld) noexcept { + dld.vkDestroySurfaceKHR(instance, handle, nullptr); +} + +VkResult Free(VkDevice device, VkDescriptorPool handle, Span sets, + const DeviceDispatch& dld) noexcept { + return dld.vkFreeDescriptorSets(device, handle, sets.size(), sets.data()); +} + +VkResult Free(VkDevice device, VkCommandPool handle, Span buffers, + const DeviceDispatch& dld) noexcept { + dld.vkFreeCommandBuffers(device, handle, buffers.size(), buffers.data()); + return VK_SUCCESS; +} + +Instance Instance::Create(u32 version, Span layers, Span extensions, + InstanceDispatch& dispatch) noexcept { + const VkApplicationInfo application_info{ + .sType = VK_STRUCTURE_TYPE_APPLICATION_INFO, + .pNext = nullptr, + .pApplicationName = "yuzu Emulator", + .applicationVersion = VK_MAKE_VERSION(0, 1, 0), + .pEngineName = "yuzu Emulator", + .engineVersion = VK_MAKE_VERSION(0, 1, 0), + .apiVersion = version, + }; + const VkInstanceCreateInfo ci{ + .sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO, + .pNext = nullptr, + .flags = 0, + .pApplicationInfo = &application_info, + .enabledLayerCount = layers.size(), + .ppEnabledLayerNames = layers.data(), + .enabledExtensionCount = extensions.size(), + .ppEnabledExtensionNames = extensions.data(), + }; + + VkInstance instance; + if (dispatch.vkCreateInstance(&ci, nullptr, &instance) != VK_SUCCESS) { + // Failed to create the instance. + return {}; + } + if (!Proc(dispatch.vkDestroyInstance, dispatch, "vkDestroyInstance", instance)) { + // We successfully created an instance but the destroy function couldn't be loaded. + // This is a good moment to panic. 
+ return {}; + } + + return Instance(instance, dispatch); +} + +std::optional> Instance::EnumeratePhysicalDevices() { + u32 num; + if (dld->vkEnumeratePhysicalDevices(handle, &num, nullptr) != VK_SUCCESS) { + return std::nullopt; + } + std::vector physical_devices(num); + if (dld->vkEnumeratePhysicalDevices(handle, &num, physical_devices.data()) != VK_SUCCESS) { + return std::nullopt; + } + SortPhysicalDevices(physical_devices, *dld); + return std::make_optional(std::move(physical_devices)); +} + +DebugCallback Instance::TryCreateDebugCallback( + PFN_vkDebugUtilsMessengerCallbackEXT callback) noexcept { + const VkDebugUtilsMessengerCreateInfoEXT ci{ + .sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT, + .pNext = nullptr, + .flags = 0, + .messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT | + VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT | + VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT | + VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT, + .messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT | + VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT, + .pfnUserCallback = callback, + .pUserData = nullptr, + }; + + VkDebugUtilsMessengerEXT messenger; + if (dld->vkCreateDebugUtilsMessengerEXT(handle, &ci, nullptr, &messenger) != VK_SUCCESS) { + return {}; + } + return DebugCallback(messenger, handle, *dld); +} + +void Buffer::BindMemory(VkDeviceMemory memory, VkDeviceSize offset) const { + Check(dld->vkBindBufferMemory(owner, handle, memory, offset)); +} + +void Buffer::SetObjectNameEXT(const char* name) const { + SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_BUFFER, name); +} + +void BufferView::SetObjectNameEXT(const char* name) const { + SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_BUFFER_VIEW, name); +} + +void Image::BindMemory(VkDeviceMemory memory, VkDeviceSize offset) const { + Check(dld->vkBindImageMemory(owner, handle, memory, offset)); +} + +void Image::SetObjectNameEXT(const char* name) const { + SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_IMAGE, name); +} + +void ImageView::SetObjectNameEXT(const char* name) const { + SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_IMAGE_VIEW, name); +} + +void DeviceMemory::SetObjectNameEXT(const char* name) const { + SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_DEVICE_MEMORY, name); +} + +void Fence::SetObjectNameEXT(const char* name) const { + SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_FENCE, name); +} + +void Framebuffer::SetObjectNameEXT(const char* name) const { + SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_FRAMEBUFFER, name); +} + +DescriptorSets DescriptorPool::Allocate(const VkDescriptorSetAllocateInfo& ai) const { + const std::size_t num = ai.descriptorSetCount; + std::unique_ptr sets = std::make_unique(num); + switch (const VkResult result = dld->vkAllocateDescriptorSets(owner, &ai, sets.get())) { + case VK_SUCCESS: + return DescriptorSets(std::move(sets), num, owner, handle, *dld); + case VK_ERROR_OUT_OF_POOL_MEMORY: + return {}; + default: + throw Exception(result); + } +} + +void DescriptorPool::SetObjectNameEXT(const char* name) const { + SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_DESCRIPTOR_POOL, name); +} + +CommandBuffers CommandPool::Allocate(std::size_t num_buffers, VkCommandBufferLevel level) const { + const VkCommandBufferAllocateInfo ai{ + .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, + .pNext = nullptr, + .commandPool = handle, + .level = level, + .commandBufferCount = static_cast(num_buffers), + }; + + std::unique_ptr buffers = 
std::make_unique(num_buffers); + switch (const VkResult result = dld->vkAllocateCommandBuffers(owner, &ai, buffers.get())) { + case VK_SUCCESS: + return CommandBuffers(std::move(buffers), num_buffers, owner, handle, *dld); + case VK_ERROR_OUT_OF_POOL_MEMORY: + return {}; + default: + throw Exception(result); + } +} + +void CommandPool::SetObjectNameEXT(const char* name) const { + SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_COMMAND_POOL, name); +} + +std::vector SwapchainKHR::GetImages() const { + u32 num; + Check(dld->vkGetSwapchainImagesKHR(owner, handle, &num, nullptr)); + std::vector images(num); + Check(dld->vkGetSwapchainImagesKHR(owner, handle, &num, images.data())); + return images; +} + +void Event::SetObjectNameEXT(const char* name) const { + SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_EVENT, name); +} + +void ShaderModule::SetObjectNameEXT(const char* name) const { + SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_SHADER_MODULE, name); +} + +void Semaphore::SetObjectNameEXT(const char* name) const { + SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_SEMAPHORE, name); +} + +Device Device::Create(VkPhysicalDevice physical_device, Span queues_ci, + Span enabled_extensions, const void* next, + DeviceDispatch& dispatch) noexcept { + const VkDeviceCreateInfo ci{ + .sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO, + .pNext = next, + .flags = 0, + .queueCreateInfoCount = queues_ci.size(), + .pQueueCreateInfos = queues_ci.data(), + .enabledLayerCount = 0, + .ppEnabledLayerNames = nullptr, + .enabledExtensionCount = enabled_extensions.size(), + .ppEnabledExtensionNames = enabled_extensions.data(), + .pEnabledFeatures = nullptr, + }; + + VkDevice device; + if (dispatch.vkCreateDevice(physical_device, &ci, nullptr, &device) != VK_SUCCESS) { + return {}; + } + Load(device, dispatch); + return Device(device, dispatch); +} + +Queue Device::GetQueue(u32 family_index) const noexcept { + VkQueue queue; + dld->vkGetDeviceQueue(handle, family_index, 0, &queue); + return Queue(queue, *dld); +} + +Buffer Device::CreateBuffer(const VkBufferCreateInfo& ci) const { + VkBuffer object; + Check(dld->vkCreateBuffer(handle, &ci, nullptr, &object)); + return Buffer(object, handle, *dld); +} + +BufferView Device::CreateBufferView(const VkBufferViewCreateInfo& ci) const { + VkBufferView object; + Check(dld->vkCreateBufferView(handle, &ci, nullptr, &object)); + return BufferView(object, handle, *dld); +} + +Image Device::CreateImage(const VkImageCreateInfo& ci) const { + VkImage object; + Check(dld->vkCreateImage(handle, &ci, nullptr, &object)); + return Image(object, handle, *dld); +} + +ImageView Device::CreateImageView(const VkImageViewCreateInfo& ci) const { + VkImageView object; + Check(dld->vkCreateImageView(handle, &ci, nullptr, &object)); + return ImageView(object, handle, *dld); +} + +Semaphore Device::CreateSemaphore() const { + static constexpr VkSemaphoreCreateInfo ci{ + .sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, + .pNext = nullptr, + .flags = 0, + }; + return CreateSemaphore(ci); +} + +Semaphore Device::CreateSemaphore(const VkSemaphoreCreateInfo& ci) const { + VkSemaphore object; + Check(dld->vkCreateSemaphore(handle, &ci, nullptr, &object)); + return Semaphore(object, handle, *dld); +} + +Fence Device::CreateFence(const VkFenceCreateInfo& ci) const { + VkFence object; + Check(dld->vkCreateFence(handle, &ci, nullptr, &object)); + return Fence(object, handle, *dld); +} + +DescriptorPool Device::CreateDescriptorPool(const VkDescriptorPoolCreateInfo& ci) const { + VkDescriptorPool 
object; + Check(dld->vkCreateDescriptorPool(handle, &ci, nullptr, &object)); + return DescriptorPool(object, handle, *dld); +} + +RenderPass Device::CreateRenderPass(const VkRenderPassCreateInfo& ci) const { + VkRenderPass object; + Check(dld->vkCreateRenderPass(handle, &ci, nullptr, &object)); + return RenderPass(object, handle, *dld); +} + +DescriptorSetLayout Device::CreateDescriptorSetLayout( + const VkDescriptorSetLayoutCreateInfo& ci) const { + VkDescriptorSetLayout object; + Check(dld->vkCreateDescriptorSetLayout(handle, &ci, nullptr, &object)); + return DescriptorSetLayout(object, handle, *dld); +} + +PipelineLayout Device::CreatePipelineLayout(const VkPipelineLayoutCreateInfo& ci) const { + VkPipelineLayout object; + Check(dld->vkCreatePipelineLayout(handle, &ci, nullptr, &object)); + return PipelineLayout(object, handle, *dld); +} + +Pipeline Device::CreateGraphicsPipeline(const VkGraphicsPipelineCreateInfo& ci) const { + VkPipeline object; + Check(dld->vkCreateGraphicsPipelines(handle, nullptr, 1, &ci, nullptr, &object)); + return Pipeline(object, handle, *dld); +} + +Pipeline Device::CreateComputePipeline(const VkComputePipelineCreateInfo& ci) const { + VkPipeline object; + Check(dld->vkCreateComputePipelines(handle, nullptr, 1, &ci, nullptr, &object)); + return Pipeline(object, handle, *dld); +} + +Sampler Device::CreateSampler(const VkSamplerCreateInfo& ci) const { + VkSampler object; + Check(dld->vkCreateSampler(handle, &ci, nullptr, &object)); + return Sampler(object, handle, *dld); +} + +Framebuffer Device::CreateFramebuffer(const VkFramebufferCreateInfo& ci) const { + VkFramebuffer object; + Check(dld->vkCreateFramebuffer(handle, &ci, nullptr, &object)); + return Framebuffer(object, handle, *dld); +} + +CommandPool Device::CreateCommandPool(const VkCommandPoolCreateInfo& ci) const { + VkCommandPool object; + Check(dld->vkCreateCommandPool(handle, &ci, nullptr, &object)); + return CommandPool(object, handle, *dld); +} + +DescriptorUpdateTemplateKHR Device::CreateDescriptorUpdateTemplateKHR( + const VkDescriptorUpdateTemplateCreateInfoKHR& ci) const { + VkDescriptorUpdateTemplateKHR object; + Check(dld->vkCreateDescriptorUpdateTemplateKHR(handle, &ci, nullptr, &object)); + return DescriptorUpdateTemplateKHR(object, handle, *dld); +} + +QueryPool Device::CreateQueryPool(const VkQueryPoolCreateInfo& ci) const { + VkQueryPool object; + Check(dld->vkCreateQueryPool(handle, &ci, nullptr, &object)); + return QueryPool(object, handle, *dld); +} + +ShaderModule Device::CreateShaderModule(const VkShaderModuleCreateInfo& ci) const { + VkShaderModule object; + Check(dld->vkCreateShaderModule(handle, &ci, nullptr, &object)); + return ShaderModule(object, handle, *dld); +} + +Event Device::CreateEvent() const { + static constexpr VkEventCreateInfo ci{ + .sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO, + .pNext = nullptr, + .flags = 0, + }; + + VkEvent object; + Check(dld->vkCreateEvent(handle, &ci, nullptr, &object)); + return Event(object, handle, *dld); +} + +SwapchainKHR Device::CreateSwapchainKHR(const VkSwapchainCreateInfoKHR& ci) const { + VkSwapchainKHR object; + Check(dld->vkCreateSwapchainKHR(handle, &ci, nullptr, &object)); + return SwapchainKHR(object, handle, *dld); +} + +DeviceMemory Device::TryAllocateMemory(const VkMemoryAllocateInfo& ai) const noexcept { + VkDeviceMemory memory; + if (dld->vkAllocateMemory(handle, &ai, nullptr, &memory) != VK_SUCCESS) { + return {}; + } + return DeviceMemory(memory, handle, *dld); +} + +DeviceMemory Device::AllocateMemory(const 
VkMemoryAllocateInfo& ai) const { + VkDeviceMemory memory; + Check(dld->vkAllocateMemory(handle, &ai, nullptr, &memory)); + return DeviceMemory(memory, handle, *dld); +} + +VkMemoryRequirements Device::GetBufferMemoryRequirements(VkBuffer buffer) const noexcept { + VkMemoryRequirements requirements; + dld->vkGetBufferMemoryRequirements(handle, buffer, &requirements); + return requirements; +} + +VkMemoryRequirements Device::GetImageMemoryRequirements(VkImage image) const noexcept { + VkMemoryRequirements requirements; + dld->vkGetImageMemoryRequirements(handle, image, &requirements); + return requirements; +} + +void Device::UpdateDescriptorSets(Span writes, + Span copies) const noexcept { + dld->vkUpdateDescriptorSets(handle, writes.size(), writes.data(), copies.size(), copies.data()); +} + +VkPhysicalDeviceProperties PhysicalDevice::GetProperties() const noexcept { + VkPhysicalDeviceProperties properties; + dld->vkGetPhysicalDeviceProperties(physical_device, &properties); + return properties; +} + +void PhysicalDevice::GetProperties2KHR(VkPhysicalDeviceProperties2KHR& properties) const noexcept { + dld->vkGetPhysicalDeviceProperties2KHR(physical_device, &properties); +} + +VkPhysicalDeviceFeatures PhysicalDevice::GetFeatures() const noexcept { + VkPhysicalDeviceFeatures2KHR features2; + features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR; + features2.pNext = nullptr; + dld->vkGetPhysicalDeviceFeatures2KHR(physical_device, &features2); + return features2.features; +} + +void PhysicalDevice::GetFeatures2KHR(VkPhysicalDeviceFeatures2KHR& features) const noexcept { + dld->vkGetPhysicalDeviceFeatures2KHR(physical_device, &features); +} + +VkFormatProperties PhysicalDevice::GetFormatProperties(VkFormat format) const noexcept { + VkFormatProperties properties; + dld->vkGetPhysicalDeviceFormatProperties(physical_device, format, &properties); + return properties; +} + +std::vector PhysicalDevice::EnumerateDeviceExtensionProperties() const { + u32 num; + dld->vkEnumerateDeviceExtensionProperties(physical_device, nullptr, &num, nullptr); + std::vector properties(num); + dld->vkEnumerateDeviceExtensionProperties(physical_device, nullptr, &num, properties.data()); + return properties; +} + +std::vector PhysicalDevice::GetQueueFamilyProperties() const { + u32 num; + dld->vkGetPhysicalDeviceQueueFamilyProperties(physical_device, &num, nullptr); + std::vector properties(num); + dld->vkGetPhysicalDeviceQueueFamilyProperties(physical_device, &num, properties.data()); + return properties; +} + +bool PhysicalDevice::GetSurfaceSupportKHR(u32 queue_family_index, VkSurfaceKHR surface) const { + VkBool32 supported; + Check(dld->vkGetPhysicalDeviceSurfaceSupportKHR(physical_device, queue_family_index, surface, + &supported)); + return supported == VK_TRUE; +} + +VkSurfaceCapabilitiesKHR PhysicalDevice::GetSurfaceCapabilitiesKHR(VkSurfaceKHR surface) const { + VkSurfaceCapabilitiesKHR capabilities; + Check(dld->vkGetPhysicalDeviceSurfaceCapabilitiesKHR(physical_device, surface, &capabilities)); + return capabilities; +} + +std::vector PhysicalDevice::GetSurfaceFormatsKHR(VkSurfaceKHR surface) const { + u32 num; + Check(dld->vkGetPhysicalDeviceSurfaceFormatsKHR(physical_device, surface, &num, nullptr)); + std::vector formats(num); + Check( + dld->vkGetPhysicalDeviceSurfaceFormatsKHR(physical_device, surface, &num, formats.data())); + return formats; +} + +std::vector PhysicalDevice::GetSurfacePresentModesKHR( + VkSurfaceKHR surface) const { + u32 num; + 
Check(dld->vkGetPhysicalDeviceSurfacePresentModesKHR(physical_device, surface, &num, nullptr)); + std::vector modes(num); + Check(dld->vkGetPhysicalDeviceSurfacePresentModesKHR(physical_device, surface, &num, + modes.data())); + return modes; +} + +VkPhysicalDeviceMemoryProperties PhysicalDevice::GetMemoryProperties() const noexcept { + VkPhysicalDeviceMemoryProperties properties; + dld->vkGetPhysicalDeviceMemoryProperties(physical_device, &properties); + return properties; +} + +u32 AvailableVersion(const InstanceDispatch& dld) noexcept { + PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion; + if (!Proc(vkEnumerateInstanceVersion, dld, "vkEnumerateInstanceVersion")) { + // If the procedure is not found, Vulkan 1.0 is assumed + return VK_API_VERSION_1_0; + } + u32 version; + if (const VkResult result = vkEnumerateInstanceVersion(&version); result != VK_SUCCESS) { + LOG_ERROR(Render_Vulkan, "vkEnumerateInstanceVersion returned {}, assuming Vulkan 1.1", + ToString(result)); + return VK_API_VERSION_1_1; + } + return version; +} + +std::optional> EnumerateInstanceExtensionProperties( + const InstanceDispatch& dld) { + u32 num; + if (dld.vkEnumerateInstanceExtensionProperties(nullptr, &num, nullptr) != VK_SUCCESS) { + return std::nullopt; + } + std::vector properties(num); + if (dld.vkEnumerateInstanceExtensionProperties(nullptr, &num, properties.data()) != + VK_SUCCESS) { + return std::nullopt; + } + return properties; +} + +std::optional> EnumerateInstanceLayerProperties( + const InstanceDispatch& dld) { + u32 num; + if (dld.vkEnumerateInstanceLayerProperties(&num, nullptr) != VK_SUCCESS) { + return std::nullopt; + } + std::vector properties(num); + if (dld.vkEnumerateInstanceLayerProperties(&num, properties.data()) != VK_SUCCESS) { + return std::nullopt; + } + return properties; +} + +} // namespace Vulkan::vk diff --git a/src/video_core/vulkan_common/vulkan_wrapper.h b/src/video_core/vulkan_common/vulkan_wrapper.h new file mode 100644 index 000000000..f9a184e00 --- /dev/null +++ b/src/video_core/vulkan_common/vulkan_wrapper.h @@ -0,0 +1,1213 @@ +// Copyright 2020 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#pragma once + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#define VK_NO_PROTOTYPES +#include + +#include "common/common_types.h" + +#ifdef _MSC_VER +#pragma warning(disable : 26812) // Disable prefer enum class over enum +#endif + +namespace Vulkan::vk { + +/** + * Span for Vulkan arrays. + * Based on std::span but optimized for array access instead of iterators. + * Size returns uint32_t instead of size_t to ease interaction with Vulkan functions. + */ +template +class Span { +public: + using value_type = T; + using size_type = u32; + using difference_type = std::ptrdiff_t; + using reference = const T&; + using const_reference = const T&; + using pointer = const T*; + using const_pointer = const T*; + using iterator = const T*; + using const_iterator = const T*; + + /// Construct an empty span. + constexpr Span() noexcept = default; + + /// Construct an empty span + constexpr Span(std::nullptr_t) noexcept {} + + /// Construct a span from a single element. + constexpr Span(const T& value) noexcept : ptr{&value}, num{1} {} + + /// Construct a span from a range. 
+ template + // requires std::data(const Range&) + // requires std::size(const Range&) + constexpr Span(const Range& range) : ptr{std::data(range)}, num{std::size(range)} {} + + /// Construct a span from a pointer and a size. + /// This is inteded for subranges. + constexpr Span(const T* ptr_, std::size_t num_) noexcept : ptr{ptr_}, num{num_} {} + + /// Returns the data pointer by the span. + constexpr const T* data() const noexcept { + return ptr; + } + + /// Returns the number of elements in the span. + /// @note Returns a 32 bits integer because most Vulkan functions expect this type. + constexpr u32 size() const noexcept { + return static_cast(num); + } + + /// Returns true when the span is empty. + constexpr bool empty() const noexcept { + return num == 0; + } + + /// Returns a reference to the element in the passed index. + /// @pre: index < size() + constexpr const T& operator[](std::size_t index) const noexcept { + return ptr[index]; + } + + /// Returns an iterator to the beginning of the span. + constexpr const T* begin() const noexcept { + return ptr; + } + + /// Returns an iterator to the end of the span. + constexpr const T* end() const noexcept { + return ptr + num; + } + + /// Returns an iterator to the beginning of the span. + constexpr const T* cbegin() const noexcept { + return ptr; + } + + /// Returns an iterator to the end of the span. + constexpr const T* cend() const noexcept { + return ptr + num; + } + +private: + const T* ptr = nullptr; + std::size_t num = 0; +}; + +/// Vulkan exception generated from a VkResult. +class Exception final : public std::exception { +public: + /// Construct the exception with a result. + /// @pre result != VK_SUCCESS + explicit Exception(VkResult result_) : result{result_} {} + virtual ~Exception() = default; + + const char* what() const noexcept override; + +private: + VkResult result; +}; + +/// Converts a VkResult enum into a rodata string +const char* ToString(VkResult) noexcept; + +/// Throws a Vulkan exception if result is not success. +inline void Check(VkResult result) { + if (result != VK_SUCCESS) { + throw Exception(result); + } +} + +/// Throws a Vulkan exception if result is an error. +/// @return result +inline VkResult Filter(VkResult result) { + if (result < 0) { + throw Exception(result); + } + return result; +} + +/// Table holding Vulkan instance function pointers. 
+struct InstanceDispatch { + PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr; + + PFN_vkCreateInstance vkCreateInstance; + PFN_vkDestroyInstance vkDestroyInstance; + PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties; + PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties; + + PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT; + PFN_vkCreateDevice vkCreateDevice; + PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT; + PFN_vkDestroyDevice vkDestroyDevice; + PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR; + PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties; + PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices; + PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr; + PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR; + PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties; + PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties; + PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties; + PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR; + PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties; + PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR; + PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR; + PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR; + PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR; + PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR; + PFN_vkQueuePresentKHR vkQueuePresentKHR; +}; + +/// Table holding Vulkan device function pointers. +struct DeviceDispatch : public InstanceDispatch { + PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR; + PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers; + PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets; + PFN_vkAllocateMemory vkAllocateMemory; + PFN_vkBeginCommandBuffer vkBeginCommandBuffer; + PFN_vkBindBufferMemory vkBindBufferMemory; + PFN_vkBindImageMemory vkBindImageMemory; + PFN_vkCmdBeginQuery vkCmdBeginQuery; + PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass; + PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT; + PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT; + PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets; + PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer; + PFN_vkCmdBindPipeline vkCmdBindPipeline; + PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT; + PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers; + PFN_vkCmdBlitImage vkCmdBlitImage; + PFN_vkCmdClearAttachments vkCmdClearAttachments; + PFN_vkCmdCopyBuffer vkCmdCopyBuffer; + PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage; + PFN_vkCmdCopyImage vkCmdCopyImage; + PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer; + PFN_vkCmdDispatch vkCmdDispatch; + PFN_vkCmdDraw vkCmdDraw; + PFN_vkCmdDrawIndexed vkCmdDrawIndexed; + PFN_vkCmdEndQuery vkCmdEndQuery; + PFN_vkCmdEndRenderPass vkCmdEndRenderPass; + PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT; + PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT; + PFN_vkCmdFillBuffer vkCmdFillBuffer; + PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier; + PFN_vkCmdPushConstants vkCmdPushConstants; + PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants; + PFN_vkCmdSetDepthBias vkCmdSetDepthBias; + PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds; + PFN_vkCmdSetEvent vkCmdSetEvent; + PFN_vkCmdSetScissor 
vkCmdSetScissor; + PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask; + PFN_vkCmdSetStencilReference vkCmdSetStencilReference; + PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask; + PFN_vkCmdSetViewport vkCmdSetViewport; + PFN_vkCmdWaitEvents vkCmdWaitEvents; + PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT; + PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT; + PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT; + PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT; + PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT; + PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT; + PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT; + PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT; + PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT; + PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT; + PFN_vkCmdResolveImage vkCmdResolveImage; + PFN_vkCreateBuffer vkCreateBuffer; + PFN_vkCreateBufferView vkCreateBufferView; + PFN_vkCreateCommandPool vkCreateCommandPool; + PFN_vkCreateComputePipelines vkCreateComputePipelines; + PFN_vkCreateDescriptorPool vkCreateDescriptorPool; + PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout; + PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR; + PFN_vkCreateEvent vkCreateEvent; + PFN_vkCreateFence vkCreateFence; + PFN_vkCreateFramebuffer vkCreateFramebuffer; + PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines; + PFN_vkCreateImage vkCreateImage; + PFN_vkCreateImageView vkCreateImageView; + PFN_vkCreatePipelineLayout vkCreatePipelineLayout; + PFN_vkCreateQueryPool vkCreateQueryPool; + PFN_vkCreateRenderPass vkCreateRenderPass; + PFN_vkCreateSampler vkCreateSampler; + PFN_vkCreateSemaphore vkCreateSemaphore; + PFN_vkCreateShaderModule vkCreateShaderModule; + PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR; + PFN_vkDestroyBuffer vkDestroyBuffer; + PFN_vkDestroyBufferView vkDestroyBufferView; + PFN_vkDestroyCommandPool vkDestroyCommandPool; + PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool; + PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout; + PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR; + PFN_vkDestroyEvent vkDestroyEvent; + PFN_vkDestroyFence vkDestroyFence; + PFN_vkDestroyFramebuffer vkDestroyFramebuffer; + PFN_vkDestroyImage vkDestroyImage; + PFN_vkDestroyImageView vkDestroyImageView; + PFN_vkDestroyPipeline vkDestroyPipeline; + PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout; + PFN_vkDestroyQueryPool vkDestroyQueryPool; + PFN_vkDestroyRenderPass vkDestroyRenderPass; + PFN_vkDestroySampler vkDestroySampler; + PFN_vkDestroySemaphore vkDestroySemaphore; + PFN_vkDestroyShaderModule vkDestroyShaderModule; + PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR; + PFN_vkDeviceWaitIdle vkDeviceWaitIdle; + PFN_vkEndCommandBuffer vkEndCommandBuffer; + PFN_vkFreeCommandBuffers vkFreeCommandBuffers; + PFN_vkFreeDescriptorSets vkFreeDescriptorSets; + PFN_vkFreeMemory vkFreeMemory; + PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements; + PFN_vkGetDeviceQueue vkGetDeviceQueue; + PFN_vkGetEventStatus vkGetEventStatus; + PFN_vkGetFenceStatus vkGetFenceStatus; + PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements; + PFN_vkGetQueryPoolResults vkGetQueryPoolResults; + PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR; + PFN_vkMapMemory vkMapMemory; + PFN_vkQueueSubmit vkQueueSubmit; + PFN_vkResetFences vkResetFences; + PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT; + PFN_vkSetDebugUtilsObjectNameEXT 
vkSetDebugUtilsObjectNameEXT; + PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT; + PFN_vkUnmapMemory vkUnmapMemory; + PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR; + PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets; + PFN_vkWaitForFences vkWaitForFences; + PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR; +}; + +/// Loads instance agnostic function pointers. +/// @return True on success, false on error. +bool Load(InstanceDispatch&) noexcept; + +/// Loads instance function pointers. +/// @return True on success, false on error. +bool Load(VkInstance, InstanceDispatch&) noexcept; + +void Destroy(VkInstance, const InstanceDispatch&) noexcept; +void Destroy(VkDevice, const InstanceDispatch&) noexcept; + +void Destroy(VkDevice, VkBuffer, const DeviceDispatch&) noexcept; +void Destroy(VkDevice, VkBufferView, const DeviceDispatch&) noexcept; +void Destroy(VkDevice, VkCommandPool, const DeviceDispatch&) noexcept; +void Destroy(VkDevice, VkDescriptorPool, const DeviceDispatch&) noexcept; +void Destroy(VkDevice, VkDescriptorSetLayout, const DeviceDispatch&) noexcept; +void Destroy(VkDevice, VkDescriptorUpdateTemplateKHR, const DeviceDispatch&) noexcept; +void Destroy(VkDevice, VkDeviceMemory, const DeviceDispatch&) noexcept; +void Destroy(VkDevice, VkEvent, const DeviceDispatch&) noexcept; +void Destroy(VkDevice, VkFence, const DeviceDispatch&) noexcept; +void Destroy(VkDevice, VkFramebuffer, const DeviceDispatch&) noexcept; +void Destroy(VkDevice, VkImage, const DeviceDispatch&) noexcept; +void Destroy(VkDevice, VkImageView, const DeviceDispatch&) noexcept; +void Destroy(VkDevice, VkPipeline, const DeviceDispatch&) noexcept; +void Destroy(VkDevice, VkPipelineLayout, const DeviceDispatch&) noexcept; +void Destroy(VkDevice, VkQueryPool, const DeviceDispatch&) noexcept; +void Destroy(VkDevice, VkRenderPass, const DeviceDispatch&) noexcept; +void Destroy(VkDevice, VkSampler, const DeviceDispatch&) noexcept; +void Destroy(VkDevice, VkSwapchainKHR, const DeviceDispatch&) noexcept; +void Destroy(VkDevice, VkSemaphore, const DeviceDispatch&) noexcept; +void Destroy(VkDevice, VkShaderModule, const DeviceDispatch&) noexcept; +void Destroy(VkInstance, VkDebugUtilsMessengerEXT, const InstanceDispatch&) noexcept; +void Destroy(VkInstance, VkSurfaceKHR, const InstanceDispatch&) noexcept; + +VkResult Free(VkDevice, VkDescriptorPool, Span, const DeviceDispatch&) noexcept; +VkResult Free(VkDevice, VkCommandPool, Span, const DeviceDispatch&) noexcept; + +template +class Handle; + +/// Handle with an owning type. +/// Analogue to std::unique_ptr. +template +class Handle { +public: + /// Construct a handle and hold it's ownership. + explicit Handle(Type handle_, OwnerType owner_, const Dispatch& dld_) noexcept + : handle{handle_}, owner{owner_}, dld{&dld_} {} + + /// Construct an empty handle. + Handle() = default; + + /// Copying Vulkan objects is not supported and will never be. + Handle(const Handle&) = delete; + Handle& operator=(const Handle&) = delete; + + /// Construct a handle transfering the ownership from another handle. + Handle(Handle&& rhs) noexcept + : handle{std::exchange(rhs.handle, nullptr)}, owner{rhs.owner}, dld{rhs.dld} {} + + /// Assign the current handle transfering the ownership from another handle. + /// Destroys any previously held object. + Handle& operator=(Handle&& rhs) noexcept { + Release(); + handle = std::exchange(rhs.handle, nullptr); + owner = rhs.owner; + dld = rhs.dld; + return *this; + } + + /// Destroys the current handle if it existed. 
+ ~Handle() noexcept { + Release(); + } + + /// Destroys any held object. + void reset() noexcept { + Release(); + handle = nullptr; + } + + /// Returns the address of the held object. + /// Intended for Vulkan structures that expect a pointer to an array. + const Type* address() const noexcept { + return std::addressof(handle); + } + + /// Returns the held Vulkan handle. + Type operator*() const noexcept { + return handle; + } + + /// Returns true when there's a held object. + explicit operator bool() const noexcept { + return handle != nullptr; + } + +protected: + Type handle = nullptr; + OwnerType owner = nullptr; + const Dispatch* dld = nullptr; + +private: + /// Destroys the held object if it exists. + void Release() noexcept { + if (handle) { + Destroy(owner, handle, *dld); + } + } +}; + +/// Dummy type used to specify a handle has no owner. +struct NoOwner {}; + +/// Handle without an owning type. +/// Analogue to std::unique_ptr +template +class Handle { +public: + /// Construct a handle and hold it's ownership. + explicit Handle(Type handle_, const Dispatch& dld_) noexcept : handle{handle_}, dld{&dld_} {} + + /// Construct an empty handle. + Handle() noexcept = default; + + /// Copying Vulkan objects is not supported and will never be. + Handle(const Handle&) = delete; + Handle& operator=(const Handle&) = delete; + + /// Construct a handle transfering ownership from another handle. + Handle(Handle&& rhs) noexcept : handle{std::exchange(rhs.handle, nullptr)}, dld{rhs.dld} {} + + /// Assign the current handle transfering the ownership from another handle. + /// Destroys any previously held object. + Handle& operator=(Handle&& rhs) noexcept { + Release(); + handle = std::exchange(rhs.handle, nullptr); + dld = rhs.dld; + return *this; + } + + /// Destroys the current handle if it existed. + ~Handle() noexcept { + Release(); + } + + /// Destroys any held object. + void reset() noexcept { + Release(); + handle = nullptr; + } + + /// Returns the address of the held object. + /// Intended for Vulkan structures that expect a pointer to an array. + const Type* address() const noexcept { + return std::addressof(handle); + } + + /// Returns the held Vulkan handle. + Type operator*() const noexcept { + return handle; + } + + /// Returns true when there's a held object. + operator bool() const noexcept { + return handle != nullptr; + } + +protected: + Type handle = nullptr; + const Dispatch* dld = nullptr; + +private: + /// Destroys the held object if it exists. + void Release() noexcept { + if (handle) { + Destroy(handle, *dld); + } + } +}; + +/// Array of a pool allocation. +/// Analogue to std::vector +template +class PoolAllocations { +public: + /// Construct an empty allocation. + PoolAllocations() = default; + + /// Construct an allocation. Errors are reported through IsOutOfPoolMemory(). + explicit PoolAllocations(std::unique_ptr allocations_, std::size_t num_, + VkDevice device_, PoolType pool_, const DeviceDispatch& dld_) noexcept + : allocations{std::move(allocations_)}, num{num_}, device{device_}, pool{pool_}, + dld{&dld_} {} + + /// Copying Vulkan allocations is not supported and will never be. + PoolAllocations(const PoolAllocations&) = delete; + PoolAllocations& operator=(const PoolAllocations&) = delete; + + /// Construct an allocation transfering ownership from another allocation. 
+ PoolAllocations(PoolAllocations&& rhs) noexcept + : allocations{std::move(rhs.allocations)}, num{rhs.num}, device{rhs.device}, pool{rhs.pool}, + dld{rhs.dld} {} + + /// Assign an allocation transfering ownership from another allocation. + /// Releases any previously held allocation. + PoolAllocations& operator=(PoolAllocations&& rhs) noexcept { + Release(); + allocations = std::move(rhs.allocations); + num = rhs.num; + device = rhs.device; + pool = rhs.pool; + dld = rhs.dld; + return *this; + } + + /// Destroys any held allocation. + ~PoolAllocations() { + Release(); + } + + /// Returns the number of allocations. + std::size_t size() const noexcept { + return num; + } + + /// Returns a pointer to the array of allocations. + AllocationType const* data() const noexcept { + return allocations.get(); + } + + /// Returns the allocation in the specified index. + /// @pre index < size() + AllocationType operator[](std::size_t index) const noexcept { + return allocations[index]; + } + + /// True when a pool fails to construct. + bool IsOutOfPoolMemory() const noexcept { + return !device; + } + +private: + /// Destroys the held allocations if they exist. + void Release() noexcept { + if (!allocations) { + return; + } + const Span span(allocations.get(), num); + const VkResult result = Free(device, pool, span, *dld); + // There's no way to report errors from a destructor. + if (result != VK_SUCCESS) { + std::terminate(); + } + } + + std::unique_ptr allocations; + std::size_t num = 0; + VkDevice device = nullptr; + PoolType pool = nullptr; + const DeviceDispatch* dld = nullptr; +}; + +using DebugCallback = Handle; +using DescriptorSetLayout = Handle; +using DescriptorUpdateTemplateKHR = Handle; +using Pipeline = Handle; +using PipelineLayout = Handle; +using QueryPool = Handle; +using RenderPass = Handle; +using Sampler = Handle; +using SurfaceKHR = Handle; + +using DescriptorSets = PoolAllocations; +using CommandBuffers = PoolAllocations; + +/// Vulkan instance owning handle. +class Instance : public Handle { + using Handle::Handle; + +public: + /// Creates a Vulkan instance. Use "operator bool" for error handling. + static Instance Create(u32 version, Span layers, Span extensions, + InstanceDispatch& dispatch) noexcept; + + /// Enumerates physical devices. + /// @return Physical devices and an empty handle on failure. + std::optional> EnumeratePhysicalDevices(); + + /// Tries to create a debug callback messenger. Returns an empty handle on failure. + DebugCallback TryCreateDebugCallback(PFN_vkDebugUtilsMessengerCallbackEXT callback) noexcept; +}; + +class Queue { +public: + /// Construct an empty queue handle. + constexpr Queue() noexcept = default; + + /// Construct a queue handle. + constexpr Queue(VkQueue queue_, const DeviceDispatch& dld_) noexcept + : queue{queue_}, dld{&dld_} {} + + VkResult Submit(Span submit_infos, + VkFence fence = VK_NULL_HANDLE) const noexcept { + return dld->vkQueueSubmit(queue, submit_infos.size(), submit_infos.data(), fence); + } + + VkResult Present(const VkPresentInfoKHR& present_info) const noexcept { + return dld->vkQueuePresentKHR(queue, &present_info); + } + +private: + VkQueue queue = nullptr; + const DeviceDispatch* dld = nullptr; +}; + +class Buffer : public Handle { + using Handle::Handle; + +public: + /// Attaches a memory allocation. + void BindMemory(VkDeviceMemory memory, VkDeviceSize offset) const; + + /// Set object name. 
+ void SetObjectNameEXT(const char* name) const; +}; + +class BufferView : public Handle { + using Handle::Handle; + +public: + /// Set object name. + void SetObjectNameEXT(const char* name) const; +}; + +class Image : public Handle { + using Handle::Handle; + +public: + /// Attaches a memory allocation. + void BindMemory(VkDeviceMemory memory, VkDeviceSize offset) const; + + /// Set object name. + void SetObjectNameEXT(const char* name) const; +}; + +class ImageView : public Handle { + using Handle::Handle; + +public: + /// Set object name. + void SetObjectNameEXT(const char* name) const; +}; + +class DeviceMemory : public Handle { + using Handle::Handle; + +public: + /// Set object name. + void SetObjectNameEXT(const char* name) const; + + u8* Map(VkDeviceSize offset, VkDeviceSize size) const { + void* data; + Check(dld->vkMapMemory(owner, handle, offset, size, 0, &data)); + return static_cast(data); + } + + void Unmap() const noexcept { + dld->vkUnmapMemory(owner, handle); + } +}; + +class Fence : public Handle { + using Handle::Handle; + +public: + /// Set object name. + void SetObjectNameEXT(const char* name) const; + + VkResult Wait(u64 timeout = std::numeric_limits::max()) const noexcept { + return dld->vkWaitForFences(owner, 1, &handle, true, timeout); + } + + VkResult GetStatus() const noexcept { + return dld->vkGetFenceStatus(owner, handle); + } + + void Reset() const { + Check(dld->vkResetFences(owner, 1, &handle)); + } +}; + +class Framebuffer : public Handle { + using Handle::Handle; + +public: + /// Set object name. + void SetObjectNameEXT(const char* name) const; +}; + +class DescriptorPool : public Handle { + using Handle::Handle; + +public: + DescriptorSets Allocate(const VkDescriptorSetAllocateInfo& ai) const; + + /// Set object name. + void SetObjectNameEXT(const char* name) const; +}; + +class CommandPool : public Handle { + using Handle::Handle; + +public: + CommandBuffers Allocate(std::size_t num_buffers, + VkCommandBufferLevel level = VK_COMMAND_BUFFER_LEVEL_PRIMARY) const; + + /// Set object name. + void SetObjectNameEXT(const char* name) const; +}; + +class SwapchainKHR : public Handle { + using Handle::Handle; + +public: + std::vector GetImages() const; +}; + +class Event : public Handle { + using Handle::Handle; + +public: + /// Set object name. + void SetObjectNameEXT(const char* name) const; + + VkResult GetStatus() const noexcept { + return dld->vkGetEventStatus(owner, handle); + } +}; + +class ShaderModule : public Handle { + using Handle::Handle; + +public: + /// Set object name. + void SetObjectNameEXT(const char* name) const; +}; + +class Semaphore : public Handle { + using Handle::Handle; + +public: + /// Set object name. + void SetObjectNameEXT(const char* name) const; + + [[nodiscard]] u64 GetCounter() const { + u64 value; + Check(dld->vkGetSemaphoreCounterValueKHR(owner, handle, &value)); + return value; + } + + /** + * Waits for a timeline semaphore on the host. 
+ * + * @param value Value to wait + * @param timeout Time in nanoseconds to timeout + * @return True on successful wait, false on timeout + */ + bool Wait(u64 value, u64 timeout = std::numeric_limits::max()) const { + const VkSemaphoreWaitInfoKHR wait_info{ + .sType = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR, + .pNext = nullptr, + .flags = 0, + .semaphoreCount = 1, + .pSemaphores = &handle, + .pValues = &value, + }; + const VkResult result = dld->vkWaitSemaphoresKHR(owner, &wait_info, timeout); + switch (result) { + case VK_SUCCESS: + return true; + case VK_TIMEOUT: + return false; + default: + throw Exception(result); + } + } +}; + +class Device : public Handle { + using Handle::Handle; + +public: + static Device Create(VkPhysicalDevice physical_device, Span queues_ci, + Span enabled_extensions, const void* next, + DeviceDispatch& dispatch) noexcept; + + Queue GetQueue(u32 family_index) const noexcept; + + Buffer CreateBuffer(const VkBufferCreateInfo& ci) const; + + BufferView CreateBufferView(const VkBufferViewCreateInfo& ci) const; + + Image CreateImage(const VkImageCreateInfo& ci) const; + + ImageView CreateImageView(const VkImageViewCreateInfo& ci) const; + + Semaphore CreateSemaphore() const; + + Semaphore CreateSemaphore(const VkSemaphoreCreateInfo& ci) const; + + Fence CreateFence(const VkFenceCreateInfo& ci) const; + + DescriptorPool CreateDescriptorPool(const VkDescriptorPoolCreateInfo& ci) const; + + RenderPass CreateRenderPass(const VkRenderPassCreateInfo& ci) const; + + DescriptorSetLayout CreateDescriptorSetLayout(const VkDescriptorSetLayoutCreateInfo& ci) const; + + PipelineLayout CreatePipelineLayout(const VkPipelineLayoutCreateInfo& ci) const; + + Pipeline CreateGraphicsPipeline(const VkGraphicsPipelineCreateInfo& ci) const; + + Pipeline CreateComputePipeline(const VkComputePipelineCreateInfo& ci) const; + + Sampler CreateSampler(const VkSamplerCreateInfo& ci) const; + + Framebuffer CreateFramebuffer(const VkFramebufferCreateInfo& ci) const; + + CommandPool CreateCommandPool(const VkCommandPoolCreateInfo& ci) const; + + DescriptorUpdateTemplateKHR CreateDescriptorUpdateTemplateKHR( + const VkDescriptorUpdateTemplateCreateInfoKHR& ci) const; + + QueryPool CreateQueryPool(const VkQueryPoolCreateInfo& ci) const; + + ShaderModule CreateShaderModule(const VkShaderModuleCreateInfo& ci) const; + + Event CreateEvent() const; + + SwapchainKHR CreateSwapchainKHR(const VkSwapchainCreateInfoKHR& ci) const; + + DeviceMemory TryAllocateMemory(const VkMemoryAllocateInfo& ai) const noexcept; + + DeviceMemory AllocateMemory(const VkMemoryAllocateInfo& ai) const; + + VkMemoryRequirements GetBufferMemoryRequirements(VkBuffer buffer) const noexcept; + + VkMemoryRequirements GetImageMemoryRequirements(VkImage image) const noexcept; + + void UpdateDescriptorSets(Span writes, + Span copies) const noexcept; + + void UpdateDescriptorSet(VkDescriptorSet set, VkDescriptorUpdateTemplateKHR update_template, + const void* data) const noexcept { + dld->vkUpdateDescriptorSetWithTemplateKHR(handle, set, update_template, data); + } + + VkResult AcquireNextImageKHR(VkSwapchainKHR swapchain, u64 timeout, VkSemaphore semaphore, + VkFence fence, u32* image_index) const noexcept { + return dld->vkAcquireNextImageKHR(handle, swapchain, timeout, semaphore, fence, + image_index); + } + + VkResult WaitIdle() const noexcept { + return dld->vkDeviceWaitIdle(handle); + } + + void ResetQueryPoolEXT(VkQueryPool query_pool, u32 first, u32 count) const noexcept { + dld->vkResetQueryPoolEXT(handle, query_pool, first, 
count); + } + + VkResult GetQueryResults(VkQueryPool query_pool, u32 first, u32 count, std::size_t data_size, + void* data, VkDeviceSize stride, + VkQueryResultFlags flags) const noexcept { + return dld->vkGetQueryPoolResults(handle, query_pool, first, count, data_size, data, stride, + flags); + } +}; + +class PhysicalDevice { +public: + constexpr PhysicalDevice() noexcept = default; + + constexpr PhysicalDevice(VkPhysicalDevice physical_device_, + const InstanceDispatch& dld_) noexcept + : physical_device{physical_device_}, dld{&dld_} {} + + constexpr operator VkPhysicalDevice() const noexcept { + return physical_device; + } + + VkPhysicalDeviceProperties GetProperties() const noexcept; + + void GetProperties2KHR(VkPhysicalDeviceProperties2KHR&) const noexcept; + + VkPhysicalDeviceFeatures GetFeatures() const noexcept; + + void GetFeatures2KHR(VkPhysicalDeviceFeatures2KHR&) const noexcept; + + VkFormatProperties GetFormatProperties(VkFormat) const noexcept; + + std::vector EnumerateDeviceExtensionProperties() const; + + std::vector GetQueueFamilyProperties() const; + + bool GetSurfaceSupportKHR(u32 queue_family_index, VkSurfaceKHR) const; + + VkSurfaceCapabilitiesKHR GetSurfaceCapabilitiesKHR(VkSurfaceKHR) const; + + std::vector GetSurfaceFormatsKHR(VkSurfaceKHR) const; + + std::vector GetSurfacePresentModesKHR(VkSurfaceKHR) const; + + VkPhysicalDeviceMemoryProperties GetMemoryProperties() const noexcept; + +private: + VkPhysicalDevice physical_device = nullptr; + const InstanceDispatch* dld = nullptr; +}; + +class CommandBuffer { +public: + CommandBuffer() noexcept = default; + + explicit CommandBuffer(VkCommandBuffer handle_, const DeviceDispatch& dld_) noexcept + : handle{handle_}, dld{&dld_} {} + + const VkCommandBuffer* address() const noexcept { + return &handle; + } + + void Begin(const VkCommandBufferBeginInfo& begin_info) const { + Check(dld->vkBeginCommandBuffer(handle, &begin_info)); + } + + void End() const { + Check(dld->vkEndCommandBuffer(handle)); + } + + void BeginRenderPass(const VkRenderPassBeginInfo& renderpass_bi, + VkSubpassContents contents) const noexcept { + dld->vkCmdBeginRenderPass(handle, &renderpass_bi, contents); + } + + void EndRenderPass() const noexcept { + dld->vkCmdEndRenderPass(handle); + } + + void BeginQuery(VkQueryPool query_pool, u32 query, VkQueryControlFlags flags) const noexcept { + dld->vkCmdBeginQuery(handle, query_pool, query, flags); + } + + void EndQuery(VkQueryPool query_pool, u32 query) const noexcept { + dld->vkCmdEndQuery(handle, query_pool, query); + } + + void BindDescriptorSets(VkPipelineBindPoint bind_point, VkPipelineLayout layout, u32 first, + Span sets, Span dynamic_offsets) const noexcept { + dld->vkCmdBindDescriptorSets(handle, bind_point, layout, first, sets.size(), sets.data(), + dynamic_offsets.size(), dynamic_offsets.data()); + } + + void BindPipeline(VkPipelineBindPoint bind_point, VkPipeline pipeline) const noexcept { + dld->vkCmdBindPipeline(handle, bind_point, pipeline); + } + + void BindIndexBuffer(VkBuffer buffer, VkDeviceSize offset, + VkIndexType index_type) const noexcept { + dld->vkCmdBindIndexBuffer(handle, buffer, offset, index_type); + } + + void BindVertexBuffers(u32 first, u32 count, const VkBuffer* buffers, + const VkDeviceSize* offsets) const noexcept { + dld->vkCmdBindVertexBuffers(handle, first, count, buffers, offsets); + } + + void BindVertexBuffer(u32 binding, VkBuffer buffer, VkDeviceSize offset) const noexcept { + BindVertexBuffers(binding, 1, &buffer, &offset); + } + + void Draw(u32 vertex_count, u32 
instance_count, u32 first_vertex, + u32 first_instance) const noexcept { + dld->vkCmdDraw(handle, vertex_count, instance_count, first_vertex, first_instance); + } + + void DrawIndexed(u32 index_count, u32 instance_count, u32 first_index, u32 vertex_offset, + u32 first_instance) const noexcept { + dld->vkCmdDrawIndexed(handle, index_count, instance_count, first_index, vertex_offset, + first_instance); + } + + void ClearAttachments(Span attachments, + Span rects) const noexcept { + dld->vkCmdClearAttachments(handle, attachments.size(), attachments.data(), rects.size(), + rects.data()); + } + + void BlitImage(VkImage src_image, VkImageLayout src_layout, VkImage dst_image, + VkImageLayout dst_layout, Span regions, + VkFilter filter) const noexcept { + dld->vkCmdBlitImage(handle, src_image, src_layout, dst_image, dst_layout, regions.size(), + regions.data(), filter); + } + + void ResolveImage(VkImage src_image, VkImageLayout src_layout, VkImage dst_image, + VkImageLayout dst_layout, Span regions) { + dld->vkCmdResolveImage(handle, src_image, src_layout, dst_image, dst_layout, regions.size(), + regions.data()); + } + + void Dispatch(u32 x, u32 y, u32 z) const noexcept { + dld->vkCmdDispatch(handle, x, y, z); + } + + void PipelineBarrier(VkPipelineStageFlags src_stage_mask, VkPipelineStageFlags dst_stage_mask, + VkDependencyFlags dependency_flags, Span memory_barriers, + Span buffer_barriers, + Span image_barriers) const noexcept { + dld->vkCmdPipelineBarrier(handle, src_stage_mask, dst_stage_mask, dependency_flags, + memory_barriers.size(), memory_barriers.data(), + buffer_barriers.size(), buffer_barriers.data(), + image_barriers.size(), image_barriers.data()); + } + + void PipelineBarrier(VkPipelineStageFlags src_stage_mask, VkPipelineStageFlags dst_stage_mask, + VkDependencyFlags dependency_flags = 0) const noexcept { + PipelineBarrier(src_stage_mask, dst_stage_mask, dependency_flags, {}, {}, {}); + } + + void PipelineBarrier(VkPipelineStageFlags src_stage_mask, VkPipelineStageFlags dst_stage_mask, + VkDependencyFlags dependency_flags, + const VkBufferMemoryBarrier& buffer_barrier) const noexcept { + PipelineBarrier(src_stage_mask, dst_stage_mask, dependency_flags, {}, buffer_barrier, {}); + } + + void PipelineBarrier(VkPipelineStageFlags src_stage_mask, VkPipelineStageFlags dst_stage_mask, + VkDependencyFlags dependency_flags, + const VkImageMemoryBarrier& image_barrier) const noexcept { + PipelineBarrier(src_stage_mask, dst_stage_mask, dependency_flags, {}, {}, image_barrier); + } + + void CopyBufferToImage(VkBuffer src_buffer, VkImage dst_image, VkImageLayout dst_image_layout, + Span regions) const noexcept { + dld->vkCmdCopyBufferToImage(handle, src_buffer, dst_image, dst_image_layout, regions.size(), + regions.data()); + } + + void CopyBuffer(VkBuffer src_buffer, VkBuffer dst_buffer, + Span regions) const noexcept { + dld->vkCmdCopyBuffer(handle, src_buffer, dst_buffer, regions.size(), regions.data()); + } + + void CopyImage(VkImage src_image, VkImageLayout src_layout, VkImage dst_image, + VkImageLayout dst_layout, Span regions) const noexcept { + dld->vkCmdCopyImage(handle, src_image, src_layout, dst_image, dst_layout, regions.size(), + regions.data()); + } + + void CopyImageToBuffer(VkImage src_image, VkImageLayout src_layout, VkBuffer dst_buffer, + Span regions) const noexcept { + dld->vkCmdCopyImageToBuffer(handle, src_image, src_layout, dst_buffer, regions.size(), + regions.data()); + } + + void FillBuffer(VkBuffer dst_buffer, VkDeviceSize dst_offset, VkDeviceSize size, + u32 data) 
const noexcept { + dld->vkCmdFillBuffer(handle, dst_buffer, dst_offset, size, data); + } + + void PushConstants(VkPipelineLayout layout, VkShaderStageFlags flags, u32 offset, u32 size, + const void* values) const noexcept { + dld->vkCmdPushConstants(handle, layout, flags, offset, size, values); + } + + template + void PushConstants(VkPipelineLayout layout, VkShaderStageFlags flags, + const T& data) const noexcept { + static_assert(std::is_trivially_copyable_v, " is not trivially copyable"); + dld->vkCmdPushConstants(handle, layout, flags, 0, static_cast(sizeof(T)), &data); + } + + void SetViewport(u32 first, Span viewports) const noexcept { + dld->vkCmdSetViewport(handle, first, viewports.size(), viewports.data()); + } + + void SetScissor(u32 first, Span scissors) const noexcept { + dld->vkCmdSetScissor(handle, first, scissors.size(), scissors.data()); + } + + void SetBlendConstants(const float blend_constants[4]) const noexcept { + dld->vkCmdSetBlendConstants(handle, blend_constants); + } + + void SetStencilCompareMask(VkStencilFaceFlags face_mask, u32 compare_mask) const noexcept { + dld->vkCmdSetStencilCompareMask(handle, face_mask, compare_mask); + } + + void SetStencilReference(VkStencilFaceFlags face_mask, u32 reference) const noexcept { + dld->vkCmdSetStencilReference(handle, face_mask, reference); + } + + void SetStencilWriteMask(VkStencilFaceFlags face_mask, u32 write_mask) const noexcept { + dld->vkCmdSetStencilWriteMask(handle, face_mask, write_mask); + } + + void SetDepthBias(float constant_factor, float clamp, float slope_factor) const noexcept { + dld->vkCmdSetDepthBias(handle, constant_factor, clamp, slope_factor); + } + + void SetDepthBounds(float min_depth_bounds, float max_depth_bounds) const noexcept { + dld->vkCmdSetDepthBounds(handle, min_depth_bounds, max_depth_bounds); + } + + void SetEvent(VkEvent event, VkPipelineStageFlags stage_flags) const noexcept { + dld->vkCmdSetEvent(handle, event, stage_flags); + } + + void WaitEvents(Span events, VkPipelineStageFlags src_stage_mask, + VkPipelineStageFlags dst_stage_mask, Span memory_barriers, + Span buffer_barriers, + Span image_barriers) const noexcept { + dld->vkCmdWaitEvents(handle, events.size(), events.data(), src_stage_mask, dst_stage_mask, + memory_barriers.size(), memory_barriers.data(), buffer_barriers.size(), + buffer_barriers.data(), image_barriers.size(), image_barriers.data()); + } + + void BindVertexBuffers2EXT(u32 first_binding, u32 binding_count, const VkBuffer* buffers, + const VkDeviceSize* offsets, const VkDeviceSize* sizes, + const VkDeviceSize* strides) const noexcept { + dld->vkCmdBindVertexBuffers2EXT(handle, first_binding, binding_count, buffers, offsets, + sizes, strides); + } + + void SetCullModeEXT(VkCullModeFlags cull_mode) const noexcept { + dld->vkCmdSetCullModeEXT(handle, cull_mode); + } + + void SetDepthBoundsTestEnableEXT(bool enable) const noexcept { + dld->vkCmdSetDepthBoundsTestEnableEXT(handle, enable ? VK_TRUE : VK_FALSE); + } + + void SetDepthCompareOpEXT(VkCompareOp compare_op) const noexcept { + dld->vkCmdSetDepthCompareOpEXT(handle, compare_op); + } + + void SetDepthTestEnableEXT(bool enable) const noexcept { + dld->vkCmdSetDepthTestEnableEXT(handle, enable ? VK_TRUE : VK_FALSE); + } + + void SetDepthWriteEnableEXT(bool enable) const noexcept { + dld->vkCmdSetDepthWriteEnableEXT(handle, enable ? 
VK_TRUE : VK_FALSE); + } + + void SetFrontFaceEXT(VkFrontFace front_face) const noexcept { + dld->vkCmdSetFrontFaceEXT(handle, front_face); + } + + void SetPrimitiveTopologyEXT(VkPrimitiveTopology primitive_topology) const noexcept { + dld->vkCmdSetPrimitiveTopologyEXT(handle, primitive_topology); + } + + void SetStencilOpEXT(VkStencilFaceFlags face_mask, VkStencilOp fail_op, VkStencilOp pass_op, + VkStencilOp depth_fail_op, VkCompareOp compare_op) const noexcept { + dld->vkCmdSetStencilOpEXT(handle, face_mask, fail_op, pass_op, depth_fail_op, compare_op); + } + + void SetStencilTestEnableEXT(bool enable) const noexcept { + dld->vkCmdSetStencilTestEnableEXT(handle, enable ? VK_TRUE : VK_FALSE); + } + + void BindTransformFeedbackBuffersEXT(u32 first, u32 count, const VkBuffer* buffers, + const VkDeviceSize* offsets, + const VkDeviceSize* sizes) const noexcept { + dld->vkCmdBindTransformFeedbackBuffersEXT(handle, first, count, buffers, offsets, sizes); + } + + void BeginTransformFeedbackEXT(u32 first_counter_buffer, u32 counter_buffers_count, + const VkBuffer* counter_buffers, + const VkDeviceSize* counter_buffer_offsets) const noexcept { + dld->vkCmdBeginTransformFeedbackEXT(handle, first_counter_buffer, counter_buffers_count, + counter_buffers, counter_buffer_offsets); + } + + void EndTransformFeedbackEXT(u32 first_counter_buffer, u32 counter_buffers_count, + const VkBuffer* counter_buffers, + const VkDeviceSize* counter_buffer_offsets) const noexcept { + dld->vkCmdEndTransformFeedbackEXT(handle, first_counter_buffer, counter_buffers_count, + counter_buffers, counter_buffer_offsets); + } + + void BeginDebugUtilsLabelEXT(const char* label, std::span color) const noexcept { + const VkDebugUtilsLabelEXT label_info{ + .sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT, + .pNext = nullptr, + .pLabelName = label, + .color{color[0], color[1], color[2], color[3]}, + }; + dld->vkCmdBeginDebugUtilsLabelEXT(handle, &label_info); + } + + void EndDebugUtilsLabelEXT() const noexcept { + dld->vkCmdEndDebugUtilsLabelEXT(handle); + } + +private: + VkCommandBuffer handle; + const DeviceDispatch* dld; +}; + +u32 AvailableVersion(const InstanceDispatch& dld) noexcept; + +std::optional> EnumerateInstanceExtensionProperties( + const InstanceDispatch& dld); + +std::optional> EnumerateInstanceLayerProperties( + const InstanceDispatch& dld); + +} // namespace Vulkan::vk -- cgit v1.2.3 From 25f88d99cead2f7f6fdbf5e36e7578472aaa65bd Mon Sep 17 00:00:00 2001 From: ReinUsesLisp Date: Thu, 24 Dec 2020 22:05:48 -0300 Subject: renderer_vulkan: Move instance initialization to a separate file Simplify Vulkan's backend initialization code by moving it to a separate file, allowing us to initialize a Vulkan instance from different backends. --- src/video_core/vulkan_common/vulkan_instance.cpp | 152 +++++++++++++++++++++++ src/video_core/vulkan_common/vulkan_instance.h | 21 ++++ 2 files changed, 173 insertions(+) create mode 100644 src/video_core/vulkan_common/vulkan_instance.cpp create mode 100644 src/video_core/vulkan_common/vulkan_instance.h (limited to 'src/video_core/vulkan_common') diff --git a/src/video_core/vulkan_common/vulkan_instance.cpp b/src/video_core/vulkan_common/vulkan_instance.cpp new file mode 100644 index 000000000..c19f93e0a --- /dev/null +++ b/src/video_core/vulkan_common/vulkan_instance.cpp @@ -0,0 +1,152 @@ +// Copyright 2020 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
+ +#include +#include +#include +#include +#include + +#include "common/common_types.h" +#include "common/dynamic_library.h" +#include "common/logging/log.h" +#include "core/frontend/emu_window.h" +#include "video_core/vulkan_common/vulkan_instance.h" +#include "video_core/vulkan_common/vulkan_wrapper.h" + +// Include these late to avoid polluting previous headers +#ifdef _WIN32 +#include +// ensure include order +#include +#endif + +#if !defined(_WIN32) && !defined(__APPLE__) +#include +#include +#include +#endif + +namespace Vulkan { +namespace { +[[nodiscard]] std::vector RequiredExtensions( + Core::Frontend::WindowSystemType window_type, bool enable_debug_utils) { + std::vector extensions; + extensions.reserve(6); + switch (window_type) { + case Core::Frontend::WindowSystemType::Headless: + break; +#ifdef _WIN32 + case Core::Frontend::WindowSystemType::Windows: + extensions.push_back(VK_KHR_WIN32_SURFACE_EXTENSION_NAME); + break; +#endif +#if !defined(_WIN32) && !defined(__APPLE__) + case Core::Frontend::WindowSystemType::X11: + extensions.push_back(VK_KHR_XLIB_SURFACE_EXTENSION_NAME); + break; + case Core::Frontend::WindowSystemType::Wayland: + extensions.push_back(VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME); + break; +#endif + default: + LOG_ERROR(Render_Vulkan, "Presentation not supported on this platform"); + break; + } + if (window_type != Core::Frontend::WindowSystemType::Headless) { + extensions.push_back(VK_KHR_SURFACE_EXTENSION_NAME); + } + if (enable_debug_utils) { + extensions.push_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME); + } + extensions.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME); + return extensions; +} + +[[nodiscard]] bool AreExtensionsSupported(const vk::InstanceDispatch& dld, + std::span extensions) { + const std::optional properties = vk::EnumerateInstanceExtensionProperties(dld); + if (!properties) { + LOG_ERROR(Render_Vulkan, "Failed to query extension properties"); + return false; + } + for (const char* extension : extensions) { + const auto it = std::ranges::find_if(*properties, [extension](const auto& prop) { + return std::strcmp(extension, prop.extensionName) == 0; + }); + if (it == properties->end()) { + LOG_ERROR(Render_Vulkan, "Required instance extension {} is not available", extension); + return false; + } + } + return true; +} + +[[nodiscard]] std::vector Layers(bool enable_layers) { + std::vector layers; + if (enable_layers) { + layers.push_back("VK_LAYER_KHRONOS_validation"); + } + return layers; +} + +void RemoveUnavailableLayers(const vk::InstanceDispatch& dld, std::vector& layers) { + const std::optional layer_properties = vk::EnumerateInstanceLayerProperties(dld); + if (!layer_properties) { + LOG_ERROR(Render_Vulkan, "Failed to query layer properties, disabling layers"); + layers.clear(); + } + std::erase_if(layers, [&layer_properties](const char* layer) { + const auto comp = [layer](const VkLayerProperties& layer_property) { + return std::strcmp(layer, layer_property.layerName) == 0; + }; + const auto it = std::ranges::find_if(*layer_properties, comp); + if (it == layer_properties->end()) { + LOG_ERROR(Render_Vulkan, "Layer {} not available, removing it", layer); + return true; + } + return false; + }); +} +} // Anonymous namespace + +std::pair CreateInstance(Common::DynamicLibrary& library, + vk::InstanceDispatch& dld, + Core::Frontend::WindowSystemType window_type, + bool enable_debug_utils, bool enable_layers) { + if (!library.IsOpen()) { + LOG_ERROR(Render_Vulkan, "Vulkan library not available"); + return {}; + } + if 
(!library.GetSymbol("vkGetInstanceProcAddr", &dld.vkGetInstanceProcAddr)) { + LOG_ERROR(Render_Vulkan, "vkGetInstanceProcAddr not present in Vulkan"); + return {}; + } + if (!vk::Load(dld)) { + LOG_ERROR(Render_Vulkan, "Failed to load Vulkan function pointers"); + return {}; + } + const std::vector extensions = RequiredExtensions(window_type, enable_debug_utils); + if (!AreExtensionsSupported(dld, extensions)) { + return {}; + } + + std::vector layers = Layers(enable_layers); + RemoveUnavailableLayers(dld, layers); + + // Limit the maximum version of Vulkan to avoid using untested version. + const u32 version = std::min(vk::AvailableVersion(dld), VK_API_VERSION_1_1); + + vk::Instance instance = vk::Instance::Create(version, layers, extensions, dld); + if (!instance) { + LOG_ERROR(Render_Vulkan, "Failed to create Vulkan instance"); + return {}; + } + if (!vk::Load(*instance, dld)) { + LOG_ERROR(Render_Vulkan, "Failed to load Vulkan instance function pointers"); + } + return std::make_pair(std::move(instance), version); +} + +} // namespace Vulkan diff --git a/src/video_core/vulkan_common/vulkan_instance.h b/src/video_core/vulkan_common/vulkan_instance.h new file mode 100644 index 000000000..ff2be0a48 --- /dev/null +++ b/src/video_core/vulkan_common/vulkan_instance.h @@ -0,0 +1,21 @@ +// Copyright 2020 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#pragma once + +#include + +#include "common/common_types.h" +#include "common/dynamic_library.h" +#include "core/frontend/emu_window.h" +#include "video_core/vulkan_common/vulkan_wrapper.h" + +namespace Vulkan { + +[[nodiscard]] std::pair CreateInstance( + Common::DynamicLibrary& library, vk::InstanceDispatch& dld, + Core::Frontend::WindowSystemType window_type = Core::Frontend::WindowSystemType::Headless, + bool enable_debug_utils = false, bool enable_layers = false); + +} // namespace Vulkan -- cgit v1.2.3 From 47843b4f097ced5e99c5567b8ac3fd53b80fab0a Mon Sep 17 00:00:00 2001 From: ReinUsesLisp Date: Fri, 25 Dec 2020 02:01:13 -0300 Subject: renderer_vulkan: Create debug callback on separate file and throw Initialize debug callbacks (messenger) from a separate file. This allows sharing code with different backends. Change our Vulkan error handling to use exceptions instead of error codes, simplifying the initialization process. --- .../vulkan_common/vulkan_debug_callback.cpp | 45 ++++++++++++++++++++++ .../vulkan_common/vulkan_debug_callback.h | 11 ++++++ src/video_core/vulkan_common/vulkan_instance.cpp | 14 +++---- src/video_core/vulkan_common/vulkan_wrapper.cpp | 39 +++++-------------- src/video_core/vulkan_common/vulkan_wrapper.h | 15 +++++--- 5 files changed, 79 insertions(+), 45 deletions(-) create mode 100644 src/video_core/vulkan_common/vulkan_debug_callback.cpp create mode 100644 src/video_core/vulkan_common/vulkan_debug_callback.h (limited to 'src/video_core/vulkan_common') diff --git a/src/video_core/vulkan_common/vulkan_debug_callback.cpp b/src/video_core/vulkan_common/vulkan_debug_callback.cpp new file mode 100644 index 000000000..ea7af8ad4 --- /dev/null +++ b/src/video_core/vulkan_common/vulkan_debug_callback.cpp @@ -0,0 +1,45 @@ +// Copyright 2020 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
+ +#include +#include "common/logging/log.h" +#include "video_core/vulkan_common/vulkan_debug_callback.h" + +namespace Vulkan { +namespace { +VkBool32 Callback(VkDebugUtilsMessageSeverityFlagBitsEXT severity, + VkDebugUtilsMessageTypeFlagsEXT type, + const VkDebugUtilsMessengerCallbackDataEXT* data, + [[maybe_unused]] void* user_data) { + const std::string_view message{data->pMessage}; + if (severity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT) { + LOG_CRITICAL(Render_Vulkan, "{}", message); + } else if (severity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT) { + LOG_WARNING(Render_Vulkan, "{}", message); + } else if (severity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT) { + LOG_INFO(Render_Vulkan, "{}", message); + } else if (severity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT) { + LOG_DEBUG(Render_Vulkan, "{}", message); + } + return VK_FALSE; +} +} // Anonymous namespace + +vk::DebugUtilsMessenger CreateDebugCallback(const vk::Instance& instance) { + return instance.CreateDebugUtilsMessenger(VkDebugUtilsMessengerCreateInfoEXT{ + .sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT, + .pNext = nullptr, + .flags = 0, + .messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT | + VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT | + VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT | + VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT, + .messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT | + VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT | + VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT, + .pfnUserCallback = Callback, + }); +} + +} // namespace Vulkan diff --git a/src/video_core/vulkan_common/vulkan_debug_callback.h b/src/video_core/vulkan_common/vulkan_debug_callback.h new file mode 100644 index 000000000..2efcd244c --- /dev/null +++ b/src/video_core/vulkan_common/vulkan_debug_callback.h @@ -0,0 +1,11 @@ +// Copyright 2020 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
+ +#include "video_core/vulkan_common/vulkan_wrapper.h" + +namespace Vulkan { + +vk::DebugUtilsMessenger CreateDebugCallback(const vk::Instance& instance); + +} // namespace Vulkan diff --git a/src/video_core/vulkan_common/vulkan_instance.cpp b/src/video_core/vulkan_common/vulkan_instance.cpp index c19f93e0a..d3d8630e5 100644 --- a/src/video_core/vulkan_common/vulkan_instance.cpp +++ b/src/video_core/vulkan_common/vulkan_instance.cpp @@ -117,21 +117,20 @@ std::pair CreateInstance(Common::DynamicLibrary& library, bool enable_debug_utils, bool enable_layers) { if (!library.IsOpen()) { LOG_ERROR(Render_Vulkan, "Vulkan library not available"); - return {}; + throw vk::Exception(VK_ERROR_INITIALIZATION_FAILED); } if (!library.GetSymbol("vkGetInstanceProcAddr", &dld.vkGetInstanceProcAddr)) { LOG_ERROR(Render_Vulkan, "vkGetInstanceProcAddr not present in Vulkan"); - return {}; + throw vk::Exception(VK_ERROR_INITIALIZATION_FAILED); } if (!vk::Load(dld)) { LOG_ERROR(Render_Vulkan, "Failed to load Vulkan function pointers"); - return {}; + throw vk::Exception(VK_ERROR_INITIALIZATION_FAILED); } const std::vector extensions = RequiredExtensions(window_type, enable_debug_utils); if (!AreExtensionsSupported(dld, extensions)) { - return {}; + throw vk::Exception(VK_ERROR_EXTENSION_NOT_PRESENT); } - std::vector layers = Layers(enable_layers); RemoveUnavailableLayers(dld, layers); @@ -139,12 +138,9 @@ std::pair CreateInstance(Common::DynamicLibrary& library, const u32 version = std::min(vk::AvailableVersion(dld), VK_API_VERSION_1_1); vk::Instance instance = vk::Instance::Create(version, layers, extensions, dld); - if (!instance) { - LOG_ERROR(Render_Vulkan, "Failed to create Vulkan instance"); - return {}; - } if (!vk::Load(*instance, dld)) { LOG_ERROR(Render_Vulkan, "Failed to load Vulkan instance function pointers"); + throw vk::Exception(VK_ERROR_INITIALIZATION_FAILED); } return std::make_pair(std::move(instance), version); } diff --git a/src/video_core/vulkan_common/vulkan_wrapper.cpp b/src/video_core/vulkan_common/vulkan_wrapper.cpp index 478402bbd..f4177537b 100644 --- a/src/video_core/vulkan_common/vulkan_wrapper.cpp +++ b/src/video_core/vulkan_common/vulkan_wrapper.cpp @@ -435,7 +435,7 @@ VkResult Free(VkDevice device, VkCommandPool handle, Span buffe } Instance Instance::Create(u32 version, Span layers, Span extensions, - InstanceDispatch& dispatch) noexcept { + InstanceDispatch& dispatch) { const VkApplicationInfo application_info{ .sType = VK_STRUCTURE_TYPE_APPLICATION_INFO, .pNext = nullptr, @@ -455,22 +455,17 @@ Instance Instance::Create(u32 version, Span layers, Span> Instance::EnumeratePhysicalDevices() { +std::optional> Instance::EnumeratePhysicalDevices() const { u32 num; if (dld->vkEnumeratePhysicalDevices(handle, &num, nullptr) != VK_SUCCESS) { return std::nullopt; @@ -483,27 +478,11 @@ std::optional> Instance::EnumeratePhysicalDevices( return std::make_optional(std::move(physical_devices)); } -DebugCallback Instance::TryCreateDebugCallback( - PFN_vkDebugUtilsMessengerCallbackEXT callback) noexcept { - const VkDebugUtilsMessengerCreateInfoEXT ci{ - .sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT, - .pNext = nullptr, - .flags = 0, - .messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT | - VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT | - VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT | - VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT, - .messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT | - VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT, - 
.pfnUserCallback = callback, - .pUserData = nullptr, - }; - - VkDebugUtilsMessengerEXT messenger; - if (dld->vkCreateDebugUtilsMessengerEXT(handle, &ci, nullptr, &messenger) != VK_SUCCESS) { - return {}; - } - return DebugCallback(messenger, handle, *dld); +DebugUtilsMessenger Instance::CreateDebugUtilsMessenger( + const VkDebugUtilsMessengerCreateInfoEXT& create_info) const { + VkDebugUtilsMessengerEXT object; + Check(dld->vkCreateDebugUtilsMessengerEXT(handle, &create_info, nullptr, &object)); + return DebugUtilsMessenger(object, handle, *dld); } void Buffer::BindMemory(VkDeviceMemory memory, VkDeviceSize offset) const { diff --git a/src/video_core/vulkan_common/vulkan_wrapper.h b/src/video_core/vulkan_common/vulkan_wrapper.h index f9a184e00..03ca97ac0 100644 --- a/src/video_core/vulkan_common/vulkan_wrapper.h +++ b/src/video_core/vulkan_common/vulkan_wrapper.h @@ -555,7 +555,7 @@ private: const DeviceDispatch* dld = nullptr; }; -using DebugCallback = Handle; +using DebugUtilsMessenger = Handle; using DescriptorSetLayout = Handle; using DescriptorUpdateTemplateKHR = Handle; using Pipeline = Handle; @@ -573,16 +573,19 @@ class Instance : public Handle { using Handle::Handle; public: - /// Creates a Vulkan instance. Use "operator bool" for error handling. + /// Creates a Vulkan instance. + /// @throw Exception on initialization error. static Instance Create(u32 version, Span layers, Span extensions, - InstanceDispatch& dispatch) noexcept; + InstanceDispatch& dispatch); /// Enumerates physical devices. /// @return Physical devices and an empty handle on failure. - std::optional> EnumeratePhysicalDevices(); + std::optional> EnumeratePhysicalDevices() const; - /// Tries to create a debug callback messenger. Returns an empty handle on failure. - DebugCallback TryCreateDebugCallback(PFN_vkDebugUtilsMessengerCallbackEXT callback) noexcept; + /// Creates a debug callback messenger. + /// @throw Exception on creation failure. + DebugUtilsMessenger CreateDebugUtilsMessenger( + const VkDebugUtilsMessengerCreateInfoEXT& create_info) const; }; class Queue { -- cgit v1.2.3 From 11f0f7598df993c717752030c05f7b1eca3c762c Mon Sep 17 00:00:00 2001 From: ReinUsesLisp Date: Fri, 25 Dec 2020 02:14:15 -0300 Subject: renderer_vulkan: Initialize surface in separate file Move surface initialization code to a separate file. It's unlikely to use this code outside of Vulkan, but keeping platform-specific code (Win32, Xlib, Wayland) in its own translation unit keeps things cleaner. --- src/video_core/vulkan_common/vulkan_surface.cpp | 81 +++++++++++++++++++++++++ src/video_core/vulkan_common/vulkan_surface.h | 18 ++++++ src/video_core/vulkan_common/vulkan_wrapper.h | 5 ++ 3 files changed, 104 insertions(+) create mode 100644 src/video_core/vulkan_common/vulkan_surface.cpp create mode 100644 src/video_core/vulkan_common/vulkan_surface.h (limited to 'src/video_core/vulkan_common') diff --git a/src/video_core/vulkan_common/vulkan_surface.cpp b/src/video_core/vulkan_common/vulkan_surface.cpp new file mode 100644 index 000000000..3c3238f96 --- /dev/null +++ b/src/video_core/vulkan_common/vulkan_surface.cpp @@ -0,0 +1,81 @@ +// Copyright 2020 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
+ +#include "common/logging/log.h" +#include "core/frontend/emu_window.h" +#include "video_core/vulkan_common/vulkan_surface.h" +#include "video_core/vulkan_common/vulkan_wrapper.h" + +// Include these late to avoid polluting previous headers +#ifdef _WIN32 +#include +// ensure include order +#include +#endif + +#if !defined(_WIN32) && !defined(__APPLE__) +#include +#include +#include +#endif + +namespace Vulkan { + +vk::SurfaceKHR CreateSurface(const vk::Instance& instance, + const Core::Frontend::EmuWindow& emu_window) { + [[maybe_unused]] const vk::InstanceDispatch& dld = instance.Dispatch(); + [[maybe_unused]] const auto& window_info = emu_window.GetWindowInfo(); + VkSurfaceKHR unsafe_surface = nullptr; + +#ifdef _WIN32 + if (window_info.type == Core::Frontend::WindowSystemType::Windows) { + const HWND hWnd = static_cast(window_info.render_surface); + const VkWin32SurfaceCreateInfoKHR win32_ci{VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR, + nullptr, 0, nullptr, hWnd}; + const auto vkCreateWin32SurfaceKHR = reinterpret_cast( + dld.vkGetInstanceProcAddr(*instance, "vkCreateWin32SurfaceKHR")); + if (!vkCreateWin32SurfaceKHR || + vkCreateWin32SurfaceKHR(*instance, &win32_ci, nullptr, &unsafe_surface) != VK_SUCCESS) { + LOG_ERROR(Render_Vulkan, "Failed to initialize Win32 surface"); + throw vk::Exception(VK_ERROR_INITIALIZATION_FAILED); + } + } +#endif +#if !defined(_WIN32) && !defined(__APPLE__) + if (window_info.type == Core::Frontend::WindowSystemType::X11) { + const VkXlibSurfaceCreateInfoKHR xlib_ci{ + VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR, nullptr, 0, + static_cast(window_info.display_connection), + reinterpret_cast(window_info.render_surface)}; + const auto vkCreateXlibSurfaceKHR = reinterpret_cast( + dld.vkGetInstanceProcAddr(*instance, "vkCreateXlibSurfaceKHR")); + if (!vkCreateXlibSurfaceKHR || + vkCreateXlibSurfaceKHR(*instance, &xlib_ci, nullptr, &unsafe_surface) != VK_SUCCESS) { + LOG_ERROR(Render_Vulkan, "Failed to initialize Xlib surface"); + throw vk::Exception(VK_ERROR_INITIALIZATION_FAILED); + } + } + if (window_info.type == Core::Frontend::WindowSystemType::Wayland) { + const VkWaylandSurfaceCreateInfoKHR wayland_ci{ + VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR, nullptr, 0, + static_cast(window_info.display_connection), + static_cast(window_info.render_surface)}; + const auto vkCreateWaylandSurfaceKHR = reinterpret_cast( + dld.vkGetInstanceProcAddr(*instance, "vkCreateWaylandSurfaceKHR")); + if (!vkCreateWaylandSurfaceKHR || + vkCreateWaylandSurfaceKHR(*instance, &wayland_ci, nullptr, &unsafe_surface) != + VK_SUCCESS) { + LOG_ERROR(Render_Vulkan, "Failed to initialize Wayland surface"); + throw vk::Exception(VK_ERROR_INITIALIZATION_FAILED); + } + } +#endif + if (!unsafe_surface) { + LOG_ERROR(Render_Vulkan, "Presentation not supported on this platform"); + throw vk::Exception(VK_ERROR_INITIALIZATION_FAILED); + } + return vk::SurfaceKHR(unsafe_surface, *instance, dld); +} + +} // namespace Vulkan diff --git a/src/video_core/vulkan_common/vulkan_surface.h b/src/video_core/vulkan_common/vulkan_surface.h new file mode 100644 index 000000000..05a169e32 --- /dev/null +++ b/src/video_core/vulkan_common/vulkan_surface.h @@ -0,0 +1,18 @@ +// Copyright 2020 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
+ +#pragma once + +#include "video_core/vulkan_common/vulkan_wrapper.h" + +namespace Core::Frontend { +class EmuWindow; +} + +namespace Vulkan { + +[[nodiscard]] vk::SurfaceKHR CreateSurface(const vk::Instance& instance, + const Core::Frontend::EmuWindow& emu_window); + +} // namespace Vulkan diff --git a/src/video_core/vulkan_common/vulkan_wrapper.h b/src/video_core/vulkan_common/vulkan_wrapper.h index 03ca97ac0..012982a3f 100644 --- a/src/video_core/vulkan_common/vulkan_wrapper.h +++ b/src/video_core/vulkan_common/vulkan_wrapper.h @@ -586,6 +586,11 @@ public: /// @throw Exception on creation failure. DebugUtilsMessenger CreateDebugUtilsMessenger( const VkDebugUtilsMessengerCreateInfoEXT& create_info) const; + + /// Returns dispatch table. + const InstanceDispatch& Dispatch() const noexcept { + return *dld; + } }; class Queue { -- cgit v1.2.3 From 085adfea00a525796a3bf4b2dd345e1df656c930 Mon Sep 17 00:00:00 2001 From: ReinUsesLisp Date: Fri, 25 Dec 2020 02:27:57 -0300 Subject: renderer_vulkan: Throw when enumerating devices fails Report device enumeration errors with exceptions to be consistent with other initialization related function calls. Reduces the amount of code to maintain. --- src/video_core/vulkan_common/vulkan_instance.cpp | 2 +- src/video_core/vulkan_common/vulkan_instance.h | 2 +- src/video_core/vulkan_common/vulkan_wrapper.cpp | 12 ++++-------- src/video_core/vulkan_common/vulkan_wrapper.h | 3 ++- 4 files changed, 8 insertions(+), 11 deletions(-) (limited to 'src/video_core/vulkan_common') diff --git a/src/video_core/vulkan_common/vulkan_instance.cpp b/src/video_core/vulkan_common/vulkan_instance.cpp index d3d8630e5..ee46fc6cc 100644 --- a/src/video_core/vulkan_common/vulkan_instance.cpp +++ b/src/video_core/vulkan_common/vulkan_instance.cpp @@ -111,7 +111,7 @@ void RemoveUnavailableLayers(const vk::InstanceDispatch& dld, std::vector CreateInstance(Common::DynamicLibrary& library, +std::pair CreateInstance(const Common::DynamicLibrary& library, vk::InstanceDispatch& dld, Core::Frontend::WindowSystemType window_type, bool enable_debug_utils, bool enable_layers) { diff --git a/src/video_core/vulkan_common/vulkan_instance.h b/src/video_core/vulkan_common/vulkan_instance.h index ff2be0a48..5acca9756 100644 --- a/src/video_core/vulkan_common/vulkan_instance.h +++ b/src/video_core/vulkan_common/vulkan_instance.h @@ -14,7 +14,7 @@ namespace Vulkan { [[nodiscard]] std::pair CreateInstance( - Common::DynamicLibrary& library, vk::InstanceDispatch& dld, + const Common::DynamicLibrary& library, vk::InstanceDispatch& dld, Core::Frontend::WindowSystemType window_type = Core::Frontend::WindowSystemType::Headless, bool enable_debug_utils = false, bool enable_layers = false); diff --git a/src/video_core/vulkan_common/vulkan_wrapper.cpp b/src/video_core/vulkan_common/vulkan_wrapper.cpp index f4177537b..8698c3f92 100644 --- a/src/video_core/vulkan_common/vulkan_wrapper.cpp +++ b/src/video_core/vulkan_common/vulkan_wrapper.cpp @@ -465,17 +465,13 @@ Instance Instance::Create(u32 version, Span layers, Span> Instance::EnumeratePhysicalDevices() const { +std::vector Instance::EnumeratePhysicalDevices() const { u32 num; - if (dld->vkEnumeratePhysicalDevices(handle, &num, nullptr) != VK_SUCCESS) { - return std::nullopt; - } + Check(dld->vkEnumeratePhysicalDevices(handle, &num, nullptr)); std::vector physical_devices(num); - if (dld->vkEnumeratePhysicalDevices(handle, &num, physical_devices.data()) != VK_SUCCESS) { - return std::nullopt; - } + Check(dld->vkEnumeratePhysicalDevices(handle, &num, 
physical_devices.data())); SortPhysicalDevices(physical_devices, *dld); - return std::make_optional(std::move(physical_devices)); + return physical_devices; } DebugUtilsMessenger Instance::CreateDebugUtilsMessenger( diff --git a/src/video_core/vulkan_common/vulkan_wrapper.h b/src/video_core/vulkan_common/vulkan_wrapper.h index 012982a3f..af3083c84 100644 --- a/src/video_core/vulkan_common/vulkan_wrapper.h +++ b/src/video_core/vulkan_common/vulkan_wrapper.h @@ -580,7 +580,8 @@ public: /// Enumerates physical devices. /// @return Physical devices and an empty handle on failure. - std::optional> EnumeratePhysicalDevices() const; + /// @throw Exception on Vulkan error. + std::vector EnumeratePhysicalDevices() const; /// Creates a debug callback messenger. /// @throw Exception on creation failure. -- cgit v1.2.3 From 53ea06dc17ccf9232aa3326a4621500058f9d253 Mon Sep 17 00:00:00 2001 From: ReinUsesLisp Date: Fri, 25 Dec 2020 02:42:03 -0300 Subject: renderer_vulkan: Remove two step initialization on VKDevice The Vulkan device abstraction either initializes successfully on the constructor or throws a Vulkan exception. --- src/video_core/vulkan_common/vulkan_wrapper.cpp | 7 ++----- src/video_core/vulkan_common/vulkan_wrapper.h | 2 +- 2 files changed, 3 insertions(+), 6 deletions(-) (limited to 'src/video_core/vulkan_common') diff --git a/src/video_core/vulkan_common/vulkan_wrapper.cpp b/src/video_core/vulkan_common/vulkan_wrapper.cpp index 8698c3f92..5e15ad607 100644 --- a/src/video_core/vulkan_common/vulkan_wrapper.cpp +++ b/src/video_core/vulkan_common/vulkan_wrapper.cpp @@ -580,7 +580,7 @@ void Semaphore::SetObjectNameEXT(const char* name) const { Device Device::Create(VkPhysicalDevice physical_device, Span queues_ci, Span enabled_extensions, const void* next, - DeviceDispatch& dispatch) noexcept { + DeviceDispatch& dispatch) { const VkDeviceCreateInfo ci{ .sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO, .pNext = next, @@ -593,11 +593,8 @@ Device Device::Create(VkPhysicalDevice physical_device, Span { public: static Device Create(VkPhysicalDevice physical_device, Span queues_ci, Span enabled_extensions, const void* next, - DeviceDispatch& dispatch) noexcept; + DeviceDispatch& dispatch); Queue GetQueue(u32 family_index) const noexcept; -- cgit v1.2.3 From cdbee27692d73046cecf56fdea1c90f72ebbc0ce Mon Sep 17 00:00:00 2001 From: ReinUsesLisp Date: Wed, 30 Dec 2020 04:58:38 -0300 Subject: vulkan_instance: Allow different Vulkan versions and enforce 1.1 For listing the available physical devices we can use Vulkan 1.0. Now that MoltenVK supports 1.1 we can require it for running games. Add missing documentation. 
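(Editor's note, not part of the patch: a minimal caller sketch in C++ showing the CreateInstance signature introduced below. The variable names, the chosen window system type, and the surrounding error handling are illustrative assumptions only; the real call sites live in the renderer backends.)

    // Hypothetical usage, assuming a Vulkan dynamic library handle opened elsewhere.
    Common::DynamicLibrary library; // assumed to be already opened
    vk::InstanceDispatch dld;
    try {
        // Listing physical devices can settle for Vulkan 1.0, using the default
        // Headless window type, no debug utils and no layers...
        const vk::Instance enumeration_instance =
            CreateInstance(library, dld, VK_API_VERSION_1_0);
        const std::vector<VkPhysicalDevice> devices =
            enumeration_instance.EnumeratePhysicalDevices();

        // ...while an instance used for rendering enforces 1.1, optionally with
        // debug utils and validation layers enabled.
        const vk::Instance instance =
            CreateInstance(library, dld, VK_API_VERSION_1_1,
                           Core::Frontend::WindowSystemType::Windows,
                           /*enable_debug_utils=*/true, /*enable_layers=*/true);
    } catch (const vk::Exception&) {
        // CreateInstance now reports failures (missing loader symbols, unsupported
        // version, missing extensions) by throwing vk::Exception instead of
        // returning an empty pair.
        LOG_ERROR(Render_Vulkan, "Vulkan instance initialization failed");
    }
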
--- src/video_core/vulkan_common/vulkan_instance.cpp | 21 ++++++++++++--------- src/video_core/vulkan_common/vulkan_instance.h | 19 +++++++++++++++---- 2 files changed, 27 insertions(+), 13 deletions(-) (limited to 'src/video_core/vulkan_common') diff --git a/src/video_core/vulkan_common/vulkan_instance.cpp b/src/video_core/vulkan_common/vulkan_instance.cpp index ee46fc6cc..889ecda0c 100644 --- a/src/video_core/vulkan_common/vulkan_instance.cpp +++ b/src/video_core/vulkan_common/vulkan_instance.cpp @@ -111,10 +111,9 @@ void RemoveUnavailableLayers(const vk::InstanceDispatch& dld, std::vector CreateInstance(const Common::DynamicLibrary& library, - vk::InstanceDispatch& dld, - Core::Frontend::WindowSystemType window_type, - bool enable_debug_utils, bool enable_layers) { +vk::Instance CreateInstance(const Common::DynamicLibrary& library, vk::InstanceDispatch& dld, + u32 required_version, Core::Frontend::WindowSystemType window_type, + bool enable_debug_utils, bool enable_layers) { if (!library.IsOpen()) { LOG_ERROR(Render_Vulkan, "Vulkan library not available"); throw vk::Exception(VK_ERROR_INITIALIZATION_FAILED); @@ -134,15 +133,19 @@ std::pair CreateInstance(const Common::DynamicLibrary& librar std::vector layers = Layers(enable_layers); RemoveUnavailableLayers(dld, layers); - // Limit the maximum version of Vulkan to avoid using untested version. - const u32 version = std::min(vk::AvailableVersion(dld), VK_API_VERSION_1_1); - - vk::Instance instance = vk::Instance::Create(version, layers, extensions, dld); + const u32 available_version = vk::AvailableVersion(dld); + if (available_version < required_version) { + LOG_ERROR(Render_Vulkan, "Vulkan {}.{} is not supported, {}.{} is required", + VK_VERSION_MAJOR(available_version), VK_VERSION_MINOR(available_version), + VK_VERSION_MAJOR(required_version), VK_VERSION_MINOR(required_version)); + throw vk::Exception(VK_ERROR_INCOMPATIBLE_DRIVER); + } + vk::Instance instance = vk::Instance::Create(required_version, layers, extensions, dld); if (!vk::Load(*instance, dld)) { LOG_ERROR(Render_Vulkan, "Failed to load Vulkan instance function pointers"); throw vk::Exception(VK_ERROR_INITIALIZATION_FAILED); } - return std::make_pair(std::move(instance), version); + return instance; } } // namespace Vulkan diff --git a/src/video_core/vulkan_common/vulkan_instance.h b/src/video_core/vulkan_common/vulkan_instance.h index 5acca9756..e5e3a7144 100644 --- a/src/video_core/vulkan_common/vulkan_instance.h +++ b/src/video_core/vulkan_common/vulkan_instance.h @@ -4,8 +4,6 @@ #pragma once -#include - #include "common/common_types.h" #include "common/dynamic_library.h" #include "core/frontend/emu_window.h" @@ -13,8 +11,21 @@ namespace Vulkan { -[[nodiscard]] std::pair CreateInstance( - const Common::DynamicLibrary& library, vk::InstanceDispatch& dld, +/** + * Create a Vulkan instance + * + * @param library Dynamic library to load the Vulkan instance from + * @param dld Dispatch table to load function pointers into + * @param required_version Required Vulkan version (for example, VK_API_VERSION_1_1) + * @param window_type Window system type's enabled extension + * @param enable_debug_utils Whether to enable VK_EXT_debug_utils_extension_name or not + * @param enable_layers Whether to enable Vulkan validation layers or not + * + * @return A new Vulkan instance + * @throw vk::Exception on failure + */ +[[nodiscard]] vk::Instance CreateInstance( + const Common::DynamicLibrary& library, vk::InstanceDispatch& dld, u32 required_version, Core::Frontend::WindowSystemType 
window_type = Core::Frontend::WindowSystemType::Headless, bool enable_debug_utils = false, bool enable_layers = false); -- cgit v1.2.3 From 3753553b6a7b493a03f4e6f0e0f726c334502eb0 Mon Sep 17 00:00:00 2001 From: ReinUsesLisp Date: Sat, 26 Dec 2020 01:19:46 -0300 Subject: renderer_vulkan: Move device abstraction to vulkan_common --- src/video_core/vulkan_common/vulkan_device.cpp | 883 +++++++++++++++++++++++++ src/video_core/vulkan_common/vulkan_device.h | 306 +++++++++ 2 files changed, 1189 insertions(+) create mode 100644 src/video_core/vulkan_common/vulkan_device.cpp create mode 100644 src/video_core/vulkan_common/vulkan_device.h (limited to 'src/video_core/vulkan_common') diff --git a/src/video_core/vulkan_common/vulkan_device.cpp b/src/video_core/vulkan_common/vulkan_device.cpp new file mode 100644 index 000000000..67183eed8 --- /dev/null +++ b/src/video_core/vulkan_common/vulkan_device.cpp @@ -0,0 +1,883 @@ +// Copyright 2018 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#include +#include +#include +#include +#include +#include +#include +#include + +#include "common/assert.h" +#include "core/settings.h" +#include "video_core/vulkan_common/vulkan_device.h" +#include "video_core/vulkan_common/vulkan_wrapper.h" + +namespace Vulkan { + +namespace { + +namespace Alternatives { + +constexpr std::array Depth24UnormS8_UINT{ + VK_FORMAT_D32_SFLOAT_S8_UINT, + VK_FORMAT_D16_UNORM_S8_UINT, + VkFormat{}, +}; + +constexpr std::array Depth16UnormS8_UINT{ + VK_FORMAT_D24_UNORM_S8_UINT, + VK_FORMAT_D32_SFLOAT_S8_UINT, + VkFormat{}, +}; + +} // namespace Alternatives + +constexpr std::array REQUIRED_EXTENSIONS{ + VK_KHR_SWAPCHAIN_EXTENSION_NAME, + VK_KHR_MAINTENANCE1_EXTENSION_NAME, + VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME, + VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME, + VK_KHR_16BIT_STORAGE_EXTENSION_NAME, + VK_KHR_8BIT_STORAGE_EXTENSION_NAME, + VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME, + VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME, + VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME, + VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME, + VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME, + VK_EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME, + VK_EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME, + VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME, +}; + +template +void SetNext(void**& next, T& data) { + *next = &data; + next = &data.pNext; +} + +constexpr const VkFormat* GetFormatAlternatives(VkFormat format) { + switch (format) { + case VK_FORMAT_D24_UNORM_S8_UINT: + return Alternatives::Depth24UnormS8_UINT.data(); + case VK_FORMAT_D16_UNORM_S8_UINT: + return Alternatives::Depth16UnormS8_UINT.data(); + default: + return nullptr; + } +} + +VkFormatFeatureFlags GetFormatFeatures(VkFormatProperties properties, FormatType format_type) { + switch (format_type) { + case FormatType::Linear: + return properties.linearTilingFeatures; + case FormatType::Optimal: + return properties.optimalTilingFeatures; + case FormatType::Buffer: + return properties.bufferFeatures; + default: + return {}; + } +} + +[[nodiscard]] bool IsRDNA(std::string_view device_name, VkDriverIdKHR driver_id) { + static constexpr std::array RDNA_DEVICES{ + "5700", + "5600", + "5500", + "5300", + }; + if (driver_id != VK_DRIVER_ID_AMD_PROPRIETARY_KHR) { + return false; + } + return std::any_of(RDNA_DEVICES.begin(), RDNA_DEVICES.end(), [device_name](const char* name) { + return device_name.find(name) != std::string_view::npos; + }); +} + +std::unordered_map GetFormatProperties( + 
vk::PhysicalDevice physical, const vk::InstanceDispatch& dld) { + static constexpr std::array formats{ + VK_FORMAT_A8B8G8R8_UNORM_PACK32, + VK_FORMAT_A8B8G8R8_UINT_PACK32, + VK_FORMAT_A8B8G8R8_SNORM_PACK32, + VK_FORMAT_A8B8G8R8_SINT_PACK32, + VK_FORMAT_A8B8G8R8_SRGB_PACK32, + VK_FORMAT_B5G6R5_UNORM_PACK16, + VK_FORMAT_A2B10G10R10_UNORM_PACK32, + VK_FORMAT_A2B10G10R10_UINT_PACK32, + VK_FORMAT_A1R5G5B5_UNORM_PACK16, + VK_FORMAT_R32G32B32A32_SFLOAT, + VK_FORMAT_R32G32B32A32_SINT, + VK_FORMAT_R32G32B32A32_UINT, + VK_FORMAT_R32G32_SFLOAT, + VK_FORMAT_R32G32_SINT, + VK_FORMAT_R32G32_UINT, + VK_FORMAT_R16G16B16A16_SINT, + VK_FORMAT_R16G16B16A16_UINT, + VK_FORMAT_R16G16B16A16_SNORM, + VK_FORMAT_R16G16B16A16_UNORM, + VK_FORMAT_R16G16_UNORM, + VK_FORMAT_R16G16_SNORM, + VK_FORMAT_R16G16_SFLOAT, + VK_FORMAT_R16G16_SINT, + VK_FORMAT_R16_UNORM, + VK_FORMAT_R16_UINT, + VK_FORMAT_R8G8B8A8_SRGB, + VK_FORMAT_R8G8_UNORM, + VK_FORMAT_R8G8_SNORM, + VK_FORMAT_R8G8_SINT, + VK_FORMAT_R8G8_UINT, + VK_FORMAT_R8_UNORM, + VK_FORMAT_R8_SNORM, + VK_FORMAT_R8_SINT, + VK_FORMAT_R8_UINT, + VK_FORMAT_B10G11R11_UFLOAT_PACK32, + VK_FORMAT_R32_SFLOAT, + VK_FORMAT_R32_UINT, + VK_FORMAT_R32_SINT, + VK_FORMAT_R16_SFLOAT, + VK_FORMAT_R16G16B16A16_SFLOAT, + VK_FORMAT_B8G8R8A8_UNORM, + VK_FORMAT_B8G8R8A8_SRGB, + VK_FORMAT_R4G4B4A4_UNORM_PACK16, + VK_FORMAT_D32_SFLOAT, + VK_FORMAT_D16_UNORM, + VK_FORMAT_D16_UNORM_S8_UINT, + VK_FORMAT_D24_UNORM_S8_UINT, + VK_FORMAT_D32_SFLOAT_S8_UINT, + VK_FORMAT_BC1_RGBA_UNORM_BLOCK, + VK_FORMAT_BC2_UNORM_BLOCK, + VK_FORMAT_BC3_UNORM_BLOCK, + VK_FORMAT_BC4_UNORM_BLOCK, + VK_FORMAT_BC4_SNORM_BLOCK, + VK_FORMAT_BC5_UNORM_BLOCK, + VK_FORMAT_BC5_SNORM_BLOCK, + VK_FORMAT_BC7_UNORM_BLOCK, + VK_FORMAT_BC6H_UFLOAT_BLOCK, + VK_FORMAT_BC6H_SFLOAT_BLOCK, + VK_FORMAT_BC1_RGBA_SRGB_BLOCK, + VK_FORMAT_BC2_SRGB_BLOCK, + VK_FORMAT_BC3_SRGB_BLOCK, + VK_FORMAT_BC7_SRGB_BLOCK, + VK_FORMAT_ASTC_4x4_UNORM_BLOCK, + VK_FORMAT_ASTC_4x4_SRGB_BLOCK, + VK_FORMAT_ASTC_5x4_UNORM_BLOCK, + VK_FORMAT_ASTC_5x4_SRGB_BLOCK, + VK_FORMAT_ASTC_5x5_UNORM_BLOCK, + VK_FORMAT_ASTC_5x5_SRGB_BLOCK, + VK_FORMAT_ASTC_6x5_UNORM_BLOCK, + VK_FORMAT_ASTC_6x5_SRGB_BLOCK, + VK_FORMAT_ASTC_6x6_UNORM_BLOCK, + VK_FORMAT_ASTC_6x6_SRGB_BLOCK, + VK_FORMAT_ASTC_8x5_UNORM_BLOCK, + VK_FORMAT_ASTC_8x5_SRGB_BLOCK, + VK_FORMAT_ASTC_8x6_UNORM_BLOCK, + VK_FORMAT_ASTC_8x6_SRGB_BLOCK, + VK_FORMAT_ASTC_8x8_UNORM_BLOCK, + VK_FORMAT_ASTC_8x8_SRGB_BLOCK, + VK_FORMAT_ASTC_10x5_UNORM_BLOCK, + VK_FORMAT_ASTC_10x5_SRGB_BLOCK, + VK_FORMAT_ASTC_10x6_UNORM_BLOCK, + VK_FORMAT_ASTC_10x6_SRGB_BLOCK, + VK_FORMAT_ASTC_10x8_UNORM_BLOCK, + VK_FORMAT_ASTC_10x8_SRGB_BLOCK, + VK_FORMAT_ASTC_10x10_UNORM_BLOCK, + VK_FORMAT_ASTC_10x10_SRGB_BLOCK, + VK_FORMAT_ASTC_12x10_UNORM_BLOCK, + VK_FORMAT_ASTC_12x10_SRGB_BLOCK, + VK_FORMAT_ASTC_12x12_UNORM_BLOCK, + VK_FORMAT_ASTC_12x12_SRGB_BLOCK, + VK_FORMAT_ASTC_8x6_UNORM_BLOCK, + VK_FORMAT_ASTC_8x6_SRGB_BLOCK, + VK_FORMAT_ASTC_6x5_UNORM_BLOCK, + VK_FORMAT_ASTC_6x5_SRGB_BLOCK, + VK_FORMAT_E5B9G9R9_UFLOAT_PACK32, + }; + std::unordered_map format_properties; + for (const auto format : formats) { + format_properties.emplace(format, physical.GetFormatProperties(format)); + } + return format_properties; +} + +} // Anonymous namespace + +Device::Device(VkInstance instance_, vk::PhysicalDevice physical_, VkSurfaceKHR surface, + const vk::InstanceDispatch& dld_) + : instance{instance_}, dld{dld_}, physical{physical_}, properties{physical.GetProperties()}, + format_properties{GetFormatProperties(physical, dld)} { + CheckSuitability(); + 
SetupFamilies(surface); + SetupFeatures(); + + const auto queue_cis = GetDeviceQueueCreateInfos(); + const std::vector extensions = LoadExtensions(); + + VkPhysicalDeviceFeatures2 features2{ + .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2, + .pNext = nullptr, + }; + const void* first_next = &features2; + void** next = &features2.pNext; + + features2.features = { + .robustBufferAccess = false, + .fullDrawIndexUint32 = false, + .imageCubeArray = true, + .independentBlend = true, + .geometryShader = true, + .tessellationShader = true, + .sampleRateShading = false, + .dualSrcBlend = false, + .logicOp = false, + .multiDrawIndirect = false, + .drawIndirectFirstInstance = false, + .depthClamp = true, + .depthBiasClamp = true, + .fillModeNonSolid = false, + .depthBounds = false, + .wideLines = false, + .largePoints = true, + .alphaToOne = false, + .multiViewport = true, + .samplerAnisotropy = true, + .textureCompressionETC2 = false, + .textureCompressionASTC_LDR = is_optimal_astc_supported, + .textureCompressionBC = false, + .occlusionQueryPrecise = true, + .pipelineStatisticsQuery = false, + .vertexPipelineStoresAndAtomics = true, + .fragmentStoresAndAtomics = true, + .shaderTessellationAndGeometryPointSize = false, + .shaderImageGatherExtended = true, + .shaderStorageImageExtendedFormats = false, + .shaderStorageImageMultisample = true, + .shaderStorageImageReadWithoutFormat = is_formatless_image_load_supported, + .shaderStorageImageWriteWithoutFormat = true, + .shaderUniformBufferArrayDynamicIndexing = false, + .shaderSampledImageArrayDynamicIndexing = false, + .shaderStorageBufferArrayDynamicIndexing = false, + .shaderStorageImageArrayDynamicIndexing = false, + .shaderClipDistance = false, + .shaderCullDistance = false, + .shaderFloat64 = false, + .shaderInt64 = false, + .shaderInt16 = false, + .shaderResourceResidency = false, + .shaderResourceMinLod = false, + .sparseBinding = false, + .sparseResidencyBuffer = false, + .sparseResidencyImage2D = false, + .sparseResidencyImage3D = false, + .sparseResidency2Samples = false, + .sparseResidency4Samples = false, + .sparseResidency8Samples = false, + .sparseResidency16Samples = false, + .sparseResidencyAliased = false, + .variableMultisampleRate = false, + .inheritedQueries = false, + }; + VkPhysicalDeviceTimelineSemaphoreFeaturesKHR timeline_semaphore{ + .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR, + .pNext = nullptr, + .timelineSemaphore = true, + }; + SetNext(next, timeline_semaphore); + + VkPhysicalDevice16BitStorageFeaturesKHR bit16_storage{ + .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR, + .pNext = nullptr, + .storageBuffer16BitAccess = false, + .uniformAndStorageBuffer16BitAccess = true, + .storagePushConstant16 = false, + .storageInputOutput16 = false, + }; + SetNext(next, bit16_storage); + + VkPhysicalDevice8BitStorageFeaturesKHR bit8_storage{ + .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR, + .pNext = nullptr, + .storageBuffer8BitAccess = false, + .uniformAndStorageBuffer8BitAccess = true, + .storagePushConstant8 = false, + }; + SetNext(next, bit8_storage); + + VkPhysicalDeviceHostQueryResetFeaturesEXT host_query_reset{ + .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT, + .hostQueryReset = true, + }; + SetNext(next, host_query_reset); + + VkPhysicalDeviceFloat16Int8FeaturesKHR float16_int8; + if (is_float16_supported) { + float16_int8 = { + .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR, + .pNext = nullptr, 
+ .shaderFloat16 = true, + .shaderInt8 = false, + }; + SetNext(next, float16_int8); + } else { + LOG_INFO(Render_Vulkan, "Device doesn't support float16 natively"); + } + + if (!nv_viewport_swizzle) { + LOG_INFO(Render_Vulkan, "Device doesn't support viewport swizzles"); + } + + VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR std430_layout; + if (khr_uniform_buffer_standard_layout) { + std430_layout = { + .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR, + .pNext = nullptr, + .uniformBufferStandardLayout = true, + }; + SetNext(next, std430_layout); + } else { + LOG_INFO(Render_Vulkan, "Device doesn't support packed UBOs"); + } + + VkPhysicalDeviceIndexTypeUint8FeaturesEXT index_type_uint8; + if (ext_index_type_uint8) { + index_type_uint8 = { + .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT, + .pNext = nullptr, + .indexTypeUint8 = true, + }; + SetNext(next, index_type_uint8); + } else { + LOG_INFO(Render_Vulkan, "Device doesn't support uint8 indexes"); + } + + VkPhysicalDeviceTransformFeedbackFeaturesEXT transform_feedback; + if (ext_transform_feedback) { + transform_feedback = { + .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT, + .pNext = nullptr, + .transformFeedback = true, + .geometryStreams = true, + }; + SetNext(next, transform_feedback); + } else { + LOG_INFO(Render_Vulkan, "Device doesn't support transform feedbacks"); + } + + VkPhysicalDeviceCustomBorderColorFeaturesEXT custom_border; + if (ext_custom_border_color) { + custom_border = { + .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT, + .pNext = nullptr, + .customBorderColors = VK_TRUE, + .customBorderColorWithoutFormat = VK_TRUE, + }; + SetNext(next, custom_border); + } else { + LOG_INFO(Render_Vulkan, "Device doesn't support custom border colors"); + } + + VkPhysicalDeviceExtendedDynamicStateFeaturesEXT dynamic_state; + if (ext_extended_dynamic_state) { + dynamic_state = { + .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT, + .pNext = nullptr, + .extendedDynamicState = VK_TRUE, + }; + SetNext(next, dynamic_state); + } else { + LOG_INFO(Render_Vulkan, "Device doesn't support extended dynamic state"); + } + + VkPhysicalDeviceRobustness2FeaturesEXT robustness2; + if (ext_robustness2) { + robustness2 = { + .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT, + .pNext = nullptr, + .robustBufferAccess2 = false, + .robustImageAccess2 = true, + .nullDescriptor = true, + }; + SetNext(next, robustness2); + } else { + LOG_INFO(Render_Vulkan, "Device doesn't support robustness2"); + } + + if (!ext_depth_range_unrestricted) { + LOG_INFO(Render_Vulkan, "Device doesn't support depth range unrestricted"); + } + + VkDeviceDiagnosticsConfigCreateInfoNV diagnostics_nv; + if (nv_device_diagnostics_config) { + nsight_aftermath_tracker.Initialize(); + + diagnostics_nv = { + .sType = VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV, + .pNext = &features2, + .flags = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV | + VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV | + VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_AUTOMATIC_CHECKPOINTS_BIT_NV, + }; + first_next = &diagnostics_nv; + } + logical = vk::Device::Create(physical, queue_cis, extensions, first_next, dld); + + CollectTelemetryParameters(); + CollectToolingInfo(); + + if (ext_extended_dynamic_state && driver_id == VK_DRIVER_ID_MESA_RADV) { + LOG_WARNING( + Render_Vulkan, + "Blacklisting RADV for 
VK_EXT_extended_dynamic_state, likely due to a bug in yuzu");
+        ext_extended_dynamic_state = false;
+    }
+    if (ext_extended_dynamic_state && IsRDNA(properties.deviceName, driver_id)) {
+        // AMD's proprietary driver supports VK_EXT_extended_dynamic_state but on RDNA devices it
+        // seems to cause stability issues
+        LOG_WARNING(
+            Render_Vulkan,
+            "Blacklisting AMD proprietary on RDNA devices from VK_EXT_extended_dynamic_state");
+        ext_extended_dynamic_state = false;
+    }
+
+    graphics_queue = logical.GetQueue(graphics_family);
+    present_queue = logical.GetQueue(present_family);
+
+    use_asynchronous_shaders = Settings::values.use_asynchronous_shaders.GetValue();
+}
+
+Device::~Device() = default;
+
+VkFormat Device::GetSupportedFormat(VkFormat wanted_format, VkFormatFeatureFlags wanted_usage,
+                                    FormatType format_type) const {
+    if (IsFormatSupported(wanted_format, wanted_usage, format_type)) {
+        return wanted_format;
+    }
+    // The wanted format is not supported by hardware, search for alternatives
+    const VkFormat* alternatives = GetFormatAlternatives(wanted_format);
+    if (alternatives == nullptr) {
+        UNREACHABLE_MSG("Format={} with usage={} and type={} has no defined alternatives and host "
+                        "hardware does not support it",
+                        wanted_format, wanted_usage, format_type);
+        return wanted_format;
+    }
+
+    std::size_t i = 0;
+    for (VkFormat alternative = *alternatives; alternative; alternative = alternatives[++i]) {
+        if (!IsFormatSupported(alternative, wanted_usage, format_type)) {
+            continue;
+        }
+        LOG_WARNING(Render_Vulkan,
+                    "Emulating format={} with alternative format={} with usage={} and type={}",
+                    wanted_format, alternative, wanted_usage, format_type);
+        return alternative;
+    }
+
+    // No alternatives found, panic
+    UNREACHABLE_MSG("Format={} with usage={} and type={} is not supported by the host hardware and "
+                    "doesn't support any of the alternatives",
+                    wanted_format, wanted_usage, format_type);
+    return wanted_format;
+}
+
+void Device::ReportLoss() const {
+    LOG_CRITICAL(Render_Vulkan, "Device loss occurred!");
+
+    // Wait for the log to flush and for Nsight Aftermath to dump the results
+    std::this_thread::sleep_for(std::chrono::seconds{15});
+}
+
+void Device::SaveShader(const std::vector& spirv) const {
+    nsight_aftermath_tracker.SaveShader(spirv);
+}
+
+bool Device::IsOptimalAstcSupported(const VkPhysicalDeviceFeatures& features) const {
+    // Disable for now to avoid converting ASTC twice.
+ static constexpr std::array astc_formats = { + VK_FORMAT_ASTC_4x4_UNORM_BLOCK, VK_FORMAT_ASTC_4x4_SRGB_BLOCK, + VK_FORMAT_ASTC_5x4_UNORM_BLOCK, VK_FORMAT_ASTC_5x4_SRGB_BLOCK, + VK_FORMAT_ASTC_5x5_UNORM_BLOCK, VK_FORMAT_ASTC_5x5_SRGB_BLOCK, + VK_FORMAT_ASTC_6x5_UNORM_BLOCK, VK_FORMAT_ASTC_6x5_SRGB_BLOCK, + VK_FORMAT_ASTC_6x6_UNORM_BLOCK, VK_FORMAT_ASTC_6x6_SRGB_BLOCK, + VK_FORMAT_ASTC_8x5_UNORM_BLOCK, VK_FORMAT_ASTC_8x5_SRGB_BLOCK, + VK_FORMAT_ASTC_8x6_UNORM_BLOCK, VK_FORMAT_ASTC_8x6_SRGB_BLOCK, + VK_FORMAT_ASTC_8x8_UNORM_BLOCK, VK_FORMAT_ASTC_8x8_SRGB_BLOCK, + VK_FORMAT_ASTC_10x5_UNORM_BLOCK, VK_FORMAT_ASTC_10x5_SRGB_BLOCK, + VK_FORMAT_ASTC_10x6_UNORM_BLOCK, VK_FORMAT_ASTC_10x6_SRGB_BLOCK, + VK_FORMAT_ASTC_10x8_UNORM_BLOCK, VK_FORMAT_ASTC_10x8_SRGB_BLOCK, + VK_FORMAT_ASTC_10x10_UNORM_BLOCK, VK_FORMAT_ASTC_10x10_SRGB_BLOCK, + VK_FORMAT_ASTC_12x10_UNORM_BLOCK, VK_FORMAT_ASTC_12x10_SRGB_BLOCK, + VK_FORMAT_ASTC_12x12_UNORM_BLOCK, VK_FORMAT_ASTC_12x12_SRGB_BLOCK, + }; + if (!features.textureCompressionASTC_LDR) { + return false; + } + const auto format_feature_usage{ + VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT | VK_FORMAT_FEATURE_BLIT_SRC_BIT | + VK_FORMAT_FEATURE_BLIT_DST_BIT | VK_FORMAT_FEATURE_TRANSFER_SRC_BIT | + VK_FORMAT_FEATURE_TRANSFER_DST_BIT}; + for (const auto format : astc_formats) { + const auto physical_format_properties{physical.GetFormatProperties(format)}; + if ((physical_format_properties.optimalTilingFeatures & format_feature_usage) == 0) { + return false; + } + } + return true; +} + +bool Device::TestDepthStencilBlits() const { + static constexpr VkFormatFeatureFlags required_features = + VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_BLIT_DST_BIT; + const auto test_features = [](VkFormatProperties props) { + return (props.optimalTilingFeatures & required_features) == required_features; + }; + return test_features(format_properties.at(VK_FORMAT_D32_SFLOAT_S8_UINT)) && + test_features(format_properties.at(VK_FORMAT_D24_UNORM_S8_UINT)); +} + +bool Device::IsFormatSupported(VkFormat wanted_format, VkFormatFeatureFlags wanted_usage, + FormatType format_type) const { + const auto it = format_properties.find(wanted_format); + if (it == format_properties.end()) { + UNIMPLEMENTED_MSG("Unimplemented format query={}", wanted_format); + return true; + } + const auto supported_usage = GetFormatFeatures(it->second, format_type); + return (supported_usage & wanted_usage) == wanted_usage; +} + +void Device::CheckSuitability() const { + std::bitset available_extensions; + for (const VkExtensionProperties& property : physical.EnumerateDeviceExtensionProperties()) { + for (std::size_t i = 0; i < REQUIRED_EXTENSIONS.size(); ++i) { + if (available_extensions[i]) { + continue; + } + const std::string_view name{property.extensionName}; + available_extensions[i] = name == REQUIRED_EXTENSIONS[i]; + } + } + for (size_t i = 0; i < REQUIRED_EXTENSIONS.size(); ++i) { + if (available_extensions[i]) { + continue; + } + LOG_ERROR(Render_Vulkan, "Missing required extension: {}", REQUIRED_EXTENSIONS[i]); + throw vk::Exception(VK_ERROR_EXTENSION_NOT_PRESENT); + } + struct LimitTuple { + u32 minimum; + u32 value; + const char* name; + }; + const VkPhysicalDeviceLimits& limits{properties.limits}; + const std::array limits_report{ + LimitTuple{65536, limits.maxUniformBufferRange, "maxUniformBufferRange"}, + LimitTuple{16, limits.maxViewports, "maxViewports"}, + LimitTuple{8, limits.maxColorAttachments, "maxColorAttachments"}, + LimitTuple{8, limits.maxClipDistances, "maxClipDistances"}, + }; + for (const auto& 
tuple : limits_report) { + if (tuple.value < tuple.minimum) { + LOG_ERROR(Render_Vulkan, "{} has to be {} or greater but it is {}", tuple.name, + tuple.minimum, tuple.value); + throw vk::Exception(VK_ERROR_FEATURE_NOT_PRESENT); + } + } + const VkPhysicalDeviceFeatures features{physical.GetFeatures()}; + const std::array feature_report{ + std::make_pair(features.vertexPipelineStoresAndAtomics, "vertexPipelineStoresAndAtomics"), + std::make_pair(features.imageCubeArray, "imageCubeArray"), + std::make_pair(features.independentBlend, "independentBlend"), + std::make_pair(features.depthClamp, "depthClamp"), + std::make_pair(features.samplerAnisotropy, "samplerAnisotropy"), + std::make_pair(features.largePoints, "largePoints"), + std::make_pair(features.multiViewport, "multiViewport"), + std::make_pair(features.depthBiasClamp, "depthBiasClamp"), + std::make_pair(features.geometryShader, "geometryShader"), + std::make_pair(features.tessellationShader, "tessellationShader"), + std::make_pair(features.occlusionQueryPrecise, "occlusionQueryPrecise"), + std::make_pair(features.fragmentStoresAndAtomics, "fragmentStoresAndAtomics"), + std::make_pair(features.shaderImageGatherExtended, "shaderImageGatherExtended"), + std::make_pair(features.shaderStorageImageMultisample, "shaderStorageImageMultisample"), + std::make_pair(features.shaderStorageImageWriteWithoutFormat, + "shaderStorageImageWriteWithoutFormat"), + }; + for (const auto& [is_supported, name] : feature_report) { + if (is_supported) { + continue; + } + LOG_ERROR(Render_Vulkan, "Missing required feature: {}", name); + throw vk::Exception(VK_ERROR_FEATURE_NOT_PRESENT); + } +} + +std::vector Device::LoadExtensions() { + std::vector extensions; + extensions.reserve(7 + REQUIRED_EXTENSIONS.size()); + extensions.insert(extensions.begin(), REQUIRED_EXTENSIONS.begin(), REQUIRED_EXTENSIONS.end()); + + bool has_khr_shader_float16_int8{}; + bool has_ext_subgroup_size_control{}; + bool has_ext_transform_feedback{}; + bool has_ext_custom_border_color{}; + bool has_ext_extended_dynamic_state{}; + bool has_ext_robustness2{}; + for (const VkExtensionProperties& extension : physical.EnumerateDeviceExtensionProperties()) { + const auto test = [&](std::optional> status, const char* name, + bool push) { + if (extension.extensionName != std::string_view(name)) { + return; + } + if (push) { + extensions.push_back(name); + } + if (status) { + status->get() = true; + } + }; + test(nv_viewport_swizzle, VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME, true); + test(khr_uniform_buffer_standard_layout, + VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME, true); + test(has_khr_shader_float16_int8, VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME, false); + test(ext_depth_range_unrestricted, VK_EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME, true); + test(ext_index_type_uint8, VK_EXT_INDEX_TYPE_UINT8_EXTENSION_NAME, true); + test(ext_sampler_filter_minmax, VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME, true); + test(ext_shader_viewport_index_layer, VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME, + true); + test(ext_tooling_info, VK_EXT_TOOLING_INFO_EXTENSION_NAME, true); + test(ext_shader_stencil_export, VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME, true); + test(has_ext_transform_feedback, VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME, false); + test(has_ext_custom_border_color, VK_EXT_CUSTOM_BORDER_COLOR_EXTENSION_NAME, false); + test(has_ext_extended_dynamic_state, VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME, false); + test(has_ext_robustness2, VK_EXT_ROBUSTNESS_2_EXTENSION_NAME, false); + 
test(has_ext_subgroup_size_control, VK_EXT_SUBGROUP_SIZE_CONTROL_EXTENSION_NAME, false); + if (Settings::values.renderer_debug) { + test(nv_device_diagnostics_config, VK_NV_DEVICE_DIAGNOSTICS_CONFIG_EXTENSION_NAME, + true); + } + } + + VkPhysicalDeviceFeatures2KHR features; + features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR; + + VkPhysicalDeviceProperties2KHR physical_properties; + physical_properties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR; + + if (has_khr_shader_float16_int8) { + VkPhysicalDeviceFloat16Int8FeaturesKHR float16_int8_features; + float16_int8_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR; + float16_int8_features.pNext = nullptr; + features.pNext = &float16_int8_features; + + physical.GetFeatures2KHR(features); + is_float16_supported = float16_int8_features.shaderFloat16; + extensions.push_back(VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME); + } + + if (has_ext_subgroup_size_control) { + VkPhysicalDeviceSubgroupSizeControlFeaturesEXT subgroup_features; + subgroup_features.sType = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT; + subgroup_features.pNext = nullptr; + features.pNext = &subgroup_features; + physical.GetFeatures2KHR(features); + + VkPhysicalDeviceSubgroupSizeControlPropertiesEXT subgroup_properties; + subgroup_properties.sType = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT; + subgroup_properties.pNext = nullptr; + physical_properties.pNext = &subgroup_properties; + physical.GetProperties2KHR(physical_properties); + + is_warp_potentially_bigger = subgroup_properties.maxSubgroupSize > GuestWarpSize; + + if (subgroup_features.subgroupSizeControl && + subgroup_properties.minSubgroupSize <= GuestWarpSize && + subgroup_properties.maxSubgroupSize >= GuestWarpSize) { + extensions.push_back(VK_EXT_SUBGROUP_SIZE_CONTROL_EXTENSION_NAME); + guest_warp_stages = subgroup_properties.requiredSubgroupSizeStages; + } + } else { + is_warp_potentially_bigger = true; + } + + if (has_ext_transform_feedback) { + VkPhysicalDeviceTransformFeedbackFeaturesEXT tfb_features; + tfb_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT; + tfb_features.pNext = nullptr; + features.pNext = &tfb_features; + physical.GetFeatures2KHR(features); + + VkPhysicalDeviceTransformFeedbackPropertiesEXT tfb_properties; + tfb_properties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT; + tfb_properties.pNext = nullptr; + physical_properties.pNext = &tfb_properties; + physical.GetProperties2KHR(physical_properties); + + if (tfb_features.transformFeedback && tfb_features.geometryStreams && + tfb_properties.maxTransformFeedbackStreams >= 4 && + tfb_properties.maxTransformFeedbackBuffers && tfb_properties.transformFeedbackQueries && + tfb_properties.transformFeedbackDraw) { + extensions.push_back(VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME); + ext_transform_feedback = true; + } + } + + if (has_ext_custom_border_color) { + VkPhysicalDeviceCustomBorderColorFeaturesEXT border_features; + border_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT; + border_features.pNext = nullptr; + features.pNext = &border_features; + physical.GetFeatures2KHR(features); + + if (border_features.customBorderColors && border_features.customBorderColorWithoutFormat) { + extensions.push_back(VK_EXT_CUSTOM_BORDER_COLOR_EXTENSION_NAME); + ext_custom_border_color = true; + } + } + + if (has_ext_extended_dynamic_state) { + 
VkPhysicalDeviceExtendedDynamicStateFeaturesEXT dynamic_state; + dynamic_state.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT; + dynamic_state.pNext = nullptr; + features.pNext = &dynamic_state; + physical.GetFeatures2KHR(features); + + if (dynamic_state.extendedDynamicState) { + extensions.push_back(VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME); + ext_extended_dynamic_state = true; + } + } + + if (has_ext_robustness2) { + VkPhysicalDeviceRobustness2FeaturesEXT robustness2; + robustness2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT; + robustness2.pNext = nullptr; + features.pNext = &robustness2; + physical.GetFeatures2KHR(features); + if (robustness2.nullDescriptor && robustness2.robustImageAccess2) { + extensions.push_back(VK_EXT_ROBUSTNESS_2_EXTENSION_NAME); + ext_robustness2 = true; + } + } + + return extensions; +} + +void Device::SetupFamilies(VkSurfaceKHR surface) { + const std::vector queue_family_properties = physical.GetQueueFamilyProperties(); + std::optional graphics; + std::optional present; + for (u32 index = 0; index < static_cast(queue_family_properties.size()); ++index) { + if (graphics && present) { + break; + } + const VkQueueFamilyProperties& queue_family = queue_family_properties[index]; + if (queue_family.queueCount == 0) { + continue; + } + if (queue_family.queueFlags & VK_QUEUE_GRAPHICS_BIT) { + graphics = index; + } + if (physical.GetSurfaceSupportKHR(index, surface)) { + present = index; + } + } + if (!graphics) { + LOG_ERROR(Render_Vulkan, "Device lacks a graphics queue"); + throw vk::Exception(VK_ERROR_FEATURE_NOT_PRESENT); + } + if (!present) { + LOG_ERROR(Render_Vulkan, "Device lacks a present queue"); + throw vk::Exception(VK_ERROR_FEATURE_NOT_PRESENT); + } + graphics_family = *graphics; + present_family = *present; +} + +void Device::SetupFeatures() { + const auto supported_features{physical.GetFeatures()}; + is_formatless_image_load_supported = supported_features.shaderStorageImageReadWithoutFormat; + is_blit_depth_stencil_supported = TestDepthStencilBlits(); + is_optimal_astc_supported = IsOptimalAstcSupported(supported_features); +} + +void Device::CollectTelemetryParameters() { + VkPhysicalDeviceDriverPropertiesKHR driver{ + .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR, + .pNext = nullptr, + .driverID = {}, + .driverName = {}, + .driverInfo = {}, + .conformanceVersion = {}, + }; + + VkPhysicalDeviceProperties2KHR device_properties{ + .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR, + .pNext = &driver, + .properties = {}, + }; + physical.GetProperties2KHR(device_properties); + + driver_id = driver.driverID; + vendor_name = driver.driverName; + + const std::vector extensions = physical.EnumerateDeviceExtensionProperties(); + reported_extensions.reserve(std::size(extensions)); + for (const auto& extension : extensions) { + reported_extensions.emplace_back(extension.extensionName); + } +} + +void Device::CollectToolingInfo() { + if (!ext_tooling_info) { + return; + } + const auto vkGetPhysicalDeviceToolPropertiesEXT = + reinterpret_cast( + dld.vkGetInstanceProcAddr(instance, "vkGetPhysicalDeviceToolPropertiesEXT")); + if (!vkGetPhysicalDeviceToolPropertiesEXT) { + return; + } + u32 tool_count = 0; + if (vkGetPhysicalDeviceToolPropertiesEXT(physical, &tool_count, nullptr) != VK_SUCCESS) { + return; + } + std::vector tools(tool_count); + if (vkGetPhysicalDeviceToolPropertiesEXT(physical, &tool_count, tools.data()) != VK_SUCCESS) { + return; + } + for (const 
VkPhysicalDeviceToolPropertiesEXT& tool : tools) { + const std::string_view name = tool.name; + LOG_INFO(Render_Vulkan, "{}", name); + has_renderdoc = has_renderdoc || name == "RenderDoc"; + has_nsight_graphics = has_nsight_graphics || name == "NVIDIA Nsight Graphics"; + } +} + +std::vector Device::GetDeviceQueueCreateInfos() const { + static constexpr float QUEUE_PRIORITY = 1.0f; + + std::unordered_set unique_queue_families{graphics_family, present_family}; + std::vector queue_cis; + queue_cis.reserve(unique_queue_families.size()); + + for (const u32 queue_family : unique_queue_families) { + auto& ci = queue_cis.emplace_back(VkDeviceQueueCreateInfo{ + .sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, + .pNext = nullptr, + .flags = 0, + .queueFamilyIndex = queue_family, + .queueCount = 1, + .pQueuePriorities = nullptr, + }); + ci.pQueuePriorities = &QUEUE_PRIORITY; + } + + return queue_cis; +} + +} // namespace Vulkan diff --git a/src/video_core/vulkan_common/vulkan_device.h b/src/video_core/vulkan_common/vulkan_device.h new file mode 100644 index 000000000..b2651e049 --- /dev/null +++ b/src/video_core/vulkan_common/vulkan_device.h @@ -0,0 +1,306 @@ +// Copyright 2018 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#pragma once + +#include +#include +#include +#include + +#include "common/common_types.h" +#include "video_core/renderer_vulkan/nsight_aftermath_tracker.h" +#include "video_core/vulkan_common/vulkan_wrapper.h" + +namespace Vulkan { + +/// Format usage descriptor. +enum class FormatType { Linear, Optimal, Buffer }; + +/// Subgroup size of the guest emulated hardware (Nvidia has 32 threads per subgroup). +const u32 GuestWarpSize = 32; + +/// Handles data specific to a physical device. +class Device final { +public: + explicit Device(VkInstance instance, vk::PhysicalDevice physical, VkSurfaceKHR surface, + const vk::InstanceDispatch& dld); + ~Device(); + + /** + * Returns a format supported by the device for the passed requeriments. + * @param wanted_format The ideal format to be returned. It may not be the returned format. + * @param wanted_usage The usage that must be fulfilled even if the format is not supported. + * @param format_type Format type usage. + * @returns A format supported by the device. + */ + VkFormat GetSupportedFormat(VkFormat wanted_format, VkFormatFeatureFlags wanted_usage, + FormatType format_type) const; + + /// Reports a device loss. + void ReportLoss() const; + + /// Reports a shader to Nsight Aftermath. + void SaveShader(const std::vector& spirv) const; + + /// Returns the dispatch loader with direct function pointers of the device. + const vk::DeviceDispatch& GetDispatchLoader() const { + return dld; + } + + /// Returns the logical device. + const vk::Device& GetLogical() const { + return logical; + } + + /// Returns the physical device. + vk::PhysicalDevice GetPhysical() const { + return physical; + } + + /// Returns the main graphics queue. + vk::Queue GetGraphicsQueue() const { + return graphics_queue; + } + + /// Returns the main present queue. + vk::Queue GetPresentQueue() const { + return present_queue; + } + + /// Returns main graphics queue family index. + u32 GetGraphicsFamily() const { + return graphics_family; + } + + /// Returns main present queue family index. + u32 GetPresentFamily() const { + return present_family; + } + + /// Returns the current Vulkan API version provided in Vulkan-formatted version numbers. 
+ u32 ApiVersion() const { + return properties.apiVersion; + } + + /// Returns the current driver version provided in Vulkan-formatted version numbers. + u32 GetDriverVersion() const { + return properties.driverVersion; + } + + /// Returns the device name. + std::string_view GetModelName() const { + return properties.deviceName; + } + + /// Returns the driver ID. + VkDriverIdKHR GetDriverID() const { + return driver_id; + } + + /// Returns uniform buffer alignment requeriment. + VkDeviceSize GetUniformBufferAlignment() const { + return properties.limits.minUniformBufferOffsetAlignment; + } + + /// Returns storage alignment requeriment. + VkDeviceSize GetStorageBufferAlignment() const { + return properties.limits.minStorageBufferOffsetAlignment; + } + + /// Returns the maximum range for storage buffers. + VkDeviceSize GetMaxStorageBufferRange() const { + return properties.limits.maxStorageBufferRange; + } + + /// Returns the maximum size for push constants. + VkDeviceSize GetMaxPushConstantsSize() const { + return properties.limits.maxPushConstantsSize; + } + + /// Returns the maximum size for shared memory. + u32 GetMaxComputeSharedMemorySize() const { + return properties.limits.maxComputeSharedMemorySize; + } + + /// Returns true if ASTC is natively supported. + bool IsOptimalAstcSupported() const { + return is_optimal_astc_supported; + } + + /// Returns true if the device supports float16 natively + bool IsFloat16Supported() const { + return is_float16_supported; + } + + /// Returns true if the device warp size can potentially be bigger than guest's warp size. + bool IsWarpSizePotentiallyBiggerThanGuest() const { + return is_warp_potentially_bigger; + } + + /// Returns true if the device can be forced to use the guest warp size. + bool IsGuestWarpSizeSupported(VkShaderStageFlagBits stage) const { + return guest_warp_stages & stage; + } + + /// Returns true if formatless image load is supported. + bool IsFormatlessImageLoadSupported() const { + return is_formatless_image_load_supported; + } + + /// Returns true when blitting from and to depth stencil images is supported. + bool IsBlitDepthStencilSupported() const { + return is_blit_depth_stencil_supported; + } + + /// Returns true if the device supports VK_NV_viewport_swizzle. + bool IsNvViewportSwizzleSupported() const { + return nv_viewport_swizzle; + } + + /// Returns true if the device supports VK_EXT_scalar_block_layout. + bool IsKhrUniformBufferStandardLayoutSupported() const { + return khr_uniform_buffer_standard_layout; + } + + /// Returns true if the device supports VK_EXT_index_type_uint8. + bool IsExtIndexTypeUint8Supported() const { + return ext_index_type_uint8; + } + + /// Returns true if the device supports VK_EXT_sampler_filter_minmax. + bool IsExtSamplerFilterMinmaxSupported() const { + return ext_sampler_filter_minmax; + } + + /// Returns true if the device supports VK_EXT_depth_range_unrestricted. + bool IsExtDepthRangeUnrestrictedSupported() const { + return ext_depth_range_unrestricted; + } + + /// Returns true if the device supports VK_EXT_shader_viewport_index_layer. + bool IsExtShaderViewportIndexLayerSupported() const { + return ext_shader_viewport_index_layer; + } + + /// Returns true if the device supports VK_EXT_transform_feedback. + bool IsExtTransformFeedbackSupported() const { + return ext_transform_feedback; + } + + /// Returns true if the device supports VK_EXT_custom_border_color. 
+    bool IsExtCustomBorderColorSupported() const {
+        return ext_custom_border_color;
+    }
+
+    /// Returns true if the device supports VK_EXT_extended_dynamic_state.
+    bool IsExtExtendedDynamicStateSupported() const {
+        return ext_extended_dynamic_state;
+    }
+
+    /// Returns true if the device supports VK_EXT_shader_stencil_export.
+    bool IsExtShaderStencilExportSupported() const {
+        return ext_shader_stencil_export;
+    }
+
+    /// Returns true when a known debugging tool is attached.
+    bool HasDebuggingToolAttached() const {
+        return has_renderdoc || has_nsight_graphics;
+    }
+
+    /// Returns the vendor name reported from Vulkan.
+    std::string_view GetVendorName() const {
+        return vendor_name;
+    }
+
+    /// Returns the list of available extensions.
+    const std::vector& GetAvailableExtensions() const {
+        return reported_extensions;
+    }
+
+    /// Returns true if the setting for async shader compilation is enabled.
+    bool UseAsynchronousShaders() const {
+        return use_asynchronous_shaders;
+    }
+
+private:
+    /// Checks if the physical device is suitable.
+    void CheckSuitability() const;
+
+    /// Loads extensions into a vector and stores available ones in this object.
+    std::vector LoadExtensions();
+
+    /// Sets up queue families.
+    void SetupFamilies(VkSurfaceKHR surface);
+
+    /// Sets up device features.
+    void SetupFeatures();
+
+    /// Collects telemetry information from the device.
+    void CollectTelemetryParameters();
+
+    /// Collects information about attached tools.
+    void CollectToolingInfo();
+
+    /// Returns a list of queue initialization descriptors.
+    std::vector GetDeviceQueueCreateInfos() const;
+
+    /// Returns true if ASTC textures are natively supported.
+    bool IsOptimalAstcSupported(const VkPhysicalDeviceFeatures& features) const;
+
+    /// Returns true if the device natively supports blitting depth stencil images.
+    bool TestDepthStencilBlits() const;
+
+    /// Returns true if a format is supported.
+    bool IsFormatSupported(VkFormat wanted_format, VkFormatFeatureFlags wanted_usage,
+                           FormatType format_type) const;
+
+    VkInstance instance; ///< Vulkan instance.
+    vk::DeviceDispatch dld; ///< Device function pointers.
+    vk::PhysicalDevice physical; ///< Physical device.
+    VkPhysicalDeviceProperties properties; ///< Device properties.
+    vk::Device logical; ///< Logical device.
+    vk::Queue graphics_queue; ///< Main graphics queue.
+    vk::Queue present_queue; ///< Main present queue.
+    u32 instance_version{}; ///< Vulkan instance version.
+    u32 graphics_family{}; ///< Main graphics queue family index.
+    u32 present_family{}; ///< Main present queue family index.
+    VkDriverIdKHR driver_id{}; ///< Driver ID.
+    VkShaderStageFlags guest_warp_stages{}; ///< Stages where the guest warp size can be forced.
+    bool is_optimal_astc_supported{}; ///< Support for native ASTC.
+    bool is_float16_supported{}; ///< Support for float16 arithmetic.
+    bool is_warp_potentially_bigger{}; ///< Host warp size can be bigger than guest.
+    bool is_formatless_image_load_supported{}; ///< Support for shader image read without format.
+    bool is_blit_depth_stencil_supported{}; ///< Support for blitting from and to depth stencil.
+    bool nv_viewport_swizzle{}; ///< Support for VK_NV_viewport_swizzle.
+    bool khr_uniform_buffer_standard_layout{}; ///< Support for std430 on UBOs.
+    bool ext_index_type_uint8{}; ///< Support for VK_EXT_index_type_uint8.
+    bool ext_sampler_filter_minmax{}; ///< Support for VK_EXT_sampler_filter_minmax.
+    bool ext_depth_range_unrestricted{}; ///< Support for VK_EXT_depth_range_unrestricted.
+ bool ext_shader_viewport_index_layer{}; ///< Support for VK_EXT_shader_viewport_index_layer. + bool ext_tooling_info{}; ///< Support for VK_EXT_tooling_info. + bool ext_transform_feedback{}; ///< Support for VK_EXT_transform_feedback. + bool ext_custom_border_color{}; ///< Support for VK_EXT_custom_border_color. + bool ext_extended_dynamic_state{}; ///< Support for VK_EXT_extended_dynamic_state. + bool ext_robustness2{}; ///< Support for VK_EXT_robustness2. + bool ext_shader_stencil_export{}; ///< Support for VK_EXT_shader_stencil_export. + bool nv_device_diagnostics_config{}; ///< Support for VK_NV_device_diagnostics_config. + bool has_renderdoc{}; ///< Has RenderDoc attached + bool has_nsight_graphics{}; ///< Has Nsight Graphics attached + + // Asynchronous Graphics Pipeline setting + bool use_asynchronous_shaders{}; ///< Setting to use asynchronous shaders/graphics pipeline + + // Telemetry parameters + std::string vendor_name; ///< Device's driver name. + std::vector reported_extensions; ///< Reported Vulkan extensions. + + /// Format properties dictionary. + std::unordered_map format_properties; + + /// Nsight Aftermath GPU crash tracker + NsightAftermathTracker nsight_aftermath_tracker; +}; + +} // namespace Vulkan -- cgit v1.2.3 From d235cf393399c386a59b5e39d39bc9efb161aea0 Mon Sep 17 00:00:00 2001 From: ReinUsesLisp Date: Sat, 26 Dec 2020 01:26:52 -0300 Subject: renderer_vulkan/nsight_aftermath_tracker: Move to vulkan_common --- .../vulkan_common/nsight_aftermath_tracker.cpp | 212 +++++++++++++++++++++ .../vulkan_common/nsight_aftermath_tracker.h | 82 ++++++++ src/video_core/vulkan_common/vulkan_device.cpp | 7 +- src/video_core/vulkan_common/vulkan_device.h | 5 +- 4 files changed, 302 insertions(+), 4 deletions(-) create mode 100644 src/video_core/vulkan_common/nsight_aftermath_tracker.cpp create mode 100644 src/video_core/vulkan_common/nsight_aftermath_tracker.h (limited to 'src/video_core/vulkan_common') diff --git a/src/video_core/vulkan_common/nsight_aftermath_tracker.cpp b/src/video_core/vulkan_common/nsight_aftermath_tracker.cpp new file mode 100644 index 000000000..8d10ac29e --- /dev/null +++ b/src/video_core/vulkan_common/nsight_aftermath_tracker.cpp @@ -0,0 +1,212 @@ +// Copyright 2020 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. 
+
+#ifdef HAS_NSIGHT_AFTERMATH
+
+#include
+#include
+#include
+#include
+#include
+
+#include
+
+#define VK_NO_PROTOTYPES
+#include
+
+#include
+#include
+#include
+#include
+
+#include "common/common_paths.h"
+#include "common/common_types.h"
+#include "common/file_util.h"
+#include "common/logging/log.h"
+#include "common/scope_exit.h"
+
+#include "video_core/vulkan_common/nsight_aftermath_tracker.h"
+
+namespace Vulkan {
+
+static constexpr char AFTERMATH_LIB_NAME[] = "GFSDK_Aftermath_Lib.x64.dll";
+
+NsightAftermathTracker::NsightAftermathTracker() {
+    if (!dl.Open(AFTERMATH_LIB_NAME)) {
+        LOG_ERROR(Render_Vulkan, "Failed to load Nsight Aftermath DLL");
+        return;
+    }
+    if (!dl.GetSymbol("GFSDK_Aftermath_DisableGpuCrashDumps",
+                      &GFSDK_Aftermath_DisableGpuCrashDumps) ||
+        !dl.GetSymbol("GFSDK_Aftermath_EnableGpuCrashDumps",
+                      &GFSDK_Aftermath_EnableGpuCrashDumps) ||
+        !dl.GetSymbol("GFSDK_Aftermath_GetShaderDebugInfoIdentifier",
+                      &GFSDK_Aftermath_GetShaderDebugInfoIdentifier) ||
+        !dl.GetSymbol("GFSDK_Aftermath_GetShaderHashSpirv", &GFSDK_Aftermath_GetShaderHashSpirv) ||
+        !dl.GetSymbol("GFSDK_Aftermath_GpuCrashDump_CreateDecoder",
+                      &GFSDK_Aftermath_GpuCrashDump_CreateDecoder) ||
+        !dl.GetSymbol("GFSDK_Aftermath_GpuCrashDump_DestroyDecoder",
+                      &GFSDK_Aftermath_GpuCrashDump_DestroyDecoder) ||
+        !dl.GetSymbol("GFSDK_Aftermath_GpuCrashDump_GenerateJSON",
+                      &GFSDK_Aftermath_GpuCrashDump_GenerateJSON) ||
+        !dl.GetSymbol("GFSDK_Aftermath_GpuCrashDump_GetJSON",
+                      &GFSDK_Aftermath_GpuCrashDump_GetJSON)) {
+        LOG_ERROR(Render_Vulkan, "Failed to load Nsight Aftermath function pointers");
+        return;
+    }
+    dump_dir = Common::FS::GetUserPath(Common::FS::UserPath::LogDir) + "gpucrash";
+
+    void(Common::FS::DeleteDirRecursively(dump_dir));
+    if (!Common::FS::CreateDir(dump_dir)) {
+        LOG_ERROR(Render_Vulkan, "Failed to create Nsight Aftermath dump directory");
+        return;
+    }
+    if (!GFSDK_Aftermath_SUCCEED(GFSDK_Aftermath_EnableGpuCrashDumps(
+            GFSDK_Aftermath_Version_API, GFSDK_Aftermath_GpuCrashDumpWatchedApiFlags_Vulkan,
+            GFSDK_Aftermath_GpuCrashDumpFeatureFlags_Default, GpuCrashDumpCallback,
+            ShaderDebugInfoCallback, CrashDumpDescriptionCallback, this))) {
+        LOG_ERROR(Render_Vulkan, "GFSDK_Aftermath_EnableGpuCrashDumps failed");
+        return;
+    }
+    LOG_INFO(Render_Vulkan, "Nsight Aftermath dump directory is \"{}\"", dump_dir);
+    initialized = true;
+}
+
+NsightAftermathTracker::~NsightAftermathTracker() {
+    if (initialized) {
+        (void)GFSDK_Aftermath_DisableGpuCrashDumps();
+    }
+}
+
+void NsightAftermathTracker::SaveShader(const std::vector& spirv) const {
+    if (!initialized) {
+        return;
+    }
+
+    std::vector spirv_copy = spirv;
+    GFSDK_Aftermath_SpirvCode shader;
+    shader.pData = spirv_copy.data();
+    shader.size = static_cast(spirv_copy.size() * 4);
+
+    std::scoped_lock lock{mutex};
+
+    GFSDK_Aftermath_ShaderHash hash;
+    if (!GFSDK_Aftermath_SUCCEED(
+            GFSDK_Aftermath_GetShaderHashSpirv(GFSDK_Aftermath_Version_API, &shader, &hash))) {
+        LOG_ERROR(Render_Vulkan, "Failed to hash SPIR-V module");
+        return;
+    }
+
+    Common::FS::IOFile file(fmt::format("{}/source_{:016x}.spv", dump_dir, hash.hash), "wb");
+    if (!file.IsOpen()) {
+        LOG_ERROR(Render_Vulkan, "Failed to dump SPIR-V module with hash={:016x}", hash.hash);
+        return;
+    }
+    if (file.WriteArray(spirv.data(), spirv.size()) != spirv.size()) {
+        LOG_ERROR(Render_Vulkan, "Failed to write SPIR-V module with hash={:016x}", hash.hash);
+        return;
+    }
+}
+
+void NsightAftermathTracker::OnGpuCrashDumpCallback(const void* gpu_crash_dump,
+                                                    u32
gpu_crash_dump_size) { + std::scoped_lock lock{mutex}; + + LOG_CRITICAL(Render_Vulkan, "called"); + + GFSDK_Aftermath_GpuCrashDump_Decoder decoder; + if (!GFSDK_Aftermath_SUCCEED(GFSDK_Aftermath_GpuCrashDump_CreateDecoder( + GFSDK_Aftermath_Version_API, gpu_crash_dump, gpu_crash_dump_size, &decoder))) { + LOG_ERROR(Render_Vulkan, "Failed to create decoder"); + return; + } + SCOPE_EXIT({ GFSDK_Aftermath_GpuCrashDump_DestroyDecoder(decoder); }); + + u32 json_size = 0; + if (!GFSDK_Aftermath_SUCCEED(GFSDK_Aftermath_GpuCrashDump_GenerateJSON( + decoder, GFSDK_Aftermath_GpuCrashDumpDecoderFlags_ALL_INFO, + GFSDK_Aftermath_GpuCrashDumpFormatterFlags_NONE, nullptr, nullptr, nullptr, nullptr, + this, &json_size))) { + LOG_ERROR(Render_Vulkan, "Failed to generate JSON"); + return; + } + std::vector json(json_size); + if (!GFSDK_Aftermath_SUCCEED( + GFSDK_Aftermath_GpuCrashDump_GetJSON(decoder, json_size, json.data()))) { + LOG_ERROR(Render_Vulkan, "Failed to query JSON"); + return; + } + + const std::string base_name = [this] { + const int id = dump_id++; + if (id == 0) { + return fmt::format("{}/crash.nv-gpudmp", dump_dir); + } else { + return fmt::format("{}/crash_{}.nv-gpudmp", dump_dir, id); + } + }(); + + std::string_view dump_view(static_cast(gpu_crash_dump), gpu_crash_dump_size); + if (Common::FS::WriteStringToFile(false, base_name, dump_view) != gpu_crash_dump_size) { + LOG_ERROR(Render_Vulkan, "Failed to write dump file"); + return; + } + const std::string_view json_view(json.data(), json.size()); + if (Common::FS::WriteStringToFile(true, base_name + ".json", json_view) != json.size()) { + LOG_ERROR(Render_Vulkan, "Failed to write JSON"); + return; + } +} + +void NsightAftermathTracker::OnShaderDebugInfoCallback(const void* shader_debug_info, + u32 shader_debug_info_size) { + std::scoped_lock lock{mutex}; + + GFSDK_Aftermath_ShaderDebugInfoIdentifier identifier; + if (!GFSDK_Aftermath_SUCCEED(GFSDK_Aftermath_GetShaderDebugInfoIdentifier( + GFSDK_Aftermath_Version_API, shader_debug_info, shader_debug_info_size, &identifier))) { + LOG_ERROR(Render_Vulkan, "GFSDK_Aftermath_GetShaderDebugInfoIdentifier failed"); + return; + } + + const std::string path = + fmt::format("{}/shader_{:016x}{:016x}.nvdbg", dump_dir, identifier.id[0], identifier.id[1]); + Common::FS::IOFile file(path, "wb"); + if (!file.IsOpen()) { + LOG_ERROR(Render_Vulkan, "Failed to create file {}", path); + return; + } + if (file.WriteBytes(static_cast(shader_debug_info), shader_debug_info_size) != + shader_debug_info_size) { + LOG_ERROR(Render_Vulkan, "Failed to write file {}", path); + return; + } +} + +void NsightAftermathTracker::OnCrashDumpDescriptionCallback( + PFN_GFSDK_Aftermath_AddGpuCrashDumpDescription add_description) { + add_description(GFSDK_Aftermath_GpuCrashDumpDescriptionKey_ApplicationName, "yuzu"); +} + +void NsightAftermathTracker::GpuCrashDumpCallback(const void* gpu_crash_dump, + u32 gpu_crash_dump_size, void* user_data) { + static_cast(user_data)->OnGpuCrashDumpCallback(gpu_crash_dump, + gpu_crash_dump_size); +} + +void NsightAftermathTracker::ShaderDebugInfoCallback(const void* shader_debug_info, + u32 shader_debug_info_size, void* user_data) { + static_cast(user_data)->OnShaderDebugInfoCallback( + shader_debug_info, shader_debug_info_size); +} + +void NsightAftermathTracker::CrashDumpDescriptionCallback( + PFN_GFSDK_Aftermath_AddGpuCrashDumpDescription add_description, void* user_data) { + static_cast(user_data)->OnCrashDumpDescriptionCallback( + add_description); +} + +} // namespace Vulkan + +#endif // 
HAS_NSIGHT_AFTERMATH diff --git a/src/video_core/vulkan_common/nsight_aftermath_tracker.h b/src/video_core/vulkan_common/nsight_aftermath_tracker.h new file mode 100644 index 000000000..cee3847fb --- /dev/null +++ b/src/video_core/vulkan_common/nsight_aftermath_tracker.h @@ -0,0 +1,82 @@ +// Copyright 2020 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#pragma once + +#include +#include +#include + +#define VK_NO_PROTOTYPES +#include + +#ifdef HAS_NSIGHT_AFTERMATH +#include +#include +#include +#endif + +#include "common/common_types.h" +#include "common/dynamic_library.h" + +namespace Vulkan { + +class NsightAftermathTracker { +public: + NsightAftermathTracker(); + ~NsightAftermathTracker(); + + NsightAftermathTracker(const NsightAftermathTracker&) = delete; + NsightAftermathTracker& operator=(const NsightAftermathTracker&) = delete; + + // Delete move semantics because Aftermath initialization uses a pointer to this. + NsightAftermathTracker(NsightAftermathTracker&&) = delete; + NsightAftermathTracker& operator=(NsightAftermathTracker&&) = delete; + + void SaveShader(const std::vector& spirv) const; + +private: +#ifdef HAS_NSIGHT_AFTERMATH + static void GpuCrashDumpCallback(const void* gpu_crash_dump, u32 gpu_crash_dump_size, + void* user_data); + + static void ShaderDebugInfoCallback(const void* shader_debug_info, u32 shader_debug_info_size, + void* user_data); + + static void CrashDumpDescriptionCallback( + PFN_GFSDK_Aftermath_AddGpuCrashDumpDescription add_description, void* user_data); + + void OnGpuCrashDumpCallback(const void* gpu_crash_dump, u32 gpu_crash_dump_size); + + void OnShaderDebugInfoCallback(const void* shader_debug_info, u32 shader_debug_info_size); + + void OnCrashDumpDescriptionCallback( + PFN_GFSDK_Aftermath_AddGpuCrashDumpDescription add_description); + + mutable std::mutex mutex; + + std::string dump_dir; + int dump_id = 0; + + bool initialized = false; + + Common::DynamicLibrary dl; + PFN_GFSDK_Aftermath_DisableGpuCrashDumps GFSDK_Aftermath_DisableGpuCrashDumps; + PFN_GFSDK_Aftermath_EnableGpuCrashDumps GFSDK_Aftermath_EnableGpuCrashDumps; + PFN_GFSDK_Aftermath_GetShaderDebugInfoIdentifier GFSDK_Aftermath_GetShaderDebugInfoIdentifier; + PFN_GFSDK_Aftermath_GetShaderHashSpirv GFSDK_Aftermath_GetShaderHashSpirv; + PFN_GFSDK_Aftermath_GpuCrashDump_CreateDecoder GFSDK_Aftermath_GpuCrashDump_CreateDecoder; + PFN_GFSDK_Aftermath_GpuCrashDump_DestroyDecoder GFSDK_Aftermath_GpuCrashDump_DestroyDecoder; + PFN_GFSDK_Aftermath_GpuCrashDump_GenerateJSON GFSDK_Aftermath_GpuCrashDump_GenerateJSON; + PFN_GFSDK_Aftermath_GpuCrashDump_GetJSON GFSDK_Aftermath_GpuCrashDump_GetJSON; +#endif +}; + +#ifndef HAS_NSIGHT_AFTERMATH +inline NsightAftermathTracker::NsightAftermathTracker() = default; +inline NsightAftermathTracker::~NsightAftermathTracker() = default; +inline void NsightAftermathTracker::SaveShader(const std::vector&) const {} +#endif + +} // namespace Vulkan diff --git a/src/video_core/vulkan_common/vulkan_device.cpp b/src/video_core/vulkan_common/vulkan_device.cpp index 67183eed8..f300f22c9 100644 --- a/src/video_core/vulkan_common/vulkan_device.cpp +++ b/src/video_core/vulkan_common/vulkan_device.cpp @@ -13,6 +13,7 @@ #include "common/assert.h" #include "core/settings.h" +#include "video_core/vulkan_common/nsight_aftermath_tracker.h" #include "video_core/vulkan_common/vulkan_device.h" #include "video_core/vulkan_common/vulkan_wrapper.h" @@ -412,7 +413,7 @@ Device::Device(VkInstance instance_, 
vk::PhysicalDevice physical_, VkSurfaceKHR VkDeviceDiagnosticsConfigCreateInfoNV diagnostics_nv; if (nv_device_diagnostics_config) { - nsight_aftermath_tracker.Initialize(); + nsight_aftermath_tracker = std::make_unique(); diagnostics_nv = { .sType = VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV, @@ -491,7 +492,9 @@ void Device::ReportLoss() const { } void Device::SaveShader(const std::vector& spirv) const { - nsight_aftermath_tracker.SaveShader(spirv); + if (nsight_aftermath_tracker) { + nsight_aftermath_tracker->SaveShader(spirv); + } } bool Device::IsOptimalAstcSupported(const VkPhysicalDeviceFeatures& features) const { diff --git a/src/video_core/vulkan_common/vulkan_device.h b/src/video_core/vulkan_common/vulkan_device.h index b2651e049..a973c3ce4 100644 --- a/src/video_core/vulkan_common/vulkan_device.h +++ b/src/video_core/vulkan_common/vulkan_device.h @@ -10,11 +10,12 @@ #include #include "common/common_types.h" -#include "video_core/renderer_vulkan/nsight_aftermath_tracker.h" #include "video_core/vulkan_common/vulkan_wrapper.h" namespace Vulkan { +class NsightAftermathTracker; + /// Format usage descriptor. enum class FormatType { Linear, Optimal, Buffer }; @@ -300,7 +301,7 @@ private: std::unordered_map format_properties; /// Nsight Aftermath GPU crash tracker - NsightAftermathTracker nsight_aftermath_tracker; + std::unique_ptr nsight_aftermath_tracker; }; } // namespace Vulkan -- cgit v1.2.3 From ac1e4734c2998e764e4116592510690ee2e8af11 Mon Sep 17 00:00:00 2001 From: ReinUsesLisp Date: Sat, 26 Dec 2020 01:32:46 -0300 Subject: vulkan_device: Allow creating a device without surface --- src/video_core/vulkan_common/vulkan_device.cpp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) (limited to 'src/video_core/vulkan_common') diff --git a/src/video_core/vulkan_common/vulkan_device.cpp b/src/video_core/vulkan_common/vulkan_device.cpp index f300f22c9..75173324e 100644 --- a/src/video_core/vulkan_common/vulkan_device.cpp +++ b/src/video_core/vulkan_common/vulkan_device.cpp @@ -775,7 +775,7 @@ void Device::SetupFamilies(VkSurfaceKHR surface) { std::optional graphics; std::optional present; for (u32 index = 0; index < static_cast(queue_family_properties.size()); ++index) { - if (graphics && present) { + if (graphics && (present || !surface)) { break; } const VkQueueFamilyProperties& queue_family = queue_family_properties[index]; @@ -785,7 +785,7 @@ void Device::SetupFamilies(VkSurfaceKHR surface) { if (queue_family.queueFlags & VK_QUEUE_GRAPHICS_BIT) { graphics = index; } - if (physical.GetSurfaceSupportKHR(index, surface)) { + if (surface && physical.GetSurfaceSupportKHR(index, surface)) { present = index; } } @@ -793,7 +793,7 @@ void Device::SetupFamilies(VkSurfaceKHR surface) { LOG_ERROR(Render_Vulkan, "Device lacks a graphics queue"); throw vk::Exception(VK_ERROR_FEATURE_NOT_PRESENT); } - if (!present) { + if (surface && !present) { LOG_ERROR(Render_Vulkan, "Device lacks a present queue"); throw vk::Exception(VK_ERROR_FEATURE_NOT_PRESENT); } -- cgit v1.2.3 From 21199cb9657096ee546413164110e79f33def6a8 Mon Sep 17 00:00:00 2001 From: MerryMage Date: Thu, 7 Jan 2021 17:37:47 +0000 Subject: vulkan_library: Common::DynamicLibrary::Open is [[nodiscard]] Ignore the return value on __APPLE__ systems as well --- src/video_core/vulkan_common/vulkan_library.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'src/video_core/vulkan_common') diff --git a/src/video_core/vulkan_common/vulkan_library.cpp b/src/video_core/vulkan_common/vulkan_library.cpp index 
27c958221..557871d81 100644
--- a/src/video_core/vulkan_common/vulkan_library.cpp
+++ b/src/video_core/vulkan_common/vulkan_library.cpp
@@ -20,7 +20,7 @@ Common::DynamicLibrary OpenLibrary() {
         // Use the libvulkan.dylib from the application bundle.
         const std::string filename =
             Common::FS::GetBundleDirectory() + "/Contents/Frameworks/libvulkan.dylib";
-        library.Open(filename.c_str());
+        void(library.Open(filename.c_str()));
     }
 #else
     std::string filename = Common::DynamicLibrary::GetVersionedFilename("vulkan", 1);
-- cgit v1.2.3
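
The final patch above leans on a small C++ idiom: when a call marked [[nodiscard]] is made purely as a best-effort fallback, casting the expression to void documents that the result is intentionally ignored and suppresses the compiler warning. The sketch below illustrates only that idiom; TryOpenLibrary and the library names are illustrative stand-ins, not yuzu's Common::DynamicLibrary API.

// discard_idiom_example.cpp -- minimal sketch of discarding a [[nodiscard]] result.
// TryOpenLibrary is a hypothetical stand-in, not yuzu's Common::DynamicLibrary::Open.
#include <cstdio>
#include <string>

[[nodiscard]] bool TryOpenLibrary(const std::string& path) {
    // Pretend only the versioned library name can be opened in this sketch.
    return path == "libvulkan.so.1";
}

int main() {
    // Primary attempt: the result matters, so it is checked.
    if (!TryOpenLibrary("libvulkan.so.1")) {
        // Best-effort fallback: failure is detected later by the caller, so the result is
        // deliberately discarded. void(...) (or static_cast<void>(...)) keeps the intent
        // visible while silencing the [[nodiscard]] warning.
        void(TryOpenLibrary("libvulkan.so"));
    }
    std::puts("library load attempted");
}

After this series, vulkan_library.cpp uses the same pattern for both unchecked fallback Open calls, so the [[nodiscard]] attribute still protects the call sites where the result genuinely matters.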