summaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
Diffstat (limited to 'src')
-rw-r--r--src/common/settings.h2
-rw-r--r--src/core/CMakeLists.txt1
-rw-r--r--src/core/hle/kernel/k_code_memory.cpp29
-rw-r--r--src/core/hle/kernel/k_code_memory.h6
-rw-r--r--src/core/hle/kernel/k_memory_manager.cpp8
-rw-r--r--src/core/hle/kernel/k_page_group.cpp121
-rw-r--r--src/core/hle/kernel/k_page_group.h163
-rw-r--r--src/core/hle/kernel/k_page_table.cpp142
-rw-r--r--src/core/hle/kernel/k_page_table.h9
-rw-r--r--src/core/hle/kernel/k_shared_memory.cpp19
-rw-r--r--src/core/hle/kernel/memory_types.h3
-rw-r--r--src/core/hle/kernel/svc.cpp2
-rw-r--r--src/input_common/drivers/tas_input.cpp12
-rw-r--r--src/input_common/drivers/tas_input.h2
-rw-r--r--src/video_core/renderer_vulkan/renderer_vulkan.cpp21
-rw-r--r--src/video_core/renderer_vulkan/vk_scheduler.cpp5
-rw-r--r--src/video_core/renderer_vulkan/vk_scheduler.h7
-rw-r--r--src/video_core/renderer_vulkan/vk_turbo_mode.cpp17
-rw-r--r--src/video_core/renderer_vulkan/vk_turbo_mode.h9
-rw-r--r--src/video_core/vulkan_common/vulkan_device.cpp1390
-rw-r--r--src/video_core/vulkan_common/vulkan_device.h442
-rw-r--r--src/video_core/vulkan_common/vulkan_wrapper.cpp10
-rw-r--r--src/video_core/vulkan_common/vulkan_wrapper.h12
-rw-r--r--src/yuzu/Info.plist2
-rw-r--r--src/yuzu/bootmanager.cpp19
-rw-r--r--src/yuzu/bootmanager.h4
-rw-r--r--src/yuzu/configuration/configure_graphics_advanced.cpp5
-rw-r--r--src/yuzu/main.cpp37
-rw-r--r--src/yuzu/main.h4
29 files changed, 1127 insertions, 1376 deletions
diff --git a/src/common/settings.h b/src/common/settings.h
index 9eb3711ca..a457e3f23 100644
--- a/src/common/settings.h
+++ b/src/common/settings.h
@@ -415,7 +415,7 @@ struct Values {
415 // Renderer 415 // Renderer
416 SwitchableSetting<RendererBackend, true> renderer_backend{ 416 SwitchableSetting<RendererBackend, true> renderer_backend{
417 RendererBackend::Vulkan, RendererBackend::OpenGL, RendererBackend::Null, "backend"}; 417 RendererBackend::Vulkan, RendererBackend::OpenGL, RendererBackend::Null, "backend"};
418 SwitchableSetting<bool> renderer_force_max_clock{true, "force_max_clock"}; 418 SwitchableSetting<bool> renderer_force_max_clock{false, "force_max_clock"};
419 Setting<bool> renderer_debug{false, "debug"}; 419 Setting<bool> renderer_debug{false, "debug"};
420 Setting<bool> renderer_shader_feedback{false, "shader_feedback"}; 420 Setting<bool> renderer_shader_feedback{false, "shader_feedback"};
421 Setting<bool> enable_nsight_aftermath{false, "nsight_aftermath"}; 421 Setting<bool> enable_nsight_aftermath{false, "nsight_aftermath"};
diff --git a/src/core/CMakeLists.txt b/src/core/CMakeLists.txt
index 0252c8c31..5afdeb5ff 100644
--- a/src/core/CMakeLists.txt
+++ b/src/core/CMakeLists.txt
@@ -226,6 +226,7 @@ add_library(core STATIC
226 hle/kernel/k_page_buffer.h 226 hle/kernel/k_page_buffer.h
227 hle/kernel/k_page_heap.cpp 227 hle/kernel/k_page_heap.cpp
228 hle/kernel/k_page_heap.h 228 hle/kernel/k_page_heap.h
229 hle/kernel/k_page_group.cpp
229 hle/kernel/k_page_group.h 230 hle/kernel/k_page_group.h
230 hle/kernel/k_page_table.cpp 231 hle/kernel/k_page_table.cpp
231 hle/kernel/k_page_table.h 232 hle/kernel/k_page_table.h
diff --git a/src/core/hle/kernel/k_code_memory.cpp b/src/core/hle/kernel/k_code_memory.cpp
index 4b1c134d4..d9da1e600 100644
--- a/src/core/hle/kernel/k_code_memory.cpp
+++ b/src/core/hle/kernel/k_code_memory.cpp
@@ -27,13 +27,13 @@ Result KCodeMemory::Initialize(Core::DeviceMemory& device_memory, VAddr addr, si
27 auto& page_table = m_owner->PageTable(); 27 auto& page_table = m_owner->PageTable();
28 28
29 // Construct the page group. 29 // Construct the page group.
30 m_page_group = {}; 30 m_page_group.emplace(kernel, page_table.GetBlockInfoManager());
31 31
32 // Lock the memory. 32 // Lock the memory.
33 R_TRY(page_table.LockForCodeMemory(&m_page_group, addr, size)) 33 R_TRY(page_table.LockForCodeMemory(std::addressof(*m_page_group), addr, size))
34 34
35 // Clear the memory. 35 // Clear the memory.
36 for (const auto& block : m_page_group.Nodes()) { 36 for (const auto& block : *m_page_group) {
37 std::memset(device_memory.GetPointer<void>(block.GetAddress()), 0xFF, block.GetSize()); 37 std::memset(device_memory.GetPointer<void>(block.GetAddress()), 0xFF, block.GetSize());
38 } 38 }
39 39
@@ -51,12 +51,13 @@ Result KCodeMemory::Initialize(Core::DeviceMemory& device_memory, VAddr addr, si
51void KCodeMemory::Finalize() { 51void KCodeMemory::Finalize() {
52 // Unlock. 52 // Unlock.
53 if (!m_is_mapped && !m_is_owner_mapped) { 53 if (!m_is_mapped && !m_is_owner_mapped) {
54 const size_t size = m_page_group.GetNumPages() * PageSize; 54 const size_t size = m_page_group->GetNumPages() * PageSize;
55 m_owner->PageTable().UnlockForCodeMemory(m_address, size, m_page_group); 55 m_owner->PageTable().UnlockForCodeMemory(m_address, size, *m_page_group);
56 } 56 }
57 57
58 // Close the page group. 58 // Close the page group.
59 m_page_group = {}; 59 m_page_group->Close();
60 m_page_group->Finalize();
60 61
61 // Close our reference to our owner. 62 // Close our reference to our owner.
62 m_owner->Close(); 63 m_owner->Close();
@@ -64,7 +65,7 @@ void KCodeMemory::Finalize() {
64 65
65Result KCodeMemory::Map(VAddr address, size_t size) { 66Result KCodeMemory::Map(VAddr address, size_t size) {
66 // Validate the size. 67 // Validate the size.
67 R_UNLESS(m_page_group.GetNumPages() == Common::DivideUp(size, PageSize), ResultInvalidSize); 68 R_UNLESS(m_page_group->GetNumPages() == Common::DivideUp(size, PageSize), ResultInvalidSize);
68 69
69 // Lock ourselves. 70 // Lock ourselves.
70 KScopedLightLock lk(m_lock); 71 KScopedLightLock lk(m_lock);
@@ -74,7 +75,7 @@ Result KCodeMemory::Map(VAddr address, size_t size) {
74 75
75 // Map the memory. 76 // Map the memory.
76 R_TRY(kernel.CurrentProcess()->PageTable().MapPages( 77 R_TRY(kernel.CurrentProcess()->PageTable().MapPages(
77 address, m_page_group, KMemoryState::CodeOut, KMemoryPermission::UserReadWrite)); 78 address, *m_page_group, KMemoryState::CodeOut, KMemoryPermission::UserReadWrite));
78 79
79 // Mark ourselves as mapped. 80 // Mark ourselves as mapped.
80 m_is_mapped = true; 81 m_is_mapped = true;
@@ -84,13 +85,13 @@ Result KCodeMemory::Map(VAddr address, size_t size) {
84 85
85Result KCodeMemory::Unmap(VAddr address, size_t size) { 86Result KCodeMemory::Unmap(VAddr address, size_t size) {
86 // Validate the size. 87 // Validate the size.
87 R_UNLESS(m_page_group.GetNumPages() == Common::DivideUp(size, PageSize), ResultInvalidSize); 88 R_UNLESS(m_page_group->GetNumPages() == Common::DivideUp(size, PageSize), ResultInvalidSize);
88 89
89 // Lock ourselves. 90 // Lock ourselves.
90 KScopedLightLock lk(m_lock); 91 KScopedLightLock lk(m_lock);
91 92
92 // Unmap the memory. 93 // Unmap the memory.
93 R_TRY(kernel.CurrentProcess()->PageTable().UnmapPages(address, m_page_group, 94 R_TRY(kernel.CurrentProcess()->PageTable().UnmapPages(address, *m_page_group,
94 KMemoryState::CodeOut)); 95 KMemoryState::CodeOut));
95 96
96 // Mark ourselves as unmapped. 97 // Mark ourselves as unmapped.
@@ -101,7 +102,7 @@ Result KCodeMemory::Unmap(VAddr address, size_t size) {
101 102
102Result KCodeMemory::MapToOwner(VAddr address, size_t size, Svc::MemoryPermission perm) { 103Result KCodeMemory::MapToOwner(VAddr address, size_t size, Svc::MemoryPermission perm) {
103 // Validate the size. 104 // Validate the size.
104 R_UNLESS(m_page_group.GetNumPages() == Common::DivideUp(size, PageSize), ResultInvalidSize); 105 R_UNLESS(m_page_group->GetNumPages() == Common::DivideUp(size, PageSize), ResultInvalidSize);
105 106
106 // Lock ourselves. 107 // Lock ourselves.
107 KScopedLightLock lk(m_lock); 108 KScopedLightLock lk(m_lock);
@@ -125,7 +126,7 @@ Result KCodeMemory::MapToOwner(VAddr address, size_t size, Svc::MemoryPermission
125 126
126 // Map the memory. 127 // Map the memory.
127 R_TRY( 128 R_TRY(
128 m_owner->PageTable().MapPages(address, m_page_group, KMemoryState::GeneratedCode, k_perm)); 129 m_owner->PageTable().MapPages(address, *m_page_group, KMemoryState::GeneratedCode, k_perm));
129 130
130 // Mark ourselves as mapped. 131 // Mark ourselves as mapped.
131 m_is_owner_mapped = true; 132 m_is_owner_mapped = true;
@@ -135,13 +136,13 @@ Result KCodeMemory::MapToOwner(VAddr address, size_t size, Svc::MemoryPermission
135 136
136Result KCodeMemory::UnmapFromOwner(VAddr address, size_t size) { 137Result KCodeMemory::UnmapFromOwner(VAddr address, size_t size) {
137 // Validate the size. 138 // Validate the size.
138 R_UNLESS(m_page_group.GetNumPages() == Common::DivideUp(size, PageSize), ResultInvalidSize); 139 R_UNLESS(m_page_group->GetNumPages() == Common::DivideUp(size, PageSize), ResultInvalidSize);
139 140
140 // Lock ourselves. 141 // Lock ourselves.
141 KScopedLightLock lk(m_lock); 142 KScopedLightLock lk(m_lock);
142 143
143 // Unmap the memory. 144 // Unmap the memory.
144 R_TRY(m_owner->PageTable().UnmapPages(address, m_page_group, KMemoryState::GeneratedCode)); 145 R_TRY(m_owner->PageTable().UnmapPages(address, *m_page_group, KMemoryState::GeneratedCode));
145 146
146 // Mark ourselves as unmapped. 147 // Mark ourselves as unmapped.
147 m_is_owner_mapped = false; 148 m_is_owner_mapped = false;
diff --git a/src/core/hle/kernel/k_code_memory.h b/src/core/hle/kernel/k_code_memory.h
index 2e7e1436a..5b260b385 100644
--- a/src/core/hle/kernel/k_code_memory.h
+++ b/src/core/hle/kernel/k_code_memory.h
@@ -3,6 +3,8 @@
3 3
4#pragma once 4#pragma once
5 5
6#include <optional>
7
6#include "common/common_types.h" 8#include "common/common_types.h"
7#include "core/device_memory.h" 9#include "core/device_memory.h"
8#include "core/hle/kernel/k_auto_object.h" 10#include "core/hle/kernel/k_auto_object.h"
@@ -49,11 +51,11 @@ public:
49 return m_address; 51 return m_address;
50 } 52 }
51 size_t GetSize() const { 53 size_t GetSize() const {
52 return m_is_initialized ? m_page_group.GetNumPages() * PageSize : 0; 54 return m_is_initialized ? m_page_group->GetNumPages() * PageSize : 0;
53 } 55 }
54 56
55private: 57private:
56 KPageGroup m_page_group{}; 58 std::optional<KPageGroup> m_page_group{};
57 KProcess* m_owner{}; 59 KProcess* m_owner{};
58 VAddr m_address{}; 60 VAddr m_address{};
59 KLightLock m_lock; 61 KLightLock m_lock;
diff --git a/src/core/hle/kernel/k_memory_manager.cpp b/src/core/hle/kernel/k_memory_manager.cpp
index bd33571da..cd6ea388e 100644
--- a/src/core/hle/kernel/k_memory_manager.cpp
+++ b/src/core/hle/kernel/k_memory_manager.cpp
@@ -223,7 +223,7 @@ Result KMemoryManager::AllocatePageGroupImpl(KPageGroup* out, size_t num_pages,
223 223
224 // Ensure that we don't leave anything un-freed. 224 // Ensure that we don't leave anything un-freed.
225 ON_RESULT_FAILURE { 225 ON_RESULT_FAILURE {
226 for (const auto& it : out->Nodes()) { 226 for (const auto& it : *out) {
227 auto& manager = this->GetManager(it.GetAddress()); 227 auto& manager = this->GetManager(it.GetAddress());
228 const size_t node_num_pages = std::min<u64>( 228 const size_t node_num_pages = std::min<u64>(
229 it.GetNumPages(), (manager.GetEndAddress() - it.GetAddress()) / PageSize); 229 it.GetNumPages(), (manager.GetEndAddress() - it.GetAddress()) / PageSize);
@@ -285,7 +285,7 @@ Result KMemoryManager::AllocateAndOpen(KPageGroup* out, size_t num_pages, u32 op
285 m_has_optimized_process[static_cast<size_t>(pool)], true)); 285 m_has_optimized_process[static_cast<size_t>(pool)], true));
286 286
287 // Open the first reference to the pages. 287 // Open the first reference to the pages.
288 for (const auto& block : out->Nodes()) { 288 for (const auto& block : *out) {
289 PAddr cur_address = block.GetAddress(); 289 PAddr cur_address = block.GetAddress();
290 size_t remaining_pages = block.GetNumPages(); 290 size_t remaining_pages = block.GetNumPages();
291 while (remaining_pages > 0) { 291 while (remaining_pages > 0) {
@@ -335,7 +335,7 @@ Result KMemoryManager::AllocateForProcess(KPageGroup* out, size_t num_pages, u32
335 // Perform optimized memory tracking, if we should. 335 // Perform optimized memory tracking, if we should.
336 if (optimized) { 336 if (optimized) {
337 // Iterate over the allocated blocks. 337 // Iterate over the allocated blocks.
338 for (const auto& block : out->Nodes()) { 338 for (const auto& block : *out) {
339 // Get the block extents. 339 // Get the block extents.
340 const PAddr block_address = block.GetAddress(); 340 const PAddr block_address = block.GetAddress();
341 const size_t block_pages = block.GetNumPages(); 341 const size_t block_pages = block.GetNumPages();
@@ -391,7 +391,7 @@ Result KMemoryManager::AllocateForProcess(KPageGroup* out, size_t num_pages, u32
391 } 391 }
392 } else { 392 } else {
393 // Set all the allocated memory. 393 // Set all the allocated memory.
394 for (const auto& block : out->Nodes()) { 394 for (const auto& block : *out) {
395 std::memset(m_system.DeviceMemory().GetPointer<void>(block.GetAddress()), fill_pattern, 395 std::memset(m_system.DeviceMemory().GetPointer<void>(block.GetAddress()), fill_pattern,
396 block.GetSize()); 396 block.GetSize());
397 } 397 }
diff --git a/src/core/hle/kernel/k_page_group.cpp b/src/core/hle/kernel/k_page_group.cpp
new file mode 100644
index 000000000..d8c644a33
--- /dev/null
+++ b/src/core/hle/kernel/k_page_group.cpp
@@ -0,0 +1,121 @@
1// SPDX-FileCopyrightText: Copyright 2022 yuzu Emulator Project
2// SPDX-License-Identifier: GPL-2.0-or-later
3
4#include "core/hle/kernel/k_dynamic_resource_manager.h"
5#include "core/hle/kernel/k_memory_manager.h"
6#include "core/hle/kernel/k_page_group.h"
7#include "core/hle/kernel/kernel.h"
8#include "core/hle/kernel/svc_results.h"
9
10namespace Kernel {
11
12void KPageGroup::Finalize() {
13 KBlockInfo* cur = m_first_block;
14 while (cur != nullptr) {
15 KBlockInfo* next = cur->GetNext();
16 m_manager->Free(cur);
17 cur = next;
18 }
19
20 m_first_block = nullptr;
21 m_last_block = nullptr;
22}
23
24void KPageGroup::CloseAndReset() {
25 auto& mm = m_kernel.MemoryManager();
26
27 KBlockInfo* cur = m_first_block;
28 while (cur != nullptr) {
29 KBlockInfo* next = cur->GetNext();
30 mm.Close(cur->GetAddress(), cur->GetNumPages());
31 m_manager->Free(cur);
32 cur = next;
33 }
34
35 m_first_block = nullptr;
36 m_last_block = nullptr;
37}
38
39size_t KPageGroup::GetNumPages() const {
40 size_t num_pages = 0;
41
42 for (const auto& it : *this) {
43 num_pages += it.GetNumPages();
44 }
45
46 return num_pages;
47}
48
49Result KPageGroup::AddBlock(KPhysicalAddress addr, size_t num_pages) {
50 // Succeed immediately if we're adding no pages.
51 R_SUCCEED_IF(num_pages == 0);
52
53 // Check for overflow.
54 ASSERT(addr < addr + num_pages * PageSize);
55
56 // Try to just append to the last block.
57 if (m_last_block != nullptr) {
58 R_SUCCEED_IF(m_last_block->TryConcatenate(addr, num_pages));
59 }
60
61 // Allocate a new block.
62 KBlockInfo* new_block = m_manager->Allocate();
63 R_UNLESS(new_block != nullptr, ResultOutOfResource);
64
65 // Initialize the block.
66 new_block->Initialize(addr, num_pages);
67
68 // Add the block to our list.
69 if (m_last_block != nullptr) {
70 m_last_block->SetNext(new_block);
71 } else {
72 m_first_block = new_block;
73 }
74 m_last_block = new_block;
75
76 R_SUCCEED();
77}
78
79void KPageGroup::Open() const {
80 auto& mm = m_kernel.MemoryManager();
81
82 for (const auto& it : *this) {
83 mm.Open(it.GetAddress(), it.GetNumPages());
84 }
85}
86
87void KPageGroup::OpenFirst() const {
88 auto& mm = m_kernel.MemoryManager();
89
90 for (const auto& it : *this) {
91 mm.OpenFirst(it.GetAddress(), it.GetNumPages());
92 }
93}
94
95void KPageGroup::Close() const {
96 auto& mm = m_kernel.MemoryManager();
97
98 for (const auto& it : *this) {
99 mm.Close(it.GetAddress(), it.GetNumPages());
100 }
101}
102
103bool KPageGroup::IsEquivalentTo(const KPageGroup& rhs) const {
104 auto lit = this->begin();
105 auto rit = rhs.begin();
106 auto lend = this->end();
107 auto rend = rhs.end();
108
109 while (lit != lend && rit != rend) {
110 if (*lit != *rit) {
111 return false;
112 }
113
114 ++lit;
115 ++rit;
116 }
117
118 return lit == lend && rit == rend;
119}
120
121} // namespace Kernel
diff --git a/src/core/hle/kernel/k_page_group.h b/src/core/hle/kernel/k_page_group.h
index 316f172f2..c07f17663 100644
--- a/src/core/hle/kernel/k_page_group.h
+++ b/src/core/hle/kernel/k_page_group.h
@@ -1,4 +1,4 @@
1// SPDX-FileCopyrightText: Copyright 2020 yuzu Emulator Project 1// SPDX-FileCopyrightText: Copyright 2022 yuzu Emulator Project
2// SPDX-License-Identifier: GPL-2.0-or-later 2// SPDX-License-Identifier: GPL-2.0-or-later
3 3
4#pragma once 4#pragma once
@@ -13,24 +13,23 @@
13 13
14namespace Kernel { 14namespace Kernel {
15 15
16class KBlockInfoManager;
17class KernelCore;
16class KPageGroup; 18class KPageGroup;
17 19
18class KBlockInfo { 20class KBlockInfo {
19private:
20 friend class KPageGroup;
21
22public: 21public:
23 constexpr KBlockInfo() = default; 22 constexpr explicit KBlockInfo() : m_next(nullptr) {}
24 23
25 constexpr void Initialize(PAddr addr, size_t np) { 24 constexpr void Initialize(KPhysicalAddress addr, size_t np) {
26 ASSERT(Common::IsAligned(addr, PageSize)); 25 ASSERT(Common::IsAligned(addr, PageSize));
27 ASSERT(static_cast<u32>(np) == np); 26 ASSERT(static_cast<u32>(np) == np);
28 27
29 m_page_index = static_cast<u32>(addr) / PageSize; 28 m_page_index = static_cast<u32>(addr / PageSize);
30 m_num_pages = static_cast<u32>(np); 29 m_num_pages = static_cast<u32>(np);
31 } 30 }
32 31
33 constexpr PAddr GetAddress() const { 32 constexpr KPhysicalAddress GetAddress() const {
34 return m_page_index * PageSize; 33 return m_page_index * PageSize;
35 } 34 }
36 constexpr size_t GetNumPages() const { 35 constexpr size_t GetNumPages() const {
@@ -39,10 +38,10 @@ public:
39 constexpr size_t GetSize() const { 38 constexpr size_t GetSize() const {
40 return this->GetNumPages() * PageSize; 39 return this->GetNumPages() * PageSize;
41 } 40 }
42 constexpr PAddr GetEndAddress() const { 41 constexpr KPhysicalAddress GetEndAddress() const {
43 return (m_page_index + m_num_pages) * PageSize; 42 return (m_page_index + m_num_pages) * PageSize;
44 } 43 }
45 constexpr PAddr GetLastAddress() const { 44 constexpr KPhysicalAddress GetLastAddress() const {
46 return this->GetEndAddress() - 1; 45 return this->GetEndAddress() - 1;
47 } 46 }
48 47
@@ -62,8 +61,8 @@ public:
62 return !(*this == rhs); 61 return !(*this == rhs);
63 } 62 }
64 63
65 constexpr bool IsStrictlyBefore(PAddr addr) const { 64 constexpr bool IsStrictlyBefore(KPhysicalAddress addr) const {
66 const PAddr end = this->GetEndAddress(); 65 const KPhysicalAddress end = this->GetEndAddress();
67 66
68 if (m_page_index != 0 && end == 0) { 67 if (m_page_index != 0 && end == 0) {
69 return false; 68 return false;
@@ -72,11 +71,11 @@ public:
72 return end < addr; 71 return end < addr;
73 } 72 }
74 73
75 constexpr bool operator<(PAddr addr) const { 74 constexpr bool operator<(KPhysicalAddress addr) const {
76 return this->IsStrictlyBefore(addr); 75 return this->IsStrictlyBefore(addr);
77 } 76 }
78 77
79 constexpr bool TryConcatenate(PAddr addr, size_t np) { 78 constexpr bool TryConcatenate(KPhysicalAddress addr, size_t np) {
80 if (addr != 0 && addr == this->GetEndAddress()) { 79 if (addr != 0 && addr == this->GetEndAddress()) {
81 m_num_pages += static_cast<u32>(np); 80 m_num_pages += static_cast<u32>(np);
82 return true; 81 return true;
@@ -90,96 +89,118 @@ private:
90 } 89 }
91 90
92private: 91private:
92 friend class KPageGroup;
93
93 KBlockInfo* m_next{}; 94 KBlockInfo* m_next{};
94 u32 m_page_index{}; 95 u32 m_page_index{};
95 u32 m_num_pages{}; 96 u32 m_num_pages{};
96}; 97};
97static_assert(sizeof(KBlockInfo) <= 0x10); 98static_assert(sizeof(KBlockInfo) <= 0x10);
98 99
99class KPageGroup final { 100class KPageGroup {
100public: 101public:
101 class Node final { 102 class Iterator {
102 public: 103 public:
103 constexpr Node(u64 addr_, std::size_t num_pages_) : addr{addr_}, num_pages{num_pages_} {} 104 using iterator_category = std::forward_iterator_tag;
105 using value_type = const KBlockInfo;
106 using difference_type = std::ptrdiff_t;
107 using pointer = value_type*;
108 using reference = value_type&;
109
110 constexpr explicit Iterator(pointer n) : m_node(n) {}
111
112 constexpr bool operator==(const Iterator& rhs) const {
113 return m_node == rhs.m_node;
114 }
115 constexpr bool operator!=(const Iterator& rhs) const {
116 return !(*this == rhs);
117 }
104 118
105 constexpr u64 GetAddress() const { 119 constexpr pointer operator->() const {
106 return addr; 120 return m_node;
121 }
122 constexpr reference operator*() const {
123 return *m_node;
107 } 124 }
108 125
109 constexpr std::size_t GetNumPages() const { 126 constexpr Iterator& operator++() {
110 return num_pages; 127 m_node = m_node->GetNext();
128 return *this;
111 } 129 }
112 130
113 constexpr std::size_t GetSize() const { 131 constexpr Iterator operator++(int) {
114 return GetNumPages() * PageSize; 132 const Iterator it{*this};
133 ++(*this);
134 return it;
115 } 135 }
116 136
117 private: 137 private:
118 u64 addr{}; 138 pointer m_node{};
119 std::size_t num_pages{};
120 }; 139 };
121 140
122public: 141 explicit KPageGroup(KernelCore& kernel, KBlockInfoManager* m)
123 KPageGroup() = default; 142 : m_kernel{kernel}, m_manager{m} {}
124 KPageGroup(u64 address, u64 num_pages) { 143 ~KPageGroup() {
125 ASSERT(AddBlock(address, num_pages).IsSuccess()); 144 this->Finalize();
126 } 145 }
127 146
128 constexpr std::list<Node>& Nodes() { 147 void CloseAndReset();
129 return nodes; 148 void Finalize();
130 }
131 149
132 constexpr const std::list<Node>& Nodes() const { 150 Iterator begin() const {
133 return nodes; 151 return Iterator{m_first_block};
152 }
153 Iterator end() const {
154 return Iterator{nullptr};
155 }
156 bool empty() const {
157 return m_first_block == nullptr;
134 } 158 }
135 159
136 std::size_t GetNumPages() const { 160 Result AddBlock(KPhysicalAddress addr, size_t num_pages);
137 std::size_t num_pages = 0; 161 void Open() const;
138 for (const Node& node : nodes) { 162 void OpenFirst() const;
139 num_pages += node.GetNumPages(); 163 void Close() const;
140 } 164
141 return num_pages; 165 size_t GetNumPages() const;
142 } 166
143 167 bool IsEquivalentTo(const KPageGroup& rhs) const;
144 bool IsEqual(KPageGroup& other) const { 168
145 auto this_node = nodes.begin(); 169 bool operator==(const KPageGroup& rhs) const {
146 auto other_node = other.nodes.begin(); 170 return this->IsEquivalentTo(rhs);
147 while (this_node != nodes.end() && other_node != other.nodes.end()) { 171 }
148 if (this_node->GetAddress() != other_node->GetAddress() ||
149 this_node->GetNumPages() != other_node->GetNumPages()) {
150 return false;
151 }
152 this_node = std::next(this_node);
153 other_node = std::next(other_node);
154 }
155 172
156 return this_node == nodes.end() && other_node == other.nodes.end(); 173 bool operator!=(const KPageGroup& rhs) const {
174 return !(*this == rhs);
157 } 175 }
158 176
159 Result AddBlock(u64 address, u64 num_pages) { 177private:
160 if (!num_pages) { 178 KernelCore& m_kernel;
161 return ResultSuccess; 179 KBlockInfo* m_first_block{};
180 KBlockInfo* m_last_block{};
181 KBlockInfoManager* m_manager{};
182};
183
184class KScopedPageGroup {
185public:
186 explicit KScopedPageGroup(const KPageGroup* gp) : m_pg(gp) {
187 if (m_pg) {
188 m_pg->Open();
162 } 189 }
163 if (!nodes.empty()) { 190 }
164 const auto node = nodes.back(); 191 explicit KScopedPageGroup(const KPageGroup& gp) : KScopedPageGroup(std::addressof(gp)) {}
165 if (node.GetAddress() + node.GetNumPages() * PageSize == address) { 192 ~KScopedPageGroup() {
166 address = node.GetAddress(); 193 if (m_pg) {
167 num_pages += node.GetNumPages(); 194 m_pg->Close();
168 nodes.pop_back();
169 }
170 } 195 }
171 nodes.push_back({address, num_pages});
172 return ResultSuccess;
173 } 196 }
174 197
175 bool Empty() const { 198 void CancelClose() {
176 return nodes.empty(); 199 m_pg = nullptr;
177 } 200 }
178 201
179 void Finalize() {}
180
181private: 202private:
182 std::list<Node> nodes; 203 const KPageGroup* m_pg{};
183}; 204};
184 205
185} // namespace Kernel 206} // namespace Kernel
diff --git a/src/core/hle/kernel/k_page_table.cpp b/src/core/hle/kernel/k_page_table.cpp
index 612fc76fa..9c7ac22dc 100644
--- a/src/core/hle/kernel/k_page_table.cpp
+++ b/src/core/hle/kernel/k_page_table.cpp
@@ -100,7 +100,7 @@ constexpr size_t GetAddressSpaceWidthFromType(FileSys::ProgramAddressSpaceType a
100 100
101KPageTable::KPageTable(Core::System& system_) 101KPageTable::KPageTable(Core::System& system_)
102 : m_general_lock{system_.Kernel()}, 102 : m_general_lock{system_.Kernel()},
103 m_map_physical_memory_lock{system_.Kernel()}, m_system{system_} {} 103 m_map_physical_memory_lock{system_.Kernel()}, m_system{system_}, m_kernel{system_.Kernel()} {}
104 104
105KPageTable::~KPageTable() = default; 105KPageTable::~KPageTable() = default;
106 106
@@ -373,7 +373,7 @@ Result KPageTable::MapProcessCode(VAddr addr, size_t num_pages, KMemoryState sta
373 m_memory_block_slab_manager); 373 m_memory_block_slab_manager);
374 374
375 // Allocate and open. 375 // Allocate and open.
376 KPageGroup pg; 376 KPageGroup pg{m_kernel, m_block_info_manager};
377 R_TRY(m_system.Kernel().MemoryManager().AllocateAndOpen( 377 R_TRY(m_system.Kernel().MemoryManager().AllocateAndOpen(
378 &pg, num_pages, 378 &pg, num_pages,
379 KMemoryManager::EncodeOption(KMemoryManager::Pool::Application, m_allocation_option))); 379 KMemoryManager::EncodeOption(KMemoryManager::Pool::Application, m_allocation_option)));
@@ -432,7 +432,7 @@ Result KPageTable::MapCodeMemory(VAddr dst_address, VAddr src_address, size_t si
432 const size_t num_pages = size / PageSize; 432 const size_t num_pages = size / PageSize;
433 433
434 // Create page groups for the memory being mapped. 434 // Create page groups for the memory being mapped.
435 KPageGroup pg; 435 KPageGroup pg{m_kernel, m_block_info_manager};
436 AddRegionToPages(src_address, num_pages, pg); 436 AddRegionToPages(src_address, num_pages, pg);
437 437
438 // Reprotect the source as kernel-read/not mapped. 438 // Reprotect the source as kernel-read/not mapped.
@@ -593,7 +593,7 @@ Result KPageTable::MakePageGroup(KPageGroup& pg, VAddr addr, size_t num_pages) {
593 const size_t size = num_pages * PageSize; 593 const size_t size = num_pages * PageSize;
594 594
595 // We're making a new group, not adding to an existing one. 595 // We're making a new group, not adding to an existing one.
596 R_UNLESS(pg.Empty(), ResultInvalidCurrentMemory); 596 R_UNLESS(pg.empty(), ResultInvalidCurrentMemory);
597 597
598 // Begin traversal. 598 // Begin traversal.
599 Common::PageTable::TraversalContext context; 599 Common::PageTable::TraversalContext context;
@@ -640,11 +640,10 @@ Result KPageTable::MakePageGroup(KPageGroup& pg, VAddr addr, size_t num_pages) {
640 R_SUCCEED(); 640 R_SUCCEED();
641} 641}
642 642
643bool KPageTable::IsValidPageGroup(const KPageGroup& pg_ll, VAddr addr, size_t num_pages) { 643bool KPageTable::IsValidPageGroup(const KPageGroup& pg, VAddr addr, size_t num_pages) {
644 ASSERT(this->IsLockedByCurrentThread()); 644 ASSERT(this->IsLockedByCurrentThread());
645 645
646 const size_t size = num_pages * PageSize; 646 const size_t size = num_pages * PageSize;
647 const auto& pg = pg_ll.Nodes();
648 const auto& memory_layout = m_system.Kernel().MemoryLayout(); 647 const auto& memory_layout = m_system.Kernel().MemoryLayout();
649 648
650 // Empty groups are necessarily invalid. 649 // Empty groups are necessarily invalid.
@@ -942,9 +941,6 @@ Result KPageTable::SetupForIpcServer(VAddr* out_addr, size_t size, VAddr src_add
942 941
943 ON_RESULT_FAILURE { 942 ON_RESULT_FAILURE {
944 if (cur_mapped_addr != dst_addr) { 943 if (cur_mapped_addr != dst_addr) {
945 // HACK: Manually close the pages.
946 HACK_ClosePages(dst_addr, (cur_mapped_addr - dst_addr) / PageSize);
947
948 ASSERT(Operate(dst_addr, (cur_mapped_addr - dst_addr) / PageSize, 944 ASSERT(Operate(dst_addr, (cur_mapped_addr - dst_addr) / PageSize,
949 KMemoryPermission::None, OperationType::Unmap) 945 KMemoryPermission::None, OperationType::Unmap)
950 .IsSuccess()); 946 .IsSuccess());
@@ -1020,9 +1016,6 @@ Result KPageTable::SetupForIpcServer(VAddr* out_addr, size_t size, VAddr src_add
1020 // Map the page. 1016 // Map the page.
1021 R_TRY(Operate(cur_mapped_addr, 1, test_perm, OperationType::Map, start_partial_page)); 1017 R_TRY(Operate(cur_mapped_addr, 1, test_perm, OperationType::Map, start_partial_page));
1022 1018
1023 // HACK: Manually open the pages.
1024 HACK_OpenPages(start_partial_page, 1);
1025
1026 // Update tracking extents. 1019 // Update tracking extents.
1027 cur_mapped_addr += PageSize; 1020 cur_mapped_addr += PageSize;
1028 cur_block_addr += PageSize; 1021 cur_block_addr += PageSize;
@@ -1051,9 +1044,6 @@ Result KPageTable::SetupForIpcServer(VAddr* out_addr, size_t size, VAddr src_add
1051 R_TRY(Operate(cur_mapped_addr, cur_block_size / PageSize, test_perm, OperationType::Map, 1044 R_TRY(Operate(cur_mapped_addr, cur_block_size / PageSize, test_perm, OperationType::Map,
1052 cur_block_addr)); 1045 cur_block_addr));
1053 1046
1054 // HACK: Manually open the pages.
1055 HACK_OpenPages(cur_block_addr, cur_block_size / PageSize);
1056
1057 // Update tracking extents. 1047 // Update tracking extents.
1058 cur_mapped_addr += cur_block_size; 1048 cur_mapped_addr += cur_block_size;
1059 cur_block_addr = next_entry.phys_addr; 1049 cur_block_addr = next_entry.phys_addr;
@@ -1073,9 +1063,6 @@ Result KPageTable::SetupForIpcServer(VAddr* out_addr, size_t size, VAddr src_add
1073 R_TRY(Operate(cur_mapped_addr, last_block_size / PageSize, test_perm, OperationType::Map, 1063 R_TRY(Operate(cur_mapped_addr, last_block_size / PageSize, test_perm, OperationType::Map,
1074 cur_block_addr)); 1064 cur_block_addr));
1075 1065
1076 // HACK: Manually open the pages.
1077 HACK_OpenPages(cur_block_addr, last_block_size / PageSize);
1078
1079 // Update tracking extents. 1066 // Update tracking extents.
1080 cur_mapped_addr += last_block_size; 1067 cur_mapped_addr += last_block_size;
1081 cur_block_addr += last_block_size; 1068 cur_block_addr += last_block_size;
@@ -1107,9 +1094,6 @@ Result KPageTable::SetupForIpcServer(VAddr* out_addr, size_t size, VAddr src_add
1107 1094
1108 // Map the page. 1095 // Map the page.
1109 R_TRY(Operate(cur_mapped_addr, 1, test_perm, OperationType::Map, end_partial_page)); 1096 R_TRY(Operate(cur_mapped_addr, 1, test_perm, OperationType::Map, end_partial_page));
1110
1111 // HACK: Manually open the pages.
1112 HACK_OpenPages(end_partial_page, 1);
1113 } 1097 }
1114 1098
1115 // Update memory blocks to reflect our changes 1099 // Update memory blocks to reflect our changes
@@ -1211,9 +1195,6 @@ Result KPageTable::CleanupForIpcServer(VAddr address, size_t size, KMemoryState
1211 const size_t aligned_size = aligned_end - aligned_start; 1195 const size_t aligned_size = aligned_end - aligned_start;
1212 const size_t aligned_num_pages = aligned_size / PageSize; 1196 const size_t aligned_num_pages = aligned_size / PageSize;
1213 1197
1214 // HACK: Manually close the pages.
1215 HACK_ClosePages(aligned_start, aligned_num_pages);
1216
1217 // Unmap the pages. 1198 // Unmap the pages.
1218 R_TRY(Operate(aligned_start, aligned_num_pages, KMemoryPermission::None, OperationType::Unmap)); 1199 R_TRY(Operate(aligned_start, aligned_num_pages, KMemoryPermission::None, OperationType::Unmap));
1219 1200
@@ -1501,17 +1482,6 @@ void KPageTable::CleanupForIpcClientOnServerSetupFailure([[maybe_unused]] PageLi
1501 } 1482 }
1502} 1483}
1503 1484
1504void KPageTable::HACK_OpenPages(PAddr phys_addr, size_t num_pages) {
1505 m_system.Kernel().MemoryManager().OpenFirst(phys_addr, num_pages);
1506}
1507
1508void KPageTable::HACK_ClosePages(VAddr virt_addr, size_t num_pages) {
1509 for (size_t index = 0; index < num_pages; ++index) {
1510 const auto paddr = GetPhysicalAddr(virt_addr + (index * PageSize));
1511 m_system.Kernel().MemoryManager().Close(paddr, 1);
1512 }
1513}
1514
1515Result KPageTable::MapPhysicalMemory(VAddr address, size_t size) { 1485Result KPageTable::MapPhysicalMemory(VAddr address, size_t size) {
1516 // Lock the physical memory lock. 1486 // Lock the physical memory lock.
1517 KScopedLightLock phys_lk(m_map_physical_memory_lock); 1487 KScopedLightLock phys_lk(m_map_physical_memory_lock);
@@ -1572,7 +1542,7 @@ Result KPageTable::MapPhysicalMemory(VAddr address, size_t size) {
1572 R_UNLESS(memory_reservation.Succeeded(), ResultLimitReached); 1542 R_UNLESS(memory_reservation.Succeeded(), ResultLimitReached);
1573 1543
1574 // Allocate pages for the new memory. 1544 // Allocate pages for the new memory.
1575 KPageGroup pg; 1545 KPageGroup pg{m_kernel, m_block_info_manager};
1576 R_TRY(m_system.Kernel().MemoryManager().AllocateForProcess( 1546 R_TRY(m_system.Kernel().MemoryManager().AllocateForProcess(
1577 &pg, (size - mapped_size) / PageSize, m_allocate_option, 0, 0)); 1547 &pg, (size - mapped_size) / PageSize, m_allocate_option, 0, 0));
1578 1548
@@ -1650,7 +1620,7 @@ Result KPageTable::MapPhysicalMemory(VAddr address, size_t size) {
1650 KScopedPageTableUpdater updater(this); 1620 KScopedPageTableUpdater updater(this);
1651 1621
1652 // Prepare to iterate over the memory. 1622 // Prepare to iterate over the memory.
1653 auto pg_it = pg.Nodes().begin(); 1623 auto pg_it = pg.begin();
1654 PAddr pg_phys_addr = pg_it->GetAddress(); 1624 PAddr pg_phys_addr = pg_it->GetAddress();
1655 size_t pg_pages = pg_it->GetNumPages(); 1625 size_t pg_pages = pg_it->GetNumPages();
1656 1626
@@ -1680,9 +1650,6 @@ Result KPageTable::MapPhysicalMemory(VAddr address, size_t size) {
1680 last_unmap_address + 1 - cur_address) / 1650 last_unmap_address + 1 - cur_address) /
1681 PageSize; 1651 PageSize;
1682 1652
1683 // HACK: Manually close the pages.
1684 HACK_ClosePages(cur_address, cur_pages);
1685
1686 // Unmap. 1653 // Unmap.
1687 ASSERT(Operate(cur_address, cur_pages, KMemoryPermission::None, 1654 ASSERT(Operate(cur_address, cur_pages, KMemoryPermission::None,
1688 OperationType::Unmap) 1655 OperationType::Unmap)
@@ -1703,7 +1670,7 @@ Result KPageTable::MapPhysicalMemory(VAddr address, size_t size) {
1703 // Release any remaining unmapped memory. 1670 // Release any remaining unmapped memory.
1704 m_system.Kernel().MemoryManager().OpenFirst(pg_phys_addr, pg_pages); 1671 m_system.Kernel().MemoryManager().OpenFirst(pg_phys_addr, pg_pages);
1705 m_system.Kernel().MemoryManager().Close(pg_phys_addr, pg_pages); 1672 m_system.Kernel().MemoryManager().Close(pg_phys_addr, pg_pages);
1706 for (++pg_it; pg_it != pg.Nodes().end(); ++pg_it) { 1673 for (++pg_it; pg_it != pg.end(); ++pg_it) {
1707 m_system.Kernel().MemoryManager().OpenFirst(pg_it->GetAddress(), 1674 m_system.Kernel().MemoryManager().OpenFirst(pg_it->GetAddress(),
1708 pg_it->GetNumPages()); 1675 pg_it->GetNumPages());
1709 m_system.Kernel().MemoryManager().Close(pg_it->GetAddress(), 1676 m_system.Kernel().MemoryManager().Close(pg_it->GetAddress(),
@@ -1731,7 +1698,7 @@ Result KPageTable::MapPhysicalMemory(VAddr address, size_t size) {
1731 // Check if we're at the end of the physical block. 1698 // Check if we're at the end of the physical block.
1732 if (pg_pages == 0) { 1699 if (pg_pages == 0) {
1733 // Ensure there are more pages to map. 1700 // Ensure there are more pages to map.
1734 ASSERT(pg_it != pg.Nodes().end()); 1701 ASSERT(pg_it != pg.end());
1735 1702
1736 // Advance our physical block. 1703 // Advance our physical block.
1737 ++pg_it; 1704 ++pg_it;
@@ -1742,10 +1709,7 @@ Result KPageTable::MapPhysicalMemory(VAddr address, size_t size) {
1742 // Map whatever we can. 1709 // Map whatever we can.
1743 const size_t cur_pages = std::min(pg_pages, map_pages); 1710 const size_t cur_pages = std::min(pg_pages, map_pages);
1744 R_TRY(Operate(cur_address, cur_pages, KMemoryPermission::UserReadWrite, 1711 R_TRY(Operate(cur_address, cur_pages, KMemoryPermission::UserReadWrite,
1745 OperationType::Map, pg_phys_addr)); 1712 OperationType::MapFirst, pg_phys_addr));
1746
1747 // HACK: Manually open the pages.
1748 HACK_OpenPages(pg_phys_addr, cur_pages);
1749 1713
1750 // Advance. 1714 // Advance.
1751 cur_address += cur_pages * PageSize; 1715 cur_address += cur_pages * PageSize;
@@ -1888,9 +1852,6 @@ Result KPageTable::UnmapPhysicalMemory(VAddr address, size_t size) {
1888 last_address + 1 - cur_address) / 1852 last_address + 1 - cur_address) /
1889 PageSize; 1853 PageSize;
1890 1854
1891 // HACK: Manually close the pages.
1892 HACK_ClosePages(cur_address, cur_pages);
1893
1894 // Unmap. 1855 // Unmap.
1895 ASSERT(Operate(cur_address, cur_pages, KMemoryPermission::None, OperationType::Unmap) 1856 ASSERT(Operate(cur_address, cur_pages, KMemoryPermission::None, OperationType::Unmap)
1896 .IsSuccess()); 1857 .IsSuccess());
@@ -1955,7 +1916,7 @@ Result KPageTable::MapMemory(VAddr dst_address, VAddr src_address, size_t size)
1955 R_TRY(dst_allocator_result); 1916 R_TRY(dst_allocator_result);
1956 1917
1957 // Map the memory. 1918 // Map the memory.
1958 KPageGroup page_linked_list; 1919 KPageGroup page_linked_list{m_kernel, m_block_info_manager};
1959 const size_t num_pages{size / PageSize}; 1920 const size_t num_pages{size / PageSize};
1960 const KMemoryPermission new_src_perm = static_cast<KMemoryPermission>( 1921 const KMemoryPermission new_src_perm = static_cast<KMemoryPermission>(
1961 KMemoryPermission::KernelRead | KMemoryPermission::NotMapped); 1922 KMemoryPermission::KernelRead | KMemoryPermission::NotMapped);
@@ -2022,14 +1983,14 @@ Result KPageTable::UnmapMemory(VAddr dst_address, VAddr src_address, size_t size
2022 num_dst_allocator_blocks); 1983 num_dst_allocator_blocks);
2023 R_TRY(dst_allocator_result); 1984 R_TRY(dst_allocator_result);
2024 1985
2025 KPageGroup src_pages; 1986 KPageGroup src_pages{m_kernel, m_block_info_manager};
2026 KPageGroup dst_pages; 1987 KPageGroup dst_pages{m_kernel, m_block_info_manager};
2027 const size_t num_pages{size / PageSize}; 1988 const size_t num_pages{size / PageSize};
2028 1989
2029 AddRegionToPages(src_address, num_pages, src_pages); 1990 AddRegionToPages(src_address, num_pages, src_pages);
2030 AddRegionToPages(dst_address, num_pages, dst_pages); 1991 AddRegionToPages(dst_address, num_pages, dst_pages);
2031 1992
2032 R_UNLESS(dst_pages.IsEqual(src_pages), ResultInvalidMemoryRegion); 1993 R_UNLESS(dst_pages.IsEquivalentTo(src_pages), ResultInvalidMemoryRegion);
2033 1994
2034 { 1995 {
2035 auto block_guard = detail::ScopeExit([&] { MapPages(dst_address, dst_pages, dst_perm); }); 1996 auto block_guard = detail::ScopeExit([&] { MapPages(dst_address, dst_pages, dst_perm); });
@@ -2060,7 +2021,7 @@ Result KPageTable::MapPages(VAddr addr, const KPageGroup& page_linked_list,
2060 2021
2061 VAddr cur_addr{addr}; 2022 VAddr cur_addr{addr};
2062 2023
2063 for (const auto& node : page_linked_list.Nodes()) { 2024 for (const auto& node : page_linked_list) {
2064 if (const auto result{ 2025 if (const auto result{
2065 Operate(cur_addr, node.GetNumPages(), perm, OperationType::Map, node.GetAddress())}; 2026 Operate(cur_addr, node.GetNumPages(), perm, OperationType::Map, node.GetAddress())};
2066 result.IsError()) { 2027 result.IsError()) {
@@ -2160,7 +2121,7 @@ Result KPageTable::UnmapPages(VAddr addr, const KPageGroup& page_linked_list) {
2160 2121
2161 VAddr cur_addr{addr}; 2122 VAddr cur_addr{addr};
2162 2123
2163 for (const auto& node : page_linked_list.Nodes()) { 2124 for (const auto& node : page_linked_list) {
2164 if (const auto result{Operate(cur_addr, node.GetNumPages(), KMemoryPermission::None, 2125 if (const auto result{Operate(cur_addr, node.GetNumPages(), KMemoryPermission::None,
2165 OperationType::Unmap)}; 2126 OperationType::Unmap)};
2166 result.IsError()) { 2127 result.IsError()) {
@@ -2527,13 +2488,13 @@ Result KPageTable::SetHeapSize(VAddr* out, size_t size) {
2527 R_UNLESS(memory_reservation.Succeeded(), ResultLimitReached); 2488 R_UNLESS(memory_reservation.Succeeded(), ResultLimitReached);
2528 2489
2529 // Allocate pages for the heap extension. 2490 // Allocate pages for the heap extension.
2530 KPageGroup pg; 2491 KPageGroup pg{m_kernel, m_block_info_manager};
2531 R_TRY(m_system.Kernel().MemoryManager().AllocateAndOpen( 2492 R_TRY(m_system.Kernel().MemoryManager().AllocateAndOpen(
2532 &pg, allocation_size / PageSize, 2493 &pg, allocation_size / PageSize,
2533 KMemoryManager::EncodeOption(m_memory_pool, m_allocation_option))); 2494 KMemoryManager::EncodeOption(m_memory_pool, m_allocation_option)));
2534 2495
2535 // Clear all the newly allocated pages. 2496 // Clear all the newly allocated pages.
2536 for (const auto& it : pg.Nodes()) { 2497 for (const auto& it : pg) {
2537 std::memset(m_system.DeviceMemory().GetPointer<void>(it.GetAddress()), m_heap_fill_value, 2498 std::memset(m_system.DeviceMemory().GetPointer<void>(it.GetAddress()), m_heap_fill_value,
2538 it.GetSize()); 2499 it.GetSize());
2539 } 2500 }
@@ -2610,11 +2571,23 @@ ResultVal<VAddr> KPageTable::AllocateAndMapMemory(size_t needed_num_pages, size_
2610 if (is_map_only) { 2571 if (is_map_only) {
2611 R_TRY(Operate(addr, needed_num_pages, perm, OperationType::Map, map_addr)); 2572 R_TRY(Operate(addr, needed_num_pages, perm, OperationType::Map, map_addr));
2612 } else { 2573 } else {
2613 KPageGroup page_group; 2574 // Create a page group to hold the pages we allocate.
2614 R_TRY(m_system.Kernel().MemoryManager().AllocateForProcess( 2575 KPageGroup pg{m_kernel, m_block_info_manager};
2615 &page_group, needed_num_pages, 2576
2616 KMemoryManager::EncodeOption(m_memory_pool, m_allocation_option), 0, 0)); 2577 R_TRY(m_system.Kernel().MemoryManager().AllocateAndOpen(
2617 R_TRY(Operate(addr, needed_num_pages, page_group, OperationType::MapGroup)); 2578 &pg, needed_num_pages,
2579 KMemoryManager::EncodeOption(m_memory_pool, m_allocation_option)));
2580
2581 // Ensure that the page group is closed when we're done working with it.
2582 SCOPE_EXIT({ pg.Close(); });
2583
2584 // Clear all pages.
2585 for (const auto& it : pg) {
2586 std::memset(m_system.DeviceMemory().GetPointer<void>(it.GetAddress()),
2587 m_heap_fill_value, it.GetSize());
2588 }
2589
2590 R_TRY(Operate(addr, needed_num_pages, pg, OperationType::MapGroup));
2618 } 2591 }
2619 2592
2620 // Update the blocks. 2593 // Update the blocks.
@@ -2795,19 +2768,28 @@ Result KPageTable::Operate(VAddr addr, size_t num_pages, const KPageGroup& page_
2795 ASSERT(num_pages > 0); 2768 ASSERT(num_pages > 0);
2796 ASSERT(num_pages == page_group.GetNumPages()); 2769 ASSERT(num_pages == page_group.GetNumPages());
2797 2770
2798 for (const auto& node : page_group.Nodes()) { 2771 switch (operation) {
2799 const size_t size{node.GetNumPages() * PageSize}; 2772 case OperationType::MapGroup: {
2773 // We want to maintain a new reference to every page in the group.
2774 KScopedPageGroup spg(page_group);
2775
2776 for (const auto& node : page_group) {
2777 const size_t size{node.GetNumPages() * PageSize};
2800 2778
2801 switch (operation) { 2779 // Map the pages.
2802 case OperationType::MapGroup:
2803 m_system.Memory().MapMemoryRegion(*m_page_table_impl, addr, size, node.GetAddress()); 2780 m_system.Memory().MapMemoryRegion(*m_page_table_impl, addr, size, node.GetAddress());
2804 break; 2781
2805 default: 2782 addr += size;
2806 ASSERT(false);
2807 break;
2808 } 2783 }
2809 2784
2810 addr += size; 2785 // We succeeded! We want to persist the reference to the pages.
2786 spg.CancelClose();
2787
2788 break;
2789 }
2790 default:
2791 ASSERT(false);
2792 break;
2811 } 2793 }
2812 2794
2813 R_SUCCEED(); 2795 R_SUCCEED();
@@ -2822,13 +2804,29 @@ Result KPageTable::Operate(VAddr addr, size_t num_pages, KMemoryPermission perm,
2822 ASSERT(ContainsPages(addr, num_pages)); 2804 ASSERT(ContainsPages(addr, num_pages));
2823 2805
2824 switch (operation) { 2806 switch (operation) {
2825 case OperationType::Unmap: 2807 case OperationType::Unmap: {
2808 // Ensure that any pages we track close on exit.
2809 KPageGroup pages_to_close{m_kernel, this->GetBlockInfoManager()};
2810 SCOPE_EXIT({ pages_to_close.CloseAndReset(); });
2811
2812 this->AddRegionToPages(addr, num_pages, pages_to_close);
2826 m_system.Memory().UnmapRegion(*m_page_table_impl, addr, num_pages * PageSize); 2813 m_system.Memory().UnmapRegion(*m_page_table_impl, addr, num_pages * PageSize);
2827 break; 2814 break;
2815 }
2816 case OperationType::MapFirst:
2828 case OperationType::Map: { 2817 case OperationType::Map: {
2829 ASSERT(map_addr); 2818 ASSERT(map_addr);
2830 ASSERT(Common::IsAligned(map_addr, PageSize)); 2819 ASSERT(Common::IsAligned(map_addr, PageSize));
2831 m_system.Memory().MapMemoryRegion(*m_page_table_impl, addr, num_pages * PageSize, map_addr); 2820 m_system.Memory().MapMemoryRegion(*m_page_table_impl, addr, num_pages * PageSize, map_addr);
2821
2822 // Open references to pages, if we should.
2823 if (IsHeapPhysicalAddress(m_kernel.MemoryLayout(), map_addr)) {
2824 if (operation == OperationType::MapFirst) {
2825 m_kernel.MemoryManager().OpenFirst(map_addr, num_pages);
2826 } else {
2827 m_kernel.MemoryManager().Open(map_addr, num_pages);
2828 }
2829 }
2832 break; 2830 break;
2833 } 2831 }
2834 case OperationType::Separate: { 2832 case OperationType::Separate: {
diff --git a/src/core/hle/kernel/k_page_table.h b/src/core/hle/kernel/k_page_table.h
index f1ca785d7..0a454b05b 100644
--- a/src/core/hle/kernel/k_page_table.h
+++ b/src/core/hle/kernel/k_page_table.h
@@ -107,6 +107,10 @@ public:
107 return *m_page_table_impl; 107 return *m_page_table_impl;
108 } 108 }
109 109
110 KBlockInfoManager* GetBlockInfoManager() {
111 return m_block_info_manager;
112 }
113
110 bool CanContain(VAddr addr, size_t size, KMemoryState state) const; 114 bool CanContain(VAddr addr, size_t size, KMemoryState state) const;
111 115
112protected: 116protected:
@@ -261,10 +265,6 @@ private:
261 void CleanupForIpcClientOnServerSetupFailure(PageLinkedList* page_list, VAddr address, 265 void CleanupForIpcClientOnServerSetupFailure(PageLinkedList* page_list, VAddr address,
262 size_t size, KMemoryPermission prot_perm); 266 size_t size, KMemoryPermission prot_perm);
263 267
264 // HACK: These will be removed once we automatically manage page reference counts.
265 void HACK_OpenPages(PAddr phys_addr, size_t num_pages);
266 void HACK_ClosePages(VAddr virt_addr, size_t num_pages);
267
268 mutable KLightLock m_general_lock; 268 mutable KLightLock m_general_lock;
269 mutable KLightLock m_map_physical_memory_lock; 269 mutable KLightLock m_map_physical_memory_lock;
270 270
@@ -488,6 +488,7 @@ private:
488 std::unique_ptr<Common::PageTable> m_page_table_impl; 488 std::unique_ptr<Common::PageTable> m_page_table_impl;
489 489
490 Core::System& m_system; 490 Core::System& m_system;
491 KernelCore& m_kernel;
491}; 492};
492 493
493} // namespace Kernel 494} // namespace Kernel
diff --git a/src/core/hle/kernel/k_shared_memory.cpp b/src/core/hle/kernel/k_shared_memory.cpp
index 0aa68103c..3cf2b5d91 100644
--- a/src/core/hle/kernel/k_shared_memory.cpp
+++ b/src/core/hle/kernel/k_shared_memory.cpp
@@ -13,10 +13,7 @@
13namespace Kernel { 13namespace Kernel {
14 14
15KSharedMemory::KSharedMemory(KernelCore& kernel_) : KAutoObjectWithSlabHeapAndContainer{kernel_} {} 15KSharedMemory::KSharedMemory(KernelCore& kernel_) : KAutoObjectWithSlabHeapAndContainer{kernel_} {}
16 16KSharedMemory::~KSharedMemory() = default;
17KSharedMemory::~KSharedMemory() {
18 kernel.GetSystemResourceLimit()->Release(LimitableResource::PhysicalMemoryMax, size);
19}
20 17
21Result KSharedMemory::Initialize(Core::DeviceMemory& device_memory_, KProcess* owner_process_, 18Result KSharedMemory::Initialize(Core::DeviceMemory& device_memory_, KProcess* owner_process_,
22 Svc::MemoryPermission owner_permission_, 19 Svc::MemoryPermission owner_permission_,
@@ -49,7 +46,8 @@ Result KSharedMemory::Initialize(Core::DeviceMemory& device_memory_, KProcess* o
49 R_UNLESS(physical_address != 0, ResultOutOfMemory); 46 R_UNLESS(physical_address != 0, ResultOutOfMemory);
50 47
51 //! Insert the result into our page group. 48 //! Insert the result into our page group.
52 page_group.emplace(physical_address, num_pages); 49 page_group.emplace(kernel, &kernel.GetSystemSystemResource().GetBlockInfoManager());
50 page_group->AddBlock(physical_address, num_pages);
53 51
54 // Commit our reservation. 52 // Commit our reservation.
55 memory_reservation.Commit(); 53 memory_reservation.Commit();
@@ -62,7 +60,7 @@ Result KSharedMemory::Initialize(Core::DeviceMemory& device_memory_, KProcess* o
62 is_initialized = true; 60 is_initialized = true;
63 61
64 // Clear all pages in the memory. 62 // Clear all pages in the memory.
65 for (const auto& block : page_group->Nodes()) { 63 for (const auto& block : *page_group) {
66 std::memset(device_memory_.GetPointer<void>(block.GetAddress()), 0, block.GetSize()); 64 std::memset(device_memory_.GetPointer<void>(block.GetAddress()), 0, block.GetSize());
67 } 65 }
68 66
@@ -71,13 +69,8 @@ Result KSharedMemory::Initialize(Core::DeviceMemory& device_memory_, KProcess* o
71 69
72void KSharedMemory::Finalize() { 70void KSharedMemory::Finalize() {
73 // Close and finalize the page group. 71 // Close and finalize the page group.
74 // page_group->Close(); 72 page_group->Close();
75 // page_group->Finalize(); 73 page_group->Finalize();
76
77 //! HACK: Manually close.
78 for (const auto& block : page_group->Nodes()) {
79 kernel.MemoryManager().Close(block.GetAddress(), block.GetNumPages());
80 }
81 74
82 // Release the memory reservation. 75 // Release the memory reservation.
83 resource_limit->Release(LimitableResource::PhysicalMemoryMax, size); 76 resource_limit->Release(LimitableResource::PhysicalMemoryMax, size);
diff --git a/src/core/hle/kernel/memory_types.h b/src/core/hle/kernel/memory_types.h
index 3975507bd..92b8b37ac 100644
--- a/src/core/hle/kernel/memory_types.h
+++ b/src/core/hle/kernel/memory_types.h
@@ -14,4 +14,7 @@ constexpr std::size_t PageSize{1 << PageBits};
14 14
15using Page = std::array<u8, PageSize>; 15using Page = std::array<u8, PageSize>;
16 16
17using KPhysicalAddress = PAddr;
18using KProcessAddress = VAddr;
19
17} // namespace Kernel 20} // namespace Kernel
diff --git a/src/core/hle/kernel/svc.cpp b/src/core/hle/kernel/svc.cpp
index 788ee2160..aca442196 100644
--- a/src/core/hle/kernel/svc.cpp
+++ b/src/core/hle/kernel/svc.cpp
@@ -1485,7 +1485,7 @@ static Result MapProcessMemory(Core::System& system, VAddr dst_address, Handle p
1485 ResultInvalidMemoryRegion); 1485 ResultInvalidMemoryRegion);
1486 1486
1487 // Create a new page group. 1487 // Create a new page group.
1488 KPageGroup pg; 1488 KPageGroup pg{system.Kernel(), dst_pt.GetBlockInfoManager()};
1489 R_TRY(src_pt.MakeAndOpenPageGroup( 1489 R_TRY(src_pt.MakeAndOpenPageGroup(
1490 std::addressof(pg), src_address, size / PageSize, KMemoryState::FlagCanMapProcess, 1490 std::addressof(pg), src_address, size / PageSize, KMemoryState::FlagCanMapProcess,
1491 KMemoryState::FlagCanMapProcess, KMemoryPermission::None, KMemoryPermission::None, 1491 KMemoryState::FlagCanMapProcess, KMemoryPermission::None, KMemoryPermission::None,
diff --git a/src/input_common/drivers/tas_input.cpp b/src/input_common/drivers/tas_input.cpp
index f3ade90da..f3cb14c56 100644
--- a/src/input_common/drivers/tas_input.cpp
+++ b/src/input_common/drivers/tas_input.cpp
@@ -156,10 +156,12 @@ void Tas::RecordInput(u64 buttons, TasAnalog left_axis, TasAnalog right_axis) {
156 }; 156 };
157} 157}
158 158
159std::tuple<TasState, size_t, size_t> Tas::GetStatus() const { 159std::tuple<TasState, size_t, std::array<size_t, PLAYER_NUMBER>> Tas::GetStatus() const {
160 TasState state; 160 TasState state;
161 std::array<size_t, PLAYER_NUMBER> lengths{0};
161 if (is_recording) { 162 if (is_recording) {
162 return {TasState::Recording, 0, record_commands.size()}; 163 lengths[0] = record_commands.size();
164 return {TasState::Recording, record_commands.size(), lengths};
163 } 165 }
164 166
165 if (is_running) { 167 if (is_running) {
@@ -168,7 +170,11 @@ std::tuple<TasState, size_t, size_t> Tas::GetStatus() const {
168 state = TasState::Stopped; 170 state = TasState::Stopped;
169 } 171 }
170 172
171 return {state, current_command, script_length}; 173 for (size_t i = 0; i < PLAYER_NUMBER; i++) {
174 lengths[i] = commands[i].size();
175 }
176
177 return {state, current_command, lengths};
172} 178}
173 179
174void Tas::UpdateThread() { 180void Tas::UpdateThread() {
diff --git a/src/input_common/drivers/tas_input.h b/src/input_common/drivers/tas_input.h
index 38a27a230..5be66d142 100644
--- a/src/input_common/drivers/tas_input.h
+++ b/src/input_common/drivers/tas_input.h
@@ -124,7 +124,7 @@ public:
124 * Current playback progress ; 124 * Current playback progress ;
125 * Total length of script file currently loaded or being recorded 125 * Total length of script file currently loaded or being recorded
126 */ 126 */
127 std::tuple<TasState, size_t, size_t> GetStatus() const; 127 std::tuple<TasState, size_t, std::array<size_t, PLAYER_NUMBER>> GetStatus() const;
128 128
129private: 129private:
130 enum class TasAxis : u8; 130 enum class TasAxis : u8;
diff --git a/src/video_core/renderer_vulkan/renderer_vulkan.cpp b/src/video_core/renderer_vulkan/renderer_vulkan.cpp
index 52855120c..2a8d9e377 100644
--- a/src/video_core/renderer_vulkan/renderer_vulkan.cpp
+++ b/src/video_core/renderer_vulkan/renderer_vulkan.cpp
@@ -60,22 +60,9 @@ std::string GetDriverVersion(const Device& device) {
60 return GetReadableVersion(version); 60 return GetReadableVersion(version);
61} 61}
62 62
63std::string BuildCommaSeparatedExtensions(std::vector<std::string> available_extensions) { 63std::string BuildCommaSeparatedExtensions(
64 std::sort(std::begin(available_extensions), std::end(available_extensions)); 64 const std::set<std::string, std::less<>>& available_extensions) {
65 65 return fmt::format("{}", fmt::join(available_extensions, ","));
66 static constexpr std::size_t AverageExtensionSize = 64;
67 std::string separated_extensions;
68 separated_extensions.reserve(available_extensions.size() * AverageExtensionSize);
69
70 const auto end = std::end(available_extensions);
71 for (auto extension = std::begin(available_extensions); extension != end; ++extension) {
72 if (const bool is_last = extension + 1 == end; is_last) {
73 separated_extensions += *extension;
74 } else {
75 separated_extensions += fmt::format("{},", *extension);
76 }
77 }
78 return separated_extensions;
79} 66}
80 67
81} // Anonymous namespace 68} // Anonymous namespace
@@ -112,6 +99,7 @@ RendererVulkan::RendererVulkan(Core::TelemetrySession& telemetry_session_,
112 state_tracker, scheduler) { 99 state_tracker, scheduler) {
113 if (Settings::values.renderer_force_max_clock.GetValue() && device.ShouldBoostClocks()) { 100 if (Settings::values.renderer_force_max_clock.GetValue() && device.ShouldBoostClocks()) {
114 turbo_mode.emplace(instance, dld); 101 turbo_mode.emplace(instance, dld);
102 scheduler.RegisterOnSubmit([this] { turbo_mode->QueueSubmitted(); });
115 } 103 }
116 Report(); 104 Report();
117} catch (const vk::Exception& exception) { 105} catch (const vk::Exception& exception) {
@@ -120,6 +108,7 @@ RendererVulkan::RendererVulkan(Core::TelemetrySession& telemetry_session_,
120} 108}
121 109
122RendererVulkan::~RendererVulkan() { 110RendererVulkan::~RendererVulkan() {
111 scheduler.RegisterOnSubmit([] {});
123 void(device.GetLogical().WaitIdle()); 112 void(device.GetLogical().WaitIdle());
124} 113}
125 114
diff --git a/src/video_core/renderer_vulkan/vk_scheduler.cpp b/src/video_core/renderer_vulkan/vk_scheduler.cpp
index c2e53a5d5..e03685af1 100644
--- a/src/video_core/renderer_vulkan/vk_scheduler.cpp
+++ b/src/video_core/renderer_vulkan/vk_scheduler.cpp
@@ -213,6 +213,11 @@ void Scheduler::SubmitExecution(VkSemaphore signal_semaphore, VkSemaphore wait_s
213 .signalSemaphoreCount = num_signal_semaphores, 213 .signalSemaphoreCount = num_signal_semaphores,
214 .pSignalSemaphores = signal_semaphores.data(), 214 .pSignalSemaphores = signal_semaphores.data(),
215 }; 215 };
216
217 if (on_submit) {
218 on_submit();
219 }
220
216 switch (const VkResult result = device.GetGraphicsQueue().Submit(submit_info)) { 221 switch (const VkResult result = device.GetGraphicsQueue().Submit(submit_info)) {
217 case VK_SUCCESS: 222 case VK_SUCCESS:
218 break; 223 break;
diff --git a/src/video_core/renderer_vulkan/vk_scheduler.h b/src/video_core/renderer_vulkan/vk_scheduler.h
index 3858c506c..bd4cb0f7e 100644
--- a/src/video_core/renderer_vulkan/vk_scheduler.h
+++ b/src/video_core/renderer_vulkan/vk_scheduler.h
@@ -5,6 +5,7 @@
5 5
6#include <condition_variable> 6#include <condition_variable>
7#include <cstddef> 7#include <cstddef>
8#include <functional>
8#include <memory> 9#include <memory>
9#include <thread> 10#include <thread>
10#include <utility> 11#include <utility>
@@ -66,6 +67,11 @@ public:
66 query_cache = &query_cache_; 67 query_cache = &query_cache_;
67 } 68 }
68 69
70 // Registers a callback to perform on queue submission.
71 void RegisterOnSubmit(std::function<void()>&& func) {
72 on_submit = std::move(func);
73 }
74
69 /// Send work to a separate thread. 75 /// Send work to a separate thread.
70 template <typename T> 76 template <typename T>
71 void Record(T&& command) { 77 void Record(T&& command) {
@@ -216,6 +222,7 @@ private:
216 vk::CommandBuffer current_cmdbuf; 222 vk::CommandBuffer current_cmdbuf;
217 223
218 std::unique_ptr<CommandChunk> chunk; 224 std::unique_ptr<CommandChunk> chunk;
225 std::function<void()> on_submit;
219 226
220 State state; 227 State state;
221 228
diff --git a/src/video_core/renderer_vulkan/vk_turbo_mode.cpp b/src/video_core/renderer_vulkan/vk_turbo_mode.cpp
index 852b86f84..c42594149 100644
--- a/src/video_core/renderer_vulkan/vk_turbo_mode.cpp
+++ b/src/video_core/renderer_vulkan/vk_turbo_mode.cpp
@@ -14,11 +14,21 @@ using namespace Common::Literals;
14 14
15TurboMode::TurboMode(const vk::Instance& instance, const vk::InstanceDispatch& dld) 15TurboMode::TurboMode(const vk::Instance& instance, const vk::InstanceDispatch& dld)
16 : m_device{CreateDevice(instance, dld, VK_NULL_HANDLE)}, m_allocator{m_device, false} { 16 : m_device{CreateDevice(instance, dld, VK_NULL_HANDLE)}, m_allocator{m_device, false} {
17 {
18 std::scoped_lock lk{m_submission_lock};
19 m_submission_time = std::chrono::steady_clock::now();
20 }
17 m_thread = std::jthread([&](auto stop_token) { Run(stop_token); }); 21 m_thread = std::jthread([&](auto stop_token) { Run(stop_token); });
18} 22}
19 23
20TurboMode::~TurboMode() = default; 24TurboMode::~TurboMode() = default;
21 25
26void TurboMode::QueueSubmitted() {
27 std::scoped_lock lk{m_submission_lock};
28 m_submission_time = std::chrono::steady_clock::now();
29 m_submission_cv.notify_one();
30}
31
22void TurboMode::Run(std::stop_token stop_token) { 32void TurboMode::Run(std::stop_token stop_token) {
23 auto& dld = m_device.GetLogical(); 33 auto& dld = m_device.GetLogical();
24 34
@@ -199,6 +209,13 @@ void TurboMode::Run(std::stop_token stop_token) {
199 209
200 // Wait for completion. 210 // Wait for completion.
201 fence.Wait(); 211 fence.Wait();
212
213 // Wait for the next graphics queue submission if necessary.
214 std::unique_lock lk{m_submission_lock};
215 Common::CondvarWait(m_submission_cv, lk, stop_token, [this] {
216 return (std::chrono::steady_clock::now() - m_submission_time) <=
217 std::chrono::milliseconds{100};
218 });
202 } 219 }
203} 220}
204 221
diff --git a/src/video_core/renderer_vulkan/vk_turbo_mode.h b/src/video_core/renderer_vulkan/vk_turbo_mode.h
index 2060e2395..99b5ac50b 100644
--- a/src/video_core/renderer_vulkan/vk_turbo_mode.h
+++ b/src/video_core/renderer_vulkan/vk_turbo_mode.h
@@ -3,6 +3,9 @@
3 3
4#pragma once 4#pragma once
5 5
6#include <chrono>
7#include <mutex>
8
6#include "common/polyfill_thread.h" 9#include "common/polyfill_thread.h"
7#include "video_core/vulkan_common/vulkan_device.h" 10#include "video_core/vulkan_common/vulkan_device.h"
8#include "video_core/vulkan_common/vulkan_memory_allocator.h" 11#include "video_core/vulkan_common/vulkan_memory_allocator.h"
@@ -15,11 +18,17 @@ public:
15 explicit TurboMode(const vk::Instance& instance, const vk::InstanceDispatch& dld); 18 explicit TurboMode(const vk::Instance& instance, const vk::InstanceDispatch& dld);
16 ~TurboMode(); 19 ~TurboMode();
17 20
21 void QueueSubmitted();
22
18private: 23private:
19 void Run(std::stop_token stop_token); 24 void Run(std::stop_token stop_token);
20 25
21 Device m_device; 26 Device m_device;
22 MemoryAllocator m_allocator; 27 MemoryAllocator m_allocator;
28 std::mutex m_submission_lock;
29 std::condition_variable_any m_submission_cv;
30 std::chrono::time_point<std::chrono::steady_clock> m_submission_time{};
31
23 std::jthread m_thread; 32 std::jthread m_thread;
24}; 33};
25 34
diff --git a/src/video_core/vulkan_common/vulkan_device.cpp b/src/video_core/vulkan_common/vulkan_device.cpp
index fd1c5a683..23d922e5d 100644
--- a/src/video_core/vulkan_common/vulkan_device.cpp
+++ b/src/video_core/vulkan_common/vulkan_device.cpp
@@ -74,30 +74,6 @@ enum class NvidiaArchitecture {
74 VoltaOrOlder, 74 VoltaOrOlder,
75}; 75};
76 76
77constexpr std::array REQUIRED_EXTENSIONS{
78 VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME,
79 VK_EXT_ROBUSTNESS_2_EXTENSION_NAME,
80#ifdef _WIN32
81 VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME,
82#endif
83#ifdef __unix__
84 VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME,
85#endif
86};
87
88constexpr std::array REQUIRED_EXTENSIONS_BEFORE_1_2{
89 VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME,
90 VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME,
91 VK_KHR_8BIT_STORAGE_EXTENSION_NAME,
92 VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME,
93 VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME,
94 VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME,
95};
96
97constexpr std::array REQUIRED_EXTENSIONS_BEFORE_1_3{
98 VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_EXTENSION_NAME,
99};
100
101template <typename T> 77template <typename T>
102void SetNext(void**& next, T& data) { 78void SetNext(void**& next, T& data) {
103 *next = &data; 79 *next = &data;
@@ -286,24 +262,9 @@ std::unordered_map<VkFormat, VkFormatProperties> GetFormatProperties(vk::Physica
286 return format_properties; 262 return format_properties;
287} 263}
288 264
289std::vector<std::string> GetSupportedExtensions(vk::PhysicalDevice physical) {
290 const std::vector extensions = physical.EnumerateDeviceExtensionProperties();
291 std::vector<std::string> supported_extensions;
292 supported_extensions.reserve(extensions.size());
293 for (const auto& extension : extensions) {
294 supported_extensions.emplace_back(extension.extensionName);
295 }
296 return supported_extensions;
297}
298
299bool IsExtensionSupported(std::span<const std::string> supported_extensions,
300 std::string_view extension) {
301 return std::ranges::find(supported_extensions, extension) != supported_extensions.end();
302}
303
304NvidiaArchitecture GetNvidiaArchitecture(vk::PhysicalDevice physical, 265NvidiaArchitecture GetNvidiaArchitecture(vk::PhysicalDevice physical,
305 std::span<const std::string> exts) { 266 const std::set<std::string, std::less<>>& exts) {
306 if (IsExtensionSupported(exts, VK_KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME)) { 267 if (exts.contains(VK_KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME)) {
307 VkPhysicalDeviceFragmentShadingRatePropertiesKHR shading_rate_props{}; 268 VkPhysicalDeviceFragmentShadingRatePropertiesKHR shading_rate_props{};
308 shading_rate_props.sType = 269 shading_rate_props.sType =
309 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR; 270 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR;
@@ -316,423 +277,55 @@ NvidiaArchitecture GetNvidiaArchitecture(vk::PhysicalDevice physical,
316 return NvidiaArchitecture::AmpereOrNewer; 277 return NvidiaArchitecture::AmpereOrNewer;
317 } 278 }
318 } 279 }
319 if (IsExtensionSupported(exts, VK_NV_SHADING_RATE_IMAGE_EXTENSION_NAME)) { 280 if (exts.contains(VK_NV_SHADING_RATE_IMAGE_EXTENSION_NAME)) {
320 return NvidiaArchitecture::Turing; 281 return NvidiaArchitecture::Turing;
321 } 282 }
322 return NvidiaArchitecture::VoltaOrOlder; 283 return NvidiaArchitecture::VoltaOrOlder;
323} 284}
285
286std::vector<const char*> ExtensionListForVulkan(
287 const std::set<std::string, std::less<>>& extensions) {
288 std::vector<const char*> output;
289 for (const auto& extension : extensions) {
290 output.push_back(extension.c_str());
291 }
292 return output;
293}
294
324} // Anonymous namespace 295} // Anonymous namespace
325 296
326Device::Device(VkInstance instance_, vk::PhysicalDevice physical_, VkSurfaceKHR surface, 297Device::Device(VkInstance instance_, vk::PhysicalDevice physical_, VkSurfaceKHR surface,
327 const vk::InstanceDispatch& dld_) 298 const vk::InstanceDispatch& dld_)
328 : instance{instance_}, dld{dld_}, physical{physical_}, properties{physical.GetProperties()}, 299 : instance{instance_}, dld{dld_}, physical{physical_},
329 instance_version{properties.apiVersion}, supported_extensions{GetSupportedExtensions(
330 physical)},
331 format_properties(GetFormatProperties(physical)) { 300 format_properties(GetFormatProperties(physical)) {
332 CheckSuitability(surface != nullptr); 301 // Get suitability and device properties.
333 SetupFamilies(surface); 302 const bool is_suitable = GetSuitability(surface != nullptr);
334 SetupFeatures();
335 SetupProperties();
336
337 const auto queue_cis = GetDeviceQueueCreateInfos();
338 const std::vector extensions = LoadExtensions(surface != nullptr);
339
340 VkPhysicalDeviceFeatures2 features2{
341 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
342 .pNext = nullptr,
343 .features{
344 .robustBufferAccess = true,
345 .fullDrawIndexUint32 = false,
346 .imageCubeArray = true,
347 .independentBlend = true,
348 .geometryShader = true,
349 .tessellationShader = true,
350 .sampleRateShading = true,
351 .dualSrcBlend = true,
352 .logicOp = true,
353 .multiDrawIndirect = true,
354 .drawIndirectFirstInstance = true,
355 .depthClamp = true,
356 .depthBiasClamp = true,
357 .fillModeNonSolid = true,
358 .depthBounds = is_depth_bounds_supported,
359 .wideLines = true,
360 .largePoints = true,
361 .alphaToOne = false,
362 .multiViewport = true,
363 .samplerAnisotropy = true,
364 .textureCompressionETC2 = false,
365 .textureCompressionASTC_LDR = is_optimal_astc_supported,
366 .textureCompressionBC = false,
367 .occlusionQueryPrecise = true,
368 .pipelineStatisticsQuery = false,
369 .vertexPipelineStoresAndAtomics = true,
370 .fragmentStoresAndAtomics = true,
371 .shaderTessellationAndGeometryPointSize = false,
372 .shaderImageGatherExtended = true,
373 .shaderStorageImageExtendedFormats = false,
374 .shaderStorageImageMultisample = is_shader_storage_image_multisample,
375 .shaderStorageImageReadWithoutFormat = is_formatless_image_load_supported,
376 .shaderStorageImageWriteWithoutFormat = true,
377 .shaderUniformBufferArrayDynamicIndexing = false,
378 .shaderSampledImageArrayDynamicIndexing = false,
379 .shaderStorageBufferArrayDynamicIndexing = false,
380 .shaderStorageImageArrayDynamicIndexing = false,
381 .shaderClipDistance = true,
382 .shaderCullDistance = true,
383 .shaderFloat64 = is_shader_float64_supported,
384 .shaderInt64 = is_shader_int64_supported,
385 .shaderInt16 = is_shader_int16_supported,
386 .shaderResourceResidency = false,
387 .shaderResourceMinLod = false,
388 .sparseBinding = false,
389 .sparseResidencyBuffer = false,
390 .sparseResidencyImage2D = false,
391 .sparseResidencyImage3D = false,
392 .sparseResidency2Samples = false,
393 .sparseResidency4Samples = false,
394 .sparseResidency8Samples = false,
395 .sparseResidency16Samples = false,
396 .sparseResidencyAliased = false,
397 .variableMultisampleRate = false,
398 .inheritedQueries = false,
399 },
400 };
401 const void* first_next = &features2;
402 void** next = &features2.pNext;
403
404 VkPhysicalDeviceTimelineSemaphoreFeatures timeline_semaphore{
405 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES,
406 .pNext = nullptr,
407 .timelineSemaphore = true,
408 };
409 SetNext(next, timeline_semaphore);
410
411 VkPhysicalDevice16BitStorageFeatures bit16_storage{
412 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES,
413 .pNext = nullptr,
414 .storageBuffer16BitAccess = true,
415 .uniformAndStorageBuffer16BitAccess = true,
416 .storagePushConstant16 = false,
417 .storageInputOutput16 = false,
418 };
419 SetNext(next, bit16_storage);
420
421 VkPhysicalDevice8BitStorageFeatures bit8_storage{
422 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES,
423 .pNext = nullptr,
424 .storageBuffer8BitAccess = true,
425 .uniformAndStorageBuffer8BitAccess = true,
426 .storagePushConstant8 = false,
427 };
428 SetNext(next, bit8_storage);
429
430 VkPhysicalDeviceRobustness2FeaturesEXT robustness2{
431 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT,
432 .pNext = nullptr,
433 .robustBufferAccess2 = true,
434 .robustImageAccess2 = true,
435 .nullDescriptor = true,
436 };
437 SetNext(next, robustness2);
438
439 VkPhysicalDeviceHostQueryResetFeatures host_query_reset{
440 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES,
441 .pNext = nullptr,
442 .hostQueryReset = true,
443 };
444 SetNext(next, host_query_reset);
445
446 VkPhysicalDeviceVariablePointerFeatures variable_pointers{
447 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES,
448 .pNext = nullptr,
449 .variablePointersStorageBuffer = VK_TRUE,
450 .variablePointers = VK_TRUE,
451 };
452 SetNext(next, variable_pointers);
453
454 VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures demote{
455 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES,
456 .pNext = nullptr,
457 .shaderDemoteToHelperInvocation = true,
458 };
459 SetNext(next, demote);
460
461 VkPhysicalDeviceShaderDrawParametersFeatures draw_parameters{
462 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES,
463 .pNext = nullptr,
464 .shaderDrawParameters = true,
465 };
466 SetNext(next, draw_parameters);
467
468 VkPhysicalDeviceShaderFloat16Int8Features float16_int8;
469 if (is_int8_supported || is_float16_supported) {
470 float16_int8 = {
471 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES,
472 .pNext = nullptr,
473 .shaderFloat16 = is_float16_supported,
474 .shaderInt8 = is_int8_supported,
475 };
476 SetNext(next, float16_int8);
477 }
478 if (!is_float16_supported) {
479 LOG_INFO(Render_Vulkan, "Device doesn't support float16 natively");
480 }
481 if (!is_int8_supported) {
482 LOG_INFO(Render_Vulkan, "Device doesn't support int8 natively");
483 }
484
485 if (!nv_viewport_swizzle) {
486 LOG_INFO(Render_Vulkan, "Device doesn't support viewport swizzles");
487 }
488
489 if (!nv_viewport_array2) {
490 LOG_INFO(Render_Vulkan, "Device doesn't support viewport masks");
491 }
492
493 if (!nv_geometry_shader_passthrough) {
494 LOG_INFO(Render_Vulkan, "Device doesn't support passthrough geometry shaders");
495 }
496 303
497 VkPhysicalDeviceUniformBufferStandardLayoutFeatures std430_layout; 304 const VkDriverId driver_id = properties.driver.driverID;
498 if (khr_uniform_buffer_standard_layout) { 305 const bool is_radv = driver_id == VK_DRIVER_ID_MESA_RADV;
499 std430_layout = { 306 const bool is_amd_driver =
500 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES, 307 driver_id == VK_DRIVER_ID_AMD_PROPRIETARY || driver_id == VK_DRIVER_ID_AMD_OPEN_SOURCE;
501 .pNext = nullptr, 308 const bool is_amd = is_amd_driver || is_radv;
502 .uniformBufferStandardLayout = true, 309 const bool is_intel_windows = driver_id == VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS;
503 }; 310 const bool is_intel_anv = driver_id == VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA;
504 SetNext(next, std430_layout); 311 const bool is_nvidia = driver_id == VK_DRIVER_ID_NVIDIA_PROPRIETARY;
505 } else { 312 const bool is_mvk = driver_id == VK_DRIVER_ID_MOLTENVK;
506 LOG_INFO(Render_Vulkan, "Device doesn't support packed UBOs");
507 }
508
509 VkPhysicalDeviceIndexTypeUint8FeaturesEXT index_type_uint8;
510 if (ext_index_type_uint8) {
511 index_type_uint8 = {
512 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT,
513 .pNext = nullptr,
514 .indexTypeUint8 = true,
515 };
516 SetNext(next, index_type_uint8);
517 } else {
518 LOG_INFO(Render_Vulkan, "Device doesn't support uint8 indexes");
519 }
520
521 VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT primitive_topology_list_restart;
522 if (is_topology_list_restart_supported || is_patch_list_restart_supported) {
523 primitive_topology_list_restart = {
524 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT,
525 .pNext = nullptr,
526 .primitiveTopologyListRestart = is_topology_list_restart_supported,
527 .primitiveTopologyPatchListRestart = is_patch_list_restart_supported,
528 };
529 SetNext(next, primitive_topology_list_restart);
530 } else {
531 LOG_INFO(Render_Vulkan, "Device doesn't support list topology primitive restart");
532 }
533
534 VkPhysicalDeviceTransformFeedbackFeaturesEXT transform_feedback;
535 if (ext_transform_feedback) {
536 transform_feedback = {
537 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT,
538 .pNext = nullptr,
539 .transformFeedback = true,
540 .geometryStreams = true,
541 };
542 SetNext(next, transform_feedback);
543 } else {
544 LOG_INFO(Render_Vulkan, "Device doesn't support transform feedbacks");
545 }
546
547 VkPhysicalDeviceCustomBorderColorFeaturesEXT custom_border;
548 if (ext_custom_border_color) {
549 custom_border = {
550 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT,
551 .pNext = nullptr,
552 .customBorderColors = VK_TRUE,
553 .customBorderColorWithoutFormat = VK_TRUE,
554 };
555 SetNext(next, custom_border);
556 } else {
557 LOG_INFO(Render_Vulkan, "Device doesn't support custom border colors");
558 }
559
560 VkPhysicalDeviceExtendedDynamicStateFeaturesEXT dynamic_state;
561 if (ext_extended_dynamic_state) {
562 dynamic_state = {
563 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT,
564 .pNext = nullptr,
565 .extendedDynamicState = VK_TRUE,
566 };
567 SetNext(next, dynamic_state);
568 } else {
569 LOG_INFO(Render_Vulkan, "Device doesn't support extended dynamic state");
570 }
571
572 VkPhysicalDeviceExtendedDynamicState2FeaturesEXT dynamic_state_2;
573 if (ext_extended_dynamic_state_2) {
574 dynamic_state_2 = {
575 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT,
576 .pNext = nullptr,
577 .extendedDynamicState2 = VK_TRUE,
578 .extendedDynamicState2LogicOp = ext_extended_dynamic_state_2_extra ? VK_TRUE : VK_FALSE,
579 .extendedDynamicState2PatchControlPoints = VK_FALSE,
580 };
581 SetNext(next, dynamic_state_2);
582 } else {
583 LOG_INFO(Render_Vulkan, "Device doesn't support extended dynamic state 2");
584 }
585
586 VkPhysicalDeviceExtendedDynamicState3FeaturesEXT dynamic_state_3;
587 if (ext_extended_dynamic_state_3) {
588 dynamic_state_3 = {
589 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_FEATURES_EXT,
590 .pNext = nullptr,
591 .extendedDynamicState3TessellationDomainOrigin = VK_FALSE,
592 .extendedDynamicState3DepthClampEnable =
593 ext_extended_dynamic_state_3_enables ? VK_TRUE : VK_FALSE,
594 .extendedDynamicState3PolygonMode = VK_FALSE,
595 .extendedDynamicState3RasterizationSamples = VK_FALSE,
596 .extendedDynamicState3SampleMask = VK_FALSE,
597 .extendedDynamicState3AlphaToCoverageEnable = VK_FALSE,
598 .extendedDynamicState3AlphaToOneEnable = VK_FALSE,
599 .extendedDynamicState3LogicOpEnable =
600 ext_extended_dynamic_state_3_enables ? VK_TRUE : VK_FALSE,
601 .extendedDynamicState3ColorBlendEnable =
602 ext_extended_dynamic_state_3_blend ? VK_TRUE : VK_FALSE,
603 .extendedDynamicState3ColorBlendEquation =
604 ext_extended_dynamic_state_3_blend ? VK_TRUE : VK_FALSE,
605 .extendedDynamicState3ColorWriteMask =
606 ext_extended_dynamic_state_3_blend ? VK_TRUE : VK_FALSE,
607 .extendedDynamicState3RasterizationStream = VK_FALSE,
608 .extendedDynamicState3ConservativeRasterizationMode = VK_FALSE,
609 .extendedDynamicState3ExtraPrimitiveOverestimationSize = VK_FALSE,
610 .extendedDynamicState3DepthClipEnable = VK_FALSE,
611 .extendedDynamicState3SampleLocationsEnable = VK_FALSE,
612 .extendedDynamicState3ColorBlendAdvanced = VK_FALSE,
613 .extendedDynamicState3ProvokingVertexMode = VK_FALSE,
614 .extendedDynamicState3LineRasterizationMode = VK_FALSE,
615 .extendedDynamicState3LineStippleEnable = VK_FALSE,
616 .extendedDynamicState3DepthClipNegativeOneToOne = VK_FALSE,
617 .extendedDynamicState3ViewportWScalingEnable = VK_FALSE,
618 .extendedDynamicState3ViewportSwizzle = VK_FALSE,
619 .extendedDynamicState3CoverageToColorEnable = VK_FALSE,
620 .extendedDynamicState3CoverageToColorLocation = VK_FALSE,
621 .extendedDynamicState3CoverageModulationMode = VK_FALSE,
622 .extendedDynamicState3CoverageModulationTableEnable = VK_FALSE,
623 .extendedDynamicState3CoverageModulationTable = VK_FALSE,
624 .extendedDynamicState3CoverageReductionMode = VK_FALSE,
625 .extendedDynamicState3RepresentativeFragmentTestEnable = VK_FALSE,
626 .extendedDynamicState3ShadingRateImageEnable = VK_FALSE,
627 };
628 SetNext(next, dynamic_state_3);
629 } else {
630 LOG_INFO(Render_Vulkan, "Device doesn't support extended dynamic state 3");
631 }
632
633 VkPhysicalDeviceLineRasterizationFeaturesEXT line_raster;
634 if (ext_line_rasterization) {
635 line_raster = {
636 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT,
637 .pNext = nullptr,
638 .rectangularLines = VK_TRUE,
639 .bresenhamLines = VK_FALSE,
640 .smoothLines = VK_TRUE,
641 .stippledRectangularLines = VK_FALSE,
642 .stippledBresenhamLines = VK_FALSE,
643 .stippledSmoothLines = VK_FALSE,
644 };
645 SetNext(next, line_raster);
646 } else {
647 LOG_INFO(Render_Vulkan, "Device doesn't support smooth lines");
648 }
649
650 if (!ext_conservative_rasterization) {
651 LOG_INFO(Render_Vulkan, "Device doesn't support conservative rasterization");
652 }
653
654 VkPhysicalDeviceProvokingVertexFeaturesEXT provoking_vertex;
655 if (ext_provoking_vertex) {
656 provoking_vertex = {
657 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT,
658 .pNext = nullptr,
659 .provokingVertexLast = VK_TRUE,
660 .transformFeedbackPreservesProvokingVertex = VK_TRUE,
661 };
662 SetNext(next, provoking_vertex);
663 } else {
664 LOG_INFO(Render_Vulkan, "Device doesn't support provoking vertex last");
665 }
666
667 VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT vertex_input_dynamic;
668 if (ext_vertex_input_dynamic_state) {
669 vertex_input_dynamic = {
670 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT,
671 .pNext = nullptr,
672 .vertexInputDynamicState = VK_TRUE,
673 };
674 SetNext(next, vertex_input_dynamic);
675 } else {
676 LOG_INFO(Render_Vulkan, "Device doesn't support vertex input dynamic state");
677 }
678
679 VkPhysicalDeviceShaderAtomicInt64Features atomic_int64;
680 if (ext_shader_atomic_int64) {
681 atomic_int64 = {
682 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES,
683 .pNext = nullptr,
684 .shaderBufferInt64Atomics = VK_TRUE,
685 .shaderSharedInt64Atomics = VK_TRUE,
686 };
687 SetNext(next, atomic_int64);
688 }
689 313
690 VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR workgroup_layout; 314 if (is_mvk && !is_suitable) {
691 if (khr_workgroup_memory_explicit_layout && is_shader_int16_supported) { 315 LOG_WARNING(Render_Vulkan, "Unsuitable driver is MoltenVK, continuing anyway");
692 workgroup_layout = { 316 } else if (!is_suitable) {
693 .sType = 317 throw vk::Exception(VK_ERROR_INCOMPATIBLE_DRIVER);
694 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR,
695 .pNext = nullptr,
696 .workgroupMemoryExplicitLayout = VK_TRUE,
697 .workgroupMemoryExplicitLayoutScalarBlockLayout = VK_TRUE,
698 .workgroupMemoryExplicitLayout8BitAccess = VK_TRUE,
699 .workgroupMemoryExplicitLayout16BitAccess = VK_TRUE,
700 };
701 SetNext(next, workgroup_layout);
702 } else if (khr_workgroup_memory_explicit_layout) {
703 // TODO(lat9nq): Find a proper fix for this
704 LOG_WARNING(Render_Vulkan, "Disabling VK_KHR_workgroup_memory_explicit_layout due to a "
705 "yuzu bug when host driver does not support 16-bit integers");
706 khr_workgroup_memory_explicit_layout = false;
707 } 318 }
708 319
709 VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR executable_properties; 320 SetupFamilies(surface);
710 if (khr_pipeline_executable_properties) { 321 const auto queue_cis = GetDeviceQueueCreateInfos();
711 LOG_INFO(Render_Vulkan, "Enabling shader feedback, expect slower shader build times");
712 executable_properties = {
713 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR,
714 .pNext = nullptr,
715 .pipelineExecutableInfo = VK_TRUE,
716 };
717 SetNext(next, executable_properties);
718 }
719
720 if (!ext_depth_range_unrestricted) {
721 LOG_INFO(Render_Vulkan, "Device doesn't support depth range unrestricted");
722 }
723 322
724 VkPhysicalDeviceDepthClipControlFeaturesEXT depth_clip_control_features; 323 // GetSuitability has already configured the linked list of features for us.
725 if (ext_depth_clip_control) { 324 // Reuse it here.
726 depth_clip_control_features = { 325 const void* first_next = &features2;
727 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT,
728 .pNext = nullptr,
729 .depthClipControl = VK_TRUE,
730 };
731 SetNext(next, depth_clip_control_features);
732 }
733 326
734 VkDeviceDiagnosticsConfigCreateInfoNV diagnostics_nv; 327 VkDeviceDiagnosticsConfigCreateInfoNV diagnostics_nv{};
735 if (Settings::values.enable_nsight_aftermath && nv_device_diagnostics_config) { 328 if (Settings::values.enable_nsight_aftermath && extensions.device_diagnostics_config) {
736 nsight_aftermath_tracker = std::make_unique<NsightAftermathTracker>(); 329 nsight_aftermath_tracker = std::make_unique<NsightAftermathTracker>();
737 330
738 diagnostics_nv = { 331 diagnostics_nv = {
@@ -744,33 +337,39 @@ Device::Device(VkInstance instance_, vk::PhysicalDevice physical_, VkSurfaceKHR
744 }; 337 };
745 first_next = &diagnostics_nv; 338 first_next = &diagnostics_nv;
746 } 339 }
747 logical = vk::Device::Create(physical, queue_cis, extensions, first_next, dld);
748 340
749 is_integrated = properties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU; 341 is_blit_depth_stencil_supported = TestDepthStencilBlits();
750 is_virtual = properties.deviceType == VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU; 342 is_optimal_astc_supported = ComputeIsOptimalAstcSupported();
751 is_non_gpu = properties.deviceType == VK_PHYSICAL_DEVICE_TYPE_OTHER || 343 is_warp_potentially_bigger = !extensions.subgroup_size_control ||
752 properties.deviceType == VK_PHYSICAL_DEVICE_TYPE_CPU; 344 properties.subgroup_size_control.maxSubgroupSize > GuestWarpSize;
345
346 is_integrated = properties.properties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
347 is_virtual = properties.properties.deviceType == VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU;
348 is_non_gpu = properties.properties.deviceType == VK_PHYSICAL_DEVICE_TYPE_OTHER ||
349 properties.properties.deviceType == VK_PHYSICAL_DEVICE_TYPE_CPU;
350
351 supports_d24_depth =
352 IsFormatSupported(VK_FORMAT_D24_UNORM_S8_UINT,
353 VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT, FormatType::Optimal);
753 354
754 CollectPhysicalMemoryInfo(); 355 CollectPhysicalMemoryInfo();
755 CollectTelemetryParameters();
756 CollectToolingInfo(); 356 CollectToolingInfo();
757 357
758 if (driver_id == VK_DRIVER_ID_NVIDIA_PROPRIETARY_KHR) { 358 if (is_nvidia) {
759 const u32 nv_major_version = (properties.driverVersion >> 22) & 0x3ff; 359 const u32 nv_major_version = (properties.properties.driverVersion >> 22) & 0x3ff;
760
761 const auto arch = GetNvidiaArchitecture(physical, supported_extensions); 360 const auto arch = GetNvidiaArchitecture(physical, supported_extensions);
762 switch (arch) { 361 switch (arch) {
763 case NvidiaArchitecture::AmpereOrNewer: 362 case NvidiaArchitecture::AmpereOrNewer:
764 LOG_WARNING(Render_Vulkan, "Blacklisting Ampere devices from float16 math"); 363 LOG_WARNING(Render_Vulkan, "Ampere and newer have broken float16 math");
765 is_float16_supported = false; 364 features.shader_float16_int8.shaderFloat16 = false;
766 break; 365 break;
767 case NvidiaArchitecture::Turing: 366 case NvidiaArchitecture::Turing:
768 break; 367 break;
769 case NvidiaArchitecture::VoltaOrOlder: 368 case NvidiaArchitecture::VoltaOrOlder:
770 if (nv_major_version < 527) { 369 if (nv_major_version < 527) {
771 LOG_WARNING(Render_Vulkan, 370 LOG_WARNING(Render_Vulkan, "Volta and older have broken VK_KHR_push_descriptor");
772 "Blacklisting Volta and older from VK_KHR_push_descriptor"); 371 extensions.push_descriptor = false;
773 khr_push_descriptor = false; 372 loaded_extensions.erase(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
774 } 373 }
775 break; 374 break;
776 } 375 }
@@ -779,75 +378,75 @@ Device::Device(VkInstance instance_, vk::PhysicalDevice physical_, VkSurfaceKHR
779 cant_blit_msaa = true; 378 cant_blit_msaa = true;
780 } 379 }
781 } 380 }
782 const bool is_radv = driver_id == VK_DRIVER_ID_MESA_RADV; 381 if (extensions.extended_dynamic_state && is_radv) {
783 if (ext_extended_dynamic_state && is_radv) {
784 // Mask driver version variant 382 // Mask driver version variant
785 const u32 version = (properties.driverVersion << 3) >> 3; 383 const u32 version = (properties.properties.driverVersion << 3) >> 3;
786 if (version < VK_MAKE_API_VERSION(0, 21, 2, 0)) { 384 if (version < VK_MAKE_API_VERSION(0, 21, 2, 0)) {
787 LOG_WARNING(Render_Vulkan, 385 LOG_WARNING(Render_Vulkan,
788 "RADV versions older than 21.2 have broken VK_EXT_extended_dynamic_state"); 386 "RADV versions older than 21.2 have broken VK_EXT_extended_dynamic_state");
789 ext_extended_dynamic_state = false; 387 extensions.extended_dynamic_state = false;
388 loaded_extensions.erase(VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME);
790 } 389 }
791 } 390 }
792 if (ext_vertex_input_dynamic_state && is_radv) { 391 if (extensions.extended_dynamic_state2 && is_radv) {
392 const u32 version = (properties.properties.driverVersion << 3) >> 3;
393 if (version < VK_MAKE_API_VERSION(0, 22, 3, 1)) {
394 LOG_WARNING(
395 Render_Vulkan,
396 "RADV versions older than 22.3.1 have broken VK_EXT_extended_dynamic_state2");
397 features.extended_dynamic_state2.extendedDynamicState2 = false;
398 features.extended_dynamic_state2.extendedDynamicState2LogicOp = false;
399 features.extended_dynamic_state2.extendedDynamicState2PatchControlPoints = false;
400 extensions.extended_dynamic_state2 = false;
401 loaded_extensions.erase(VK_EXT_EXTENDED_DYNAMIC_STATE_2_EXTENSION_NAME);
402 }
403 }
404 if (extensions.vertex_input_dynamic_state && is_radv) {
793 // TODO(ameerj): Blacklist only offending driver versions 405 // TODO(ameerj): Blacklist only offending driver versions
794 // TODO(ameerj): Confirm if RDNA1 is affected 406 // TODO(ameerj): Confirm if RDNA1 is affected
795 const bool is_rdna2 = 407 const bool is_rdna2 =
796 IsExtensionSupported(supported_extensions, VK_KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME); 408 supported_extensions.contains(VK_KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME);
797 if (is_rdna2) { 409 if (is_rdna2) {
798 LOG_WARNING(Render_Vulkan, 410 LOG_WARNING(Render_Vulkan,
799 "RADV has broken VK_EXT_vertex_input_dynamic_state on RDNA2 hardware"); 411 "RADV has broken VK_EXT_vertex_input_dynamic_state on RDNA2 hardware");
800 ext_vertex_input_dynamic_state = false; 412 extensions.vertex_input_dynamic_state = false;
413 loaded_extensions.erase(VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_EXTENSION_NAME);
801 } 414 }
802 } 415 }
803 if (ext_extended_dynamic_state_2 && is_radv) {
804 const u32 version = (properties.driverVersion << 3) >> 3;
805 if (version < VK_MAKE_API_VERSION(0, 22, 3, 1)) {
806 LOG_WARNING(
807 Render_Vulkan,
808 "RADV versions older than 22.3.1 have broken VK_EXT_extended_dynamic_state2");
809 ext_extended_dynamic_state_2 = false;
810 ext_extended_dynamic_state_2_extra = false;
811 }
812 }
813 sets_per_pool = 64;
814 416
815 const bool is_amd = 417 sets_per_pool = 64;
816 driver_id == VK_DRIVER_ID_AMD_PROPRIETARY || driver_id == VK_DRIVER_ID_AMD_OPEN_SOURCE; 418 if (is_amd_driver) {
817 if (is_amd) {
818 // AMD drivers need a higher amount of Sets per Pool in certain circunstances like in XC2. 419 // AMD drivers need a higher amount of Sets per Pool in certain circunstances like in XC2.
819 sets_per_pool = 96; 420 sets_per_pool = 96;
820 // Disable VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT on AMD GCN4 and lower as it is broken. 421 // Disable VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT on AMD GCN4 and lower as it is broken.
821 if (!is_float16_supported) { 422 if (!features.shader_float16_int8.shaderFloat16) {
822 LOG_WARNING( 423 LOG_WARNING(Render_Vulkan,
823 Render_Vulkan, 424 "AMD GCN4 and earlier have broken VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT");
824 "AMD GCN4 and earlier do not properly support VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT");
825 has_broken_cube_compatibility = true; 425 has_broken_cube_compatibility = true;
826 } 426 }
827 } 427 }
828 const bool is_amd_or_radv = is_amd || is_radv; 428 if (extensions.sampler_filter_minmax && is_amd) {
829 if (ext_sampler_filter_minmax && is_amd_or_radv) {
830 // Disable ext_sampler_filter_minmax on AMD GCN4 and lower as it is broken. 429 // Disable ext_sampler_filter_minmax on AMD GCN4 and lower as it is broken.
831 if (!is_float16_supported) { 430 if (!features.shader_float16_int8.shaderFloat16) {
832 LOG_WARNING(Render_Vulkan, 431 LOG_WARNING(Render_Vulkan,
833 "Blacklisting AMD GCN4 and earlier for VK_EXT_sampler_filter_minmax"); 432 "AMD GCN4 and earlier have broken VK_EXT_sampler_filter_minmax");
834 ext_sampler_filter_minmax = false; 433 extensions.sampler_filter_minmax = false;
434 loaded_extensions.erase(VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME);
835 } 435 }
836 } 436 }
837 437
838 const bool is_intel_windows = driver_id == VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS; 438 if (extensions.vertex_input_dynamic_state && is_intel_windows) {
839 const bool is_intel_anv = driver_id == VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA; 439 const u32 version = (properties.properties.driverVersion << 3) >> 3;
840 if (ext_vertex_input_dynamic_state && is_intel_windows) {
841 const u32 version = (properties.driverVersion << 3) >> 3;
842 if (version < VK_MAKE_API_VERSION(27, 20, 100, 0)) { 440 if (version < VK_MAKE_API_VERSION(27, 20, 100, 0)) {
843 LOG_WARNING(Render_Vulkan, "Blacklisting Intel for VK_EXT_vertex_input_dynamic_state"); 441 LOG_WARNING(Render_Vulkan, "Intel has broken VK_EXT_vertex_input_dynamic_state");
844 ext_vertex_input_dynamic_state = false; 442 extensions.vertex_input_dynamic_state = false;
443 loaded_extensions.erase(VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_EXTENSION_NAME);
845 } 444 }
846 } 445 }
847 if (is_float16_supported && is_intel_windows) { 446 if (features.shader_float16_int8.shaderFloat16 && is_intel_windows) {
848 // Intel's compiler crashes when using fp16 on Astral Chain, disable it for the time being. 447 // Intel's compiler crashes when using fp16 on Astral Chain, disable it for the time being.
849 LOG_WARNING(Render_Vulkan, "Blacklisting Intel proprietary from float16 math"); 448 LOG_WARNING(Render_Vulkan, "Intel has broken float16 math");
850 is_float16_supported = false; 449 features.shader_float16_int8.shaderFloat16 = false;
851 } 450 }
852 if (is_intel_windows) { 451 if (is_intel_windows) {
853 LOG_WARNING(Render_Vulkan, "Intel proprietary drivers do not support MSAA image blits"); 452 LOG_WARNING(Render_Vulkan, "Intel proprietary drivers do not support MSAA image blits");
@@ -857,10 +456,17 @@ Device::Device(VkInstance instance_, vk::PhysicalDevice physical_, VkSurfaceKHR
857 LOG_WARNING(Render_Vulkan, "ANV driver does not support native BGR format"); 456 LOG_WARNING(Render_Vulkan, "ANV driver does not support native BGR format");
858 must_emulate_bgr565 = true; 457 must_emulate_bgr565 = true;
859 } 458 }
459 if (is_mvk) {
460 LOG_WARNING(Render_Vulkan,
461 "MVK driver breaks when using more than 16 vertex attributes/bindings");
462 properties.properties.limits.maxVertexInputAttributes =
463 std::min(properties.properties.limits.maxVertexInputAttributes, 16U);
464 properties.properties.limits.maxVertexInputBindings =
465 std::min(properties.properties.limits.maxVertexInputBindings, 16U);
466 }
860 467
861 supports_d24_depth = 468 logical = vk::Device::Create(physical, queue_cis, ExtensionListForVulkan(loaded_extensions),
862 IsFormatSupported(VK_FORMAT_D24_UNORM_S8_UINT, 469 first_next, dld);
863 VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT, FormatType::Optimal);
864 470
865 graphics_queue = logical.GetQueue(graphics_family); 471 graphics_queue = logical.GetQueue(graphics_family);
866 present_queue = logical.GetQueue(present_family); 472 present_queue = logical.GetQueue(present_family);
@@ -915,7 +521,7 @@ void Device::SaveShader(std::span<const u32> spirv) const {
915 } 521 }
916} 522}
917 523
918bool Device::IsOptimalAstcSupported(const VkPhysicalDeviceFeatures& features) const { 524bool Device::ComputeIsOptimalAstcSupported() const {
919 // Disable for now to avoid converting ASTC twice. 525 // Disable for now to avoid converting ASTC twice.
920 static constexpr std::array astc_formats = { 526 static constexpr std::array astc_formats = {
921 VK_FORMAT_ASTC_4x4_UNORM_BLOCK, VK_FORMAT_ASTC_4x4_SRGB_BLOCK, 527 VK_FORMAT_ASTC_4x4_UNORM_BLOCK, VK_FORMAT_ASTC_4x4_SRGB_BLOCK,
@@ -933,7 +539,7 @@ bool Device::IsOptimalAstcSupported(const VkPhysicalDeviceFeatures& features) co
933 VK_FORMAT_ASTC_12x10_UNORM_BLOCK, VK_FORMAT_ASTC_12x10_SRGB_BLOCK, 539 VK_FORMAT_ASTC_12x10_UNORM_BLOCK, VK_FORMAT_ASTC_12x10_SRGB_BLOCK,
934 VK_FORMAT_ASTC_12x12_UNORM_BLOCK, VK_FORMAT_ASTC_12x12_SRGB_BLOCK, 540 VK_FORMAT_ASTC_12x12_UNORM_BLOCK, VK_FORMAT_ASTC_12x12_SRGB_BLOCK,
935 }; 541 };
936 if (!features.textureCompressionASTC_LDR) { 542 if (!features.features.textureCompressionASTC_LDR) {
937 return false; 543 return false;
938 } 544 }
939 const auto format_feature_usage{ 545 const auto format_feature_usage{
@@ -971,7 +577,7 @@ bool Device::IsFormatSupported(VkFormat wanted_format, VkFormatFeatureFlags want
971} 577}
972 578
973std::string Device::GetDriverName() const { 579std::string Device::GetDriverName() const {
974 switch (driver_id) { 580 switch (properties.driver.driverID) {
975 case VK_DRIVER_ID_AMD_PROPRIETARY: 581 case VK_DRIVER_ID_AMD_PROPRIETARY:
976 return "AMD"; 582 return "AMD";
977 case VK_DRIVER_ID_AMD_OPEN_SOURCE: 583 case VK_DRIVER_ID_AMD_OPEN_SOURCE:
@@ -987,522 +593,336 @@ std::string Device::GetDriverName() const {
987 case VK_DRIVER_ID_MESA_LLVMPIPE: 593 case VK_DRIVER_ID_MESA_LLVMPIPE:
988 return "LAVAPIPE"; 594 return "LAVAPIPE";
989 default: 595 default:
990 return vendor_name; 596 return properties.driver.driverName;
991 } 597 }
992} 598}
993 599
994bool Device::ShouldBoostClocks() const { 600bool Device::ShouldBoostClocks() const {
601 const auto driver_id = properties.driver.driverID;
602 const auto vendor_id = properties.properties.vendorID;
603 const auto device_id = properties.properties.deviceID;
604
995 const bool validated_driver = 605 const bool validated_driver =
996 driver_id == VK_DRIVER_ID_AMD_PROPRIETARY || driver_id == VK_DRIVER_ID_AMD_OPEN_SOURCE || 606 driver_id == VK_DRIVER_ID_AMD_PROPRIETARY || driver_id == VK_DRIVER_ID_AMD_OPEN_SOURCE ||
997 driver_id == VK_DRIVER_ID_MESA_RADV || driver_id == VK_DRIVER_ID_NVIDIA_PROPRIETARY || 607 driver_id == VK_DRIVER_ID_MESA_RADV || driver_id == VK_DRIVER_ID_NVIDIA_PROPRIETARY ||
998 driver_id == VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS || 608 driver_id == VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS ||
999 driver_id == VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA; 609 driver_id == VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA;
1000 610
1001 const bool is_steam_deck = properties.vendorID == 0x1002 && properties.deviceID == 0x163F; 611 const bool is_steam_deck = vendor_id == 0x1002 && device_id == 0x163F;
1002 612
1003 return validated_driver && !is_steam_deck; 613 return validated_driver && !is_steam_deck;
1004} 614}
1005 615
1006static std::vector<const char*> ExtensionsRequiredForInstanceVersion(u32 available_version) { 616bool Device::GetSuitability(bool requires_swapchain) {
1007 std::vector<const char*> extensions{REQUIRED_EXTENSIONS.begin(), REQUIRED_EXTENSIONS.end()}; 617 // Assume we will be suitable.
618 bool suitable = true;
1008 619
1009 if (available_version < VK_API_VERSION_1_2) { 620 // Configure properties.
1010 extensions.insert(extensions.end(), REQUIRED_EXTENSIONS_BEFORE_1_2.begin(), 621 properties.properties = physical.GetProperties();
1011 REQUIRED_EXTENSIONS_BEFORE_1_2.end());
1012 }
1013
1014 if (available_version < VK_API_VERSION_1_3) {
1015 extensions.insert(extensions.end(), REQUIRED_EXTENSIONS_BEFORE_1_3.begin(),
1016 REQUIRED_EXTENSIONS_BEFORE_1_3.end());
1017 }
1018
1019 return extensions;
1020}
1021 622
1022void Device::CheckSuitability(bool requires_swapchain) const { 623 // Set instance version.
1023 std::vector<const char*> required_extensions = 624 instance_version = properties.properties.apiVersion;
1024 ExtensionsRequiredForInstanceVersion(instance_version);
1025 std::vector<const char*> available_extensions;
1026 625
1027 if (requires_swapchain) { 626 // Minimum of API version 1.1 is required. (This is well-supported.)
1028 required_extensions.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME); 627 ASSERT(instance_version >= VK_API_VERSION_1_1);
1029 }
1030 628
629 // Get available extensions.
1031 auto extension_properties = physical.EnumerateDeviceExtensionProperties(); 630 auto extension_properties = physical.EnumerateDeviceExtensionProperties();
1032 631
632 // Get the set of supported extensions.
633 supported_extensions.clear();
1033 for (const VkExtensionProperties& property : extension_properties) { 634 for (const VkExtensionProperties& property : extension_properties) {
1034 available_extensions.push_back(property.extensionName); 635 supported_extensions.insert(property.extensionName);
1035 } 636 }
1036 637
1037 bool has_all_required_extensions = true; 638 // Generate list of extensions to load.
1038 for (const char* requirement_name : required_extensions) { 639 loaded_extensions.clear();
1039 const bool found =
1040 std::ranges::any_of(available_extensions, [&](const char* extension_name) {
1041 return std::strcmp(requirement_name, extension_name) == 0;
1042 });
1043 640
1044 if (!found) { 641#define EXTENSION(prefix, macro_name, var_name) \
1045 LOG_ERROR(Render_Vulkan, "Missing required extension: {}", requirement_name); 642 if (supported_extensions.contains(VK_##prefix##_##macro_name##_EXTENSION_NAME)) { \
1046 has_all_required_extensions = false; 643 loaded_extensions.insert(VK_##prefix##_##macro_name##_EXTENSION_NAME); \
1047 } 644 extensions.var_name = true; \
1048 } 645 }
1049 646#define FEATURE_EXTENSION(prefix, struct_name, macro_name, var_name) \
1050 if (!has_all_required_extensions) { 647 if (supported_extensions.contains(VK_##prefix##_##macro_name##_EXTENSION_NAME)) { \
1051 throw vk::Exception(VK_ERROR_EXTENSION_NOT_PRESENT); 648 loaded_extensions.insert(VK_##prefix##_##macro_name##_EXTENSION_NAME); \
649 extensions.var_name = true; \
1052 } 650 }
1053 651
1054 struct LimitTuple { 652 if (instance_version < VK_API_VERSION_1_2) {
1055 u32 minimum; 653 FOR_EACH_VK_FEATURE_1_2(FEATURE_EXTENSION);
1056 u32 value; 654 }
1057 const char* name; 655 if (instance_version < VK_API_VERSION_1_3) {
1058 }; 656 FOR_EACH_VK_FEATURE_1_3(FEATURE_EXTENSION);
1059 const VkPhysicalDeviceLimits& limits{properties.limits};
1060 const std::array limits_report{
1061 LimitTuple{65536, limits.maxUniformBufferRange, "maxUniformBufferRange"},
1062 LimitTuple{16, limits.maxViewports, "maxViewports"},
1063 LimitTuple{8, limits.maxColorAttachments, "maxColorAttachments"},
1064 LimitTuple{8, limits.maxClipDistances, "maxClipDistances"},
1065 };
1066 for (const auto& tuple : limits_report) {
1067 if (tuple.value < tuple.minimum) {
1068 LOG_ERROR(Render_Vulkan, "{} has to be {} or greater but it is {}", tuple.name,
1069 tuple.minimum, tuple.value);
1070 throw vk::Exception(VK_ERROR_FEATURE_NOT_PRESENT);
1071 }
1072 } 657 }
1073 VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures demote{};
1074 demote.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES;
1075 demote.pNext = nullptr;
1076 658
1077 VkPhysicalDeviceVariablePointerFeatures variable_pointers{}; 659 FOR_EACH_VK_FEATURE_EXT(FEATURE_EXTENSION);
1078 variable_pointers.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES; 660 FOR_EACH_VK_EXTENSION(EXTENSION);
1079 variable_pointers.pNext = &demote; 661#ifdef _WIN32
662 FOR_EACH_VK_EXTENSION_WIN32(EXTENSION);
663#endif
1080 664
1081 VkPhysicalDeviceRobustness2FeaturesEXT robustness2{}; 665#undef FEATURE_EXTENSION
1082 robustness2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT; 666#undef EXTENSION
1083 robustness2.pNext = &variable_pointers;
1084 667
1085 VkPhysicalDeviceTimelineSemaphoreFeatures timeline_semaphore{}; 668 // Some extensions are mandatory. Check those.
1086 timeline_semaphore.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES; 669#define CHECK_EXTENSION(extension_name) \
1087 timeline_semaphore.pNext = &robustness2; 670 if (!loaded_extensions.contains(extension_name)) { \
671 LOG_ERROR(Render_Vulkan, "Missing required extension {}", extension_name); \
672 suitable = false; \
673 }
1088 674
1089 VkPhysicalDevice16BitStorageFeatures bit16_storage{}; 675#define LOG_EXTENSION(extension_name) \
1090 bit16_storage.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES; 676 if (!loaded_extensions.contains(extension_name)) { \
1091 bit16_storage.pNext = &timeline_semaphore; 677 LOG_INFO(Render_Vulkan, "Device doesn't support extension {}", extension_name); \
678 }
1092 679
1093 VkPhysicalDevice8BitStorageFeatures bit8_storage{}; 680 FOR_EACH_VK_RECOMMENDED_EXTENSION(LOG_EXTENSION);
1094 bit8_storage.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES; 681 FOR_EACH_VK_MANDATORY_EXTENSION(CHECK_EXTENSION);
1095 bit8_storage.pNext = &bit16_storage; 682#ifdef _WIN32
683 FOR_EACH_VK_MANDATORY_EXTENSION_WIN32(CHECK_EXTENSION);
684#else
685 FOR_EACH_VK_MANDATORY_EXTENSION_GENERIC(CHECK_EXTENSION);
686#endif
1096 687
1097 VkPhysicalDeviceHostQueryResetFeatures host_query_reset{}; 688 if (requires_swapchain) {
1098 host_query_reset.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES; 689 CHECK_EXTENSION(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
1099 host_query_reset.pNext = &bit8_storage; 690 }
1100 691
1101 VkPhysicalDeviceShaderDrawParametersFeatures draw_parameters{}; 692#undef LOG_EXTENSION
1102 draw_parameters.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES; 693#undef CHECK_EXTENSION
1103 draw_parameters.pNext = &host_query_reset;
1104 694
1105 VkPhysicalDeviceFeatures2 features2{}; 695 // Generate the linked list of features to test.
1106 features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2; 696 features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
1107 features2.pNext = &draw_parameters;
1108 697
1109 physical.GetFeatures2(features2); 698 // Set next pointer.
699 void** next = &features2.pNext;
1110 700
1111 const VkPhysicalDeviceFeatures& features{features2.features}; 701 // Test all features we know about. If the feature is not available in core at our
1112 std::array feature_report{ 702 // current API version, and was not enabled by an extension, skip testing the feature.
1113 std::make_pair(features.robustBufferAccess, "robustBufferAccess"), 703 // We set the structure sType explicitly here as it is zeroed by the constructor.
1114 std::make_pair(features.vertexPipelineStoresAndAtomics, "vertexPipelineStoresAndAtomics"), 704#define FEATURE(prefix, struct_name, macro_name, var_name) \
1115 std::make_pair(features.imageCubeArray, "imageCubeArray"), 705 features.var_name.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_##macro_name##_FEATURES; \
1116 std::make_pair(features.independentBlend, "independentBlend"), 706 SetNext(next, features.var_name);
1117 std::make_pair(features.multiDrawIndirect, "multiDrawIndirect"),
1118 std::make_pair(features.drawIndirectFirstInstance, "drawIndirectFirstInstance"),
1119 std::make_pair(features.depthClamp, "depthClamp"),
1120 std::make_pair(features.samplerAnisotropy, "samplerAnisotropy"),
1121 std::make_pair(features.largePoints, "largePoints"),
1122 std::make_pair(features.multiViewport, "multiViewport"),
1123 std::make_pair(features.depthBiasClamp, "depthBiasClamp"),
1124 std::make_pair(features.fillModeNonSolid, "fillModeNonSolid"),
1125 std::make_pair(features.wideLines, "wideLines"),
1126 std::make_pair(features.geometryShader, "geometryShader"),
1127 std::make_pair(features.tessellationShader, "tessellationShader"),
1128 std::make_pair(features.sampleRateShading, "sampleRateShading"),
1129 std::make_pair(features.dualSrcBlend, "dualSrcBlend"),
1130 std::make_pair(features.logicOp, "logicOp"),
1131 std::make_pair(features.occlusionQueryPrecise, "occlusionQueryPrecise"),
1132 std::make_pair(features.fragmentStoresAndAtomics, "fragmentStoresAndAtomics"),
1133 std::make_pair(features.shaderImageGatherExtended, "shaderImageGatherExtended"),
1134 std::make_pair(features.shaderStorageImageWriteWithoutFormat,
1135 "shaderStorageImageWriteWithoutFormat"),
1136 std::make_pair(features.shaderClipDistance, "shaderClipDistance"),
1137 std::make_pair(features.shaderCullDistance, "shaderCullDistance"),
1138 std::make_pair(variable_pointers.variablePointers, "variablePointers"),
1139 std::make_pair(variable_pointers.variablePointersStorageBuffer,
1140 "variablePointersStorageBuffer"),
1141 std::make_pair(robustness2.robustBufferAccess2, "robustBufferAccess2"),
1142 std::make_pair(robustness2.robustImageAccess2, "robustImageAccess2"),
1143 std::make_pair(robustness2.nullDescriptor, "nullDescriptor"),
1144 std::make_pair(demote.shaderDemoteToHelperInvocation, "shaderDemoteToHelperInvocation"),
1145 std::make_pair(timeline_semaphore.timelineSemaphore, "timelineSemaphore"),
1146 std::make_pair(bit16_storage.storageBuffer16BitAccess, "storageBuffer16BitAccess"),
1147 std::make_pair(bit16_storage.uniformAndStorageBuffer16BitAccess,
1148 "uniformAndStorageBuffer16BitAccess"),
1149 std::make_pair(bit8_storage.storageBuffer8BitAccess, "storageBuffer8BitAccess"),
1150 std::make_pair(bit8_storage.uniformAndStorageBuffer8BitAccess,
1151 "uniformAndStorageBuffer8BitAccess"),
1152 std::make_pair(host_query_reset.hostQueryReset, "hostQueryReset"),
1153 std::make_pair(draw_parameters.shaderDrawParameters, "shaderDrawParameters"),
1154 };
1155 707
1156 bool has_all_required_features = true; 708#define EXT_FEATURE(prefix, struct_name, macro_name, var_name) \
1157 for (const auto& [is_supported, name] : feature_report) { 709 if (extensions.var_name) { \
1158 if (!is_supported) { 710 features.var_name.sType = \
1159 LOG_ERROR(Render_Vulkan, "Missing required feature: {}", name); 711 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_##macro_name##_FEATURES_##prefix; \
1160 has_all_required_features = false; 712 SetNext(next, features.var_name); \
1161 }
1162 } 713 }
1163 714
1164 if (!has_all_required_features) { 715 FOR_EACH_VK_FEATURE_1_1(FEATURE);
1165 throw vk::Exception(VK_ERROR_FEATURE_NOT_PRESENT); 716 FOR_EACH_VK_FEATURE_EXT(EXT_FEATURE);
717 if (instance_version >= VK_API_VERSION_1_2) {
718 FOR_EACH_VK_FEATURE_1_2(FEATURE);
719 } else {
720 FOR_EACH_VK_FEATURE_1_2(EXT_FEATURE);
1166 } 721 }
1167} 722 if (instance_version >= VK_API_VERSION_1_3) {
1168 723 FOR_EACH_VK_FEATURE_1_3(FEATURE);
1169std::vector<const char*> Device::LoadExtensions(bool requires_surface) { 724 } else {
1170 std::vector<const char*> extensions = ExtensionsRequiredForInstanceVersion(instance_version); 725 FOR_EACH_VK_FEATURE_1_3(EXT_FEATURE);
1171 if (requires_surface) {
1172 extensions.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
1173 } 726 }
1174 727
1175 bool has_khr_shader_float16_int8{}; 728#undef EXT_FEATURE
1176 bool has_khr_workgroup_memory_explicit_layout{}; 729#undef FEATURE
1177 bool has_khr_pipeline_executable_properties{};
1178 bool has_khr_image_format_list{};
1179 bool has_khr_swapchain_mutable_format{};
1180 bool has_ext_subgroup_size_control{};
1181 bool has_ext_transform_feedback{};
1182 bool has_ext_custom_border_color{};
1183 bool has_ext_extended_dynamic_state{};
1184 bool has_ext_extended_dynamic_state_2{};
1185 bool has_ext_extended_dynamic_state_3{};
1186 bool has_ext_shader_atomic_int64{};
1187 bool has_ext_provoking_vertex{};
1188 bool has_ext_vertex_input_dynamic_state{};
1189 bool has_ext_line_rasterization{};
1190 bool has_ext_primitive_topology_list_restart{};
1191 bool has_ext_depth_clip_control{};
1192 for (const std::string& extension : supported_extensions) {
1193 const auto test = [&](std::optional<std::reference_wrapper<bool>> status, const char* name,
1194 bool push) {
1195 if (extension != name) {
1196 return;
1197 }
1198 if (push) {
1199 extensions.push_back(name);
1200 }
1201 if (status) {
1202 status->get() = true;
1203 }
1204 };
1205 test(nv_viewport_swizzle, VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME, true);
1206 test(nv_viewport_array2, VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME, true);
1207 test(nv_geometry_shader_passthrough, VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME,
1208 true);
1209 test(khr_uniform_buffer_standard_layout,
1210 VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME, true);
1211 test(khr_spirv_1_4, VK_KHR_SPIRV_1_4_EXTENSION_NAME, true);
1212 test(khr_push_descriptor, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME, true);
1213 test(has_khr_shader_float16_int8, VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME, false);
1214 test(khr_draw_indirect_count, VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME, true);
1215 test(ext_depth_range_unrestricted, VK_EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME, true);
1216 test(ext_index_type_uint8, VK_EXT_INDEX_TYPE_UINT8_EXTENSION_NAME, true);
1217 test(has_ext_primitive_topology_list_restart,
1218 VK_EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_EXTENSION_NAME, true);
1219 test(ext_sampler_filter_minmax, VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME, true);
1220 test(ext_shader_viewport_index_layer, VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME,
1221 true);
1222 test(ext_tooling_info, VK_EXT_TOOLING_INFO_EXTENSION_NAME, true);
1223 test(ext_shader_stencil_export, VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME, true);
1224 test(ext_conservative_rasterization, VK_EXT_CONSERVATIVE_RASTERIZATION_EXTENSION_NAME,
1225 true);
1226 test(has_ext_depth_clip_control, VK_EXT_DEPTH_CLIP_CONTROL_EXTENSION_NAME, false);
1227 test(has_ext_transform_feedback, VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME, false);
1228 test(has_ext_custom_border_color, VK_EXT_CUSTOM_BORDER_COLOR_EXTENSION_NAME, false);
1229 test(has_ext_extended_dynamic_state, VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME, false);
1230 test(has_ext_extended_dynamic_state_2, VK_EXT_EXTENDED_DYNAMIC_STATE_2_EXTENSION_NAME,
1231 false);
1232 test(has_ext_extended_dynamic_state_3, VK_EXT_EXTENDED_DYNAMIC_STATE_3_EXTENSION_NAME,
1233 false);
1234 test(has_ext_subgroup_size_control, VK_EXT_SUBGROUP_SIZE_CONTROL_EXTENSION_NAME, true);
1235 test(has_ext_provoking_vertex, VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME, false);
1236 test(has_ext_vertex_input_dynamic_state, VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_EXTENSION_NAME,
1237 false);
1238 test(has_ext_shader_atomic_int64, VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME, false);
1239 test(has_khr_workgroup_memory_explicit_layout,
1240 VK_KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_EXTENSION_NAME, false);
1241 test(has_khr_image_format_list, VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME, false);
1242 test(has_khr_swapchain_mutable_format, VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_EXTENSION_NAME,
1243 false);
1244 test(has_ext_line_rasterization, VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME, false);
1245 test(ext_memory_budget, VK_EXT_MEMORY_BUDGET_EXTENSION_NAME, true);
1246 if (Settings::values.enable_nsight_aftermath) {
1247 test(nv_device_diagnostics_config, VK_NV_DEVICE_DIAGNOSTICS_CONFIG_EXTENSION_NAME,
1248 true);
1249 }
1250 if (Settings::values.renderer_shader_feedback) {
1251 test(has_khr_pipeline_executable_properties,
1252 VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME, false);
1253 }
1254 }
1255 VkPhysicalDeviceFeatures2 features{};
1256 features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
1257
1258 VkPhysicalDeviceProperties2 physical_properties{};
1259 physical_properties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
1260
1261 if (has_khr_shader_float16_int8) {
1262 VkPhysicalDeviceShaderFloat16Int8Features float16_int8_features;
1263 float16_int8_features.sType =
1264 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES;
1265 float16_int8_features.pNext = nullptr;
1266 features.pNext = &float16_int8_features;
1267
1268 physical.GetFeatures2(features);
1269 is_float16_supported = float16_int8_features.shaderFloat16;
1270 is_int8_supported = float16_int8_features.shaderInt8;
1271 extensions.push_back(VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME);
1272 }
1273 if (has_ext_subgroup_size_control) {
1274 VkPhysicalDeviceSubgroupSizeControlFeatures subgroup_features;
1275 subgroup_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES;
1276 subgroup_features.pNext = nullptr;
1277 features.pNext = &subgroup_features;
1278 physical.GetFeatures2(features);
1279
1280 VkPhysicalDeviceSubgroupSizeControlProperties subgroup_properties;
1281 subgroup_properties.sType =
1282 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES;
1283 subgroup_properties.pNext = nullptr;
1284 physical_properties.pNext = &subgroup_properties;
1285 physical.GetProperties2(physical_properties);
1286 730
1287 is_warp_potentially_bigger = subgroup_properties.maxSubgroupSize > GuestWarpSize; 731 // Perform the feature test.
732 physical.GetFeatures2(features2);
733 features.features = features2.features;
1288 734
1289 if (subgroup_features.subgroupSizeControl && 735 // Some features are mandatory. Check those.
1290 subgroup_properties.minSubgroupSize <= GuestWarpSize && 736#define CHECK_FEATURE(feature, name) \
1291 subgroup_properties.maxSubgroupSize >= GuestWarpSize) { 737 if (!features.feature.name) { \
1292 extensions.push_back(VK_EXT_SUBGROUP_SIZE_CONTROL_EXTENSION_NAME); 738 LOG_ERROR(Render_Vulkan, "Missing required feature {}", #name); \
1293 guest_warp_stages = subgroup_properties.requiredSubgroupSizeStages; 739 suitable = false; \
1294 ext_subgroup_size_control = true;
1295 }
1296 } else {
1297 is_warp_potentially_bigger = true;
1298 }
1299 if (has_ext_provoking_vertex) {
1300 VkPhysicalDeviceProvokingVertexFeaturesEXT provoking_vertex;
1301 provoking_vertex.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT;
1302 provoking_vertex.pNext = nullptr;
1303 features.pNext = &provoking_vertex;
1304 physical.GetFeatures2(features);
1305
1306 if (provoking_vertex.provokingVertexLast &&
1307 provoking_vertex.transformFeedbackPreservesProvokingVertex) {
1308 extensions.push_back(VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME);
1309 ext_provoking_vertex = true;
1310 }
1311 } 740 }
1312 if (has_ext_vertex_input_dynamic_state) {
1313 VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT vertex_input;
1314 vertex_input.sType =
1315 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT;
1316 vertex_input.pNext = nullptr;
1317 features.pNext = &vertex_input;
1318 physical.GetFeatures2(features);
1319
1320 if (vertex_input.vertexInputDynamicState) {
1321 extensions.push_back(VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_EXTENSION_NAME);
1322 ext_vertex_input_dynamic_state = true;
1323 }
1324 }
1325 if (has_ext_shader_atomic_int64) {
1326 VkPhysicalDeviceShaderAtomicInt64Features atomic_int64;
1327 atomic_int64.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES;
1328 atomic_int64.pNext = nullptr;
1329 features.pNext = &atomic_int64;
1330 physical.GetFeatures2(features);
1331
1332 if (atomic_int64.shaderBufferInt64Atomics && atomic_int64.shaderSharedInt64Atomics) {
1333 extensions.push_back(VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME);
1334 ext_shader_atomic_int64 = true;
1335 }
1336 }
1337 if (has_ext_transform_feedback) {
1338 VkPhysicalDeviceTransformFeedbackFeaturesEXT tfb_features;
1339 tfb_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT;
1340 tfb_features.pNext = nullptr;
1341 features.pNext = &tfb_features;
1342 physical.GetFeatures2(features);
1343
1344 VkPhysicalDeviceTransformFeedbackPropertiesEXT tfb_properties;
1345 tfb_properties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT;
1346 tfb_properties.pNext = nullptr;
1347 physical_properties.pNext = &tfb_properties;
1348 physical.GetProperties2(physical_properties);
1349 741
1350 if (tfb_features.transformFeedback && tfb_features.geometryStreams && 742#define LOG_FEATURE(feature, name) \
1351 tfb_properties.maxTransformFeedbackStreams >= 4 && 743 if (!features.feature.name) { \
1352 tfb_properties.maxTransformFeedbackBuffers && tfb_properties.transformFeedbackQueries && 744 LOG_INFO(Render_Vulkan, "Device doesn't support feature {}", #name); \
1353 tfb_properties.transformFeedbackDraw) {
1354 extensions.push_back(VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME);
1355 ext_transform_feedback = true;
1356 }
1357 }
1358 if (has_ext_custom_border_color) {
1359 VkPhysicalDeviceCustomBorderColorFeaturesEXT border_features;
1360 border_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT;
1361 border_features.pNext = nullptr;
1362 features.pNext = &border_features;
1363 physical.GetFeatures2(features);
1364
1365 if (border_features.customBorderColors && border_features.customBorderColorWithoutFormat) {
1366 extensions.push_back(VK_EXT_CUSTOM_BORDER_COLOR_EXTENSION_NAME);
1367 ext_custom_border_color = true;
1368 }
1369 }
1370 if (has_ext_extended_dynamic_state) {
1371 VkPhysicalDeviceExtendedDynamicStateFeaturesEXT extended_dynamic_state;
1372 extended_dynamic_state.sType =
1373 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT;
1374 extended_dynamic_state.pNext = nullptr;
1375 features.pNext = &extended_dynamic_state;
1376 physical.GetFeatures2(features);
1377
1378 if (extended_dynamic_state.extendedDynamicState) {
1379 extensions.push_back(VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME);
1380 ext_extended_dynamic_state = true;
1381 }
1382 }
1383 if (has_ext_extended_dynamic_state_2) {
1384 VkPhysicalDeviceExtendedDynamicState2FeaturesEXT extended_dynamic_state_2;
1385 extended_dynamic_state_2.sType =
1386 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT;
1387 extended_dynamic_state_2.pNext = nullptr;
1388 features.pNext = &extended_dynamic_state_2;
1389 physical.GetFeatures2(features);
1390
1391 if (extended_dynamic_state_2.extendedDynamicState2) {
1392 extensions.push_back(VK_EXT_EXTENDED_DYNAMIC_STATE_2_EXTENSION_NAME);
1393 ext_extended_dynamic_state_2 = true;
1394 ext_extended_dynamic_state_2_extra =
1395 extended_dynamic_state_2.extendedDynamicState2LogicOp;
1396 }
1397 } 745 }
1398 if (has_ext_extended_dynamic_state_3) { 746
1399 VkPhysicalDeviceExtendedDynamicState3FeaturesEXT extended_dynamic_state_3; 747 FOR_EACH_VK_RECOMMENDED_FEATURE(LOG_FEATURE);
1400 extended_dynamic_state_3.sType = 748 FOR_EACH_VK_MANDATORY_FEATURE(CHECK_FEATURE);
1401 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_FEATURES_EXT; 749
1402 extended_dynamic_state_3.pNext = nullptr; 750#undef LOG_FEATURE
1403 features.pNext = &extended_dynamic_state_3; 751#undef CHECK_FEATURE
1404 physical.GetFeatures2(features); 752
1405 753 // Generate linked list of properties.
1406 ext_extended_dynamic_state_3_blend = 754 properties2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
1407 extended_dynamic_state_3.extendedDynamicState3ColorBlendEnable && 755
1408 extended_dynamic_state_3.extendedDynamicState3ColorBlendEquation && 756 // Set next pointer.
1409 extended_dynamic_state_3.extendedDynamicState3ColorWriteMask; 757 next = &properties2.pNext;
1410 758
1411 ext_extended_dynamic_state_3_enables = 759 // Get driver info.
1412 extended_dynamic_state_3.extendedDynamicState3DepthClampEnable && 760 properties.driver.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES;
1413 extended_dynamic_state_3.extendedDynamicState3LogicOpEnable; 761 SetNext(next, properties.driver);
1414 762
1415 ext_extended_dynamic_state_3 = 763 // Retrieve relevant extension properties.
1416 ext_extended_dynamic_state_3_blend || ext_extended_dynamic_state_3_enables; 764 if (extensions.shader_float_controls) {
1417 if (ext_extended_dynamic_state_3) { 765 properties.float_controls.sType =
1418 extensions.push_back(VK_EXT_EXTENDED_DYNAMIC_STATE_3_EXTENSION_NAME); 766 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES;
1419 } 767 SetNext(next, properties.float_controls);
1420 } 768 }
1421 if (has_ext_line_rasterization) { 769 if (extensions.push_descriptor) {
1422 VkPhysicalDeviceLineRasterizationFeaturesEXT line_raster; 770 properties.push_descriptor.sType =
1423 line_raster.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT; 771 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR;
1424 line_raster.pNext = nullptr; 772 SetNext(next, properties.push_descriptor);
1425 features.pNext = &line_raster;
1426 physical.GetFeatures2(features);
1427 if (line_raster.rectangularLines && line_raster.smoothLines) {
1428 extensions.push_back(VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME);
1429 ext_line_rasterization = true;
1430 }
1431 } 773 }
1432 if (has_ext_depth_clip_control) { 774 if (extensions.subgroup_size_control) {
1433 VkPhysicalDeviceDepthClipControlFeaturesEXT depth_clip_control_features; 775 properties.subgroup_size_control.sType =
1434 depth_clip_control_features.sType = 776 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES;
1435 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT; 777 SetNext(next, properties.subgroup_size_control);
1436 depth_clip_control_features.pNext = nullptr;
1437 features.pNext = &depth_clip_control_features;
1438 physical.GetFeatures2(features);
1439
1440 if (depth_clip_control_features.depthClipControl) {
1441 extensions.push_back(VK_EXT_DEPTH_CLIP_CONTROL_EXTENSION_NAME);
1442 ext_depth_clip_control = true;
1443 }
1444 } 778 }
1445 if (has_khr_workgroup_memory_explicit_layout) { 779 if (extensions.transform_feedback) {
1446 VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR layout; 780 properties.transform_feedback.sType =
1447 layout.sType = 781 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT;
1448 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR; 782 SetNext(next, properties.transform_feedback);
1449 layout.pNext = nullptr;
1450 features.pNext = &layout;
1451 physical.GetFeatures2(features);
1452
1453 if (layout.workgroupMemoryExplicitLayout &&
1454 layout.workgroupMemoryExplicitLayout8BitAccess &&
1455 layout.workgroupMemoryExplicitLayout16BitAccess &&
1456 layout.workgroupMemoryExplicitLayoutScalarBlockLayout) {
1457 extensions.push_back(VK_KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_EXTENSION_NAME);
1458 khr_workgroup_memory_explicit_layout = true;
1459 }
1460 } 783 }
1461 if (has_khr_pipeline_executable_properties) { 784
1462 VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR executable_properties; 785 // Perform the property fetch.
1463 executable_properties.sType = 786 physical.GetProperties2(properties2);
1464 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR; 787 properties.properties = properties2.properties;
1465 executable_properties.pNext = nullptr; 788
1466 features.pNext = &executable_properties; 789 // Unload extensions if feature support is insufficient.
1467 physical.GetFeatures2(features); 790 RemoveUnsuitableExtensions();
1468 791
1469 if (executable_properties.pipelineExecutableInfo) { 792 // Check limits.
1470 extensions.push_back(VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME); 793 struct Limit {
1471 khr_pipeline_executable_properties = true; 794 u32 minimum;
795 u32 value;
796 const char* name;
797 };
798
799 const VkPhysicalDeviceLimits& limits{properties.properties.limits};
800 const std::array limits_report{
801 Limit{65536, limits.maxUniformBufferRange, "maxUniformBufferRange"},
802 Limit{16, limits.maxViewports, "maxViewports"},
803 Limit{8, limits.maxColorAttachments, "maxColorAttachments"},
804 Limit{8, limits.maxClipDistances, "maxClipDistances"},
805 };
806
807 for (const auto& [min, value, name] : limits_report) {
808 if (value < min) {
809 LOG_ERROR(Render_Vulkan, "{} has to be {} or greater but it is {}", name, min, value);
810 suitable = false;
1472 } 811 }
1473 } 812 }
1474 if (has_ext_primitive_topology_list_restart) {
1475 VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT primitive_topology_list_restart{};
1476 primitive_topology_list_restart.sType =
1477 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT;
1478 primitive_topology_list_restart.pNext = nullptr;
1479 features.pNext = &primitive_topology_list_restart;
1480 physical.GetFeatures2(features);
1481
1482 is_topology_list_restart_supported =
1483 primitive_topology_list_restart.primitiveTopologyListRestart;
1484 is_patch_list_restart_supported =
1485 primitive_topology_list_restart.primitiveTopologyPatchListRestart;
1486 }
1487 if (requires_surface && has_khr_image_format_list && has_khr_swapchain_mutable_format) {
1488 extensions.push_back(VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME);
1489 extensions.push_back(VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_EXTENSION_NAME);
1490 khr_swapchain_mutable_format = true;
1491 }
1492 if (khr_push_descriptor) {
1493 VkPhysicalDevicePushDescriptorPropertiesKHR push_descriptor;
1494 push_descriptor.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR;
1495 push_descriptor.pNext = nullptr;
1496 813
1497 physical_properties.pNext = &push_descriptor; 814 // Return whether we were suitable.
1498 physical.GetProperties2(physical_properties); 815 return suitable;
816}
1499 817
1500 max_push_descriptors = push_descriptor.maxPushDescriptors; 818void Device::RemoveExtensionIfUnsuitable(bool is_suitable, const std::string& extension_name) {
819 if (loaded_extensions.contains(extension_name) && !is_suitable) {
820 LOG_WARNING(Render_Vulkan, "Removing unsuitable extension {}", extension_name);
821 loaded_extensions.erase(extension_name);
1501 } 822 }
823}
1502 824
1503 has_null_descriptor = true; 825void Device::RemoveUnsuitableExtensions() {
1504 826 // VK_EXT_custom_border_color
1505 return extensions; 827 extensions.custom_border_color = features.custom_border_color.customBorderColors &&
828 features.custom_border_color.customBorderColorWithoutFormat;
829 RemoveExtensionIfUnsuitable(extensions.custom_border_color,
830 VK_EXT_CUSTOM_BORDER_COLOR_EXTENSION_NAME);
831
832 // VK_EXT_depth_clip_control
833 extensions.depth_clip_control = features.depth_clip_control.depthClipControl;
834 RemoveExtensionIfUnsuitable(extensions.depth_clip_control,
835 VK_EXT_DEPTH_CLIP_CONTROL_EXTENSION_NAME);
836
837 // VK_EXT_extended_dynamic_state
838 extensions.extended_dynamic_state = features.extended_dynamic_state.extendedDynamicState;
839 RemoveExtensionIfUnsuitable(extensions.extended_dynamic_state,
840 VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME);
841
842 // VK_EXT_extended_dynamic_state2
843 extensions.extended_dynamic_state2 = features.extended_dynamic_state2.extendedDynamicState2;
844 RemoveExtensionIfUnsuitable(extensions.extended_dynamic_state2,
845 VK_EXT_EXTENDED_DYNAMIC_STATE_2_EXTENSION_NAME);
846
847 // VK_EXT_extended_dynamic_state3
848 dynamic_state3_blending =
849 features.extended_dynamic_state3.extendedDynamicState3ColorBlendEnable &&
850 features.extended_dynamic_state3.extendedDynamicState3ColorBlendEquation &&
851 features.extended_dynamic_state3.extendedDynamicState3ColorWriteMask;
852 dynamic_state3_enables =
853 features.extended_dynamic_state3.extendedDynamicState3DepthClampEnable &&
854 features.extended_dynamic_state3.extendedDynamicState3LogicOpEnable;
855
856 extensions.extended_dynamic_state3 = dynamic_state3_blending || dynamic_state3_enables;
857 dynamic_state3_blending = dynamic_state3_blending && extensions.extended_dynamic_state3;
858 dynamic_state3_enables = dynamic_state3_enables && extensions.extended_dynamic_state3;
859 RemoveExtensionIfUnsuitable(extensions.extended_dynamic_state3,
860 VK_EXT_EXTENDED_DYNAMIC_STATE_3_EXTENSION_NAME);
861
862 // VK_EXT_provoking_vertex
863 extensions.provoking_vertex =
864 features.provoking_vertex.provokingVertexLast &&
865 features.provoking_vertex.transformFeedbackPreservesProvokingVertex;
866 RemoveExtensionIfUnsuitable(extensions.provoking_vertex,
867 VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME);
868
869 // VK_KHR_shader_atomic_int64
870 extensions.shader_atomic_int64 = features.shader_atomic_int64.shaderBufferInt64Atomics &&
871 features.shader_atomic_int64.shaderSharedInt64Atomics;
872 RemoveExtensionIfUnsuitable(extensions.shader_atomic_int64,
873 VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME);
874
875 // VK_EXT_shader_demote_to_helper_invocation
876 extensions.shader_demote_to_helper_invocation =
877 features.shader_demote_to_helper_invocation.shaderDemoteToHelperInvocation;
878 RemoveExtensionIfUnsuitable(extensions.shader_demote_to_helper_invocation,
879 VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_EXTENSION_NAME);
880
881 // VK_EXT_subgroup_size_control
882 extensions.subgroup_size_control =
883 features.subgroup_size_control.subgroupSizeControl &&
884 properties.subgroup_size_control.minSubgroupSize <= GuestWarpSize &&
885 properties.subgroup_size_control.maxSubgroupSize >= GuestWarpSize;
886 RemoveExtensionIfUnsuitable(extensions.subgroup_size_control,
887 VK_EXT_SUBGROUP_SIZE_CONTROL_EXTENSION_NAME);
888
889 // VK_EXT_transform_feedback
890 extensions.transform_feedback =
891 features.transform_feedback.transformFeedback &&
892 features.transform_feedback.geometryStreams &&
893 properties.transform_feedback.maxTransformFeedbackStreams >= 4 &&
894 properties.transform_feedback.maxTransformFeedbackBuffers > 0 &&
895 properties.transform_feedback.transformFeedbackQueries &&
896 properties.transform_feedback.transformFeedbackDraw;
897 RemoveExtensionIfUnsuitable(extensions.transform_feedback,
898 VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME);
899
900 // VK_EXT_vertex_input_dynamic_state
901 extensions.vertex_input_dynamic_state =
902 features.vertex_input_dynamic_state.vertexInputDynamicState;
903 RemoveExtensionIfUnsuitable(extensions.vertex_input_dynamic_state,
904 VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_EXTENSION_NAME);
905
906 // VK_KHR_pipeline_executable_properties
907 if (Settings::values.renderer_shader_feedback.GetValue()) {
908 extensions.pipeline_executable_properties =
909 features.pipeline_executable_properties.pipelineExecutableInfo;
910 RemoveExtensionIfUnsuitable(extensions.pipeline_executable_properties,
911 VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME);
912 } else {
913 extensions.pipeline_executable_properties = false;
914 loaded_extensions.erase(VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME);
915 }
916
917 // VK_KHR_workgroup_memory_explicit_layout
918 extensions.workgroup_memory_explicit_layout =
919 features.features.shaderInt16 &&
920 features.workgroup_memory_explicit_layout.workgroupMemoryExplicitLayout &&
921 features.workgroup_memory_explicit_layout.workgroupMemoryExplicitLayout8BitAccess &&
922 features.workgroup_memory_explicit_layout.workgroupMemoryExplicitLayout16BitAccess &&
923 features.workgroup_memory_explicit_layout.workgroupMemoryExplicitLayoutScalarBlockLayout;
924 RemoveExtensionIfUnsuitable(extensions.workgroup_memory_explicit_layout,
925 VK_KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_EXTENSION_NAME);
1506} 926}
1507 927
1508void Device::SetupFamilies(VkSurfaceKHR surface) { 928void Device::SetupFamilies(VkSurfaceKHR surface) {
@@ -1540,53 +960,6 @@ void Device::SetupFamilies(VkSurfaceKHR surface) {
1540 } 960 }
1541} 961}
1542 962
1543void Device::SetupFeatures() {
1544 const VkPhysicalDeviceFeatures features{physical.GetFeatures()};
1545 is_depth_bounds_supported = features.depthBounds;
1546 is_formatless_image_load_supported = features.shaderStorageImageReadWithoutFormat;
1547 is_shader_float64_supported = features.shaderFloat64;
1548 is_shader_int64_supported = features.shaderInt64;
1549 is_shader_int16_supported = features.shaderInt16;
1550 is_shader_storage_image_multisample = features.shaderStorageImageMultisample;
1551 is_blit_depth_stencil_supported = TestDepthStencilBlits();
1552 is_optimal_astc_supported = IsOptimalAstcSupported(features);
1553
1554 const VkPhysicalDeviceLimits& limits{properties.limits};
1555 max_vertex_input_attributes = limits.maxVertexInputAttributes;
1556 max_vertex_input_bindings = limits.maxVertexInputBindings;
1557}
1558
1559void Device::SetupProperties() {
1560 float_controls.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES;
1561
1562 VkPhysicalDeviceProperties2KHR properties2{};
1563 properties2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
1564 properties2.pNext = &float_controls;
1565
1566 physical.GetProperties2(properties2);
1567}
1568
1569void Device::CollectTelemetryParameters() {
1570 VkPhysicalDeviceDriverProperties driver{
1571 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES,
1572 .pNext = nullptr,
1573 .driverID = {},
1574 .driverName = {},
1575 .driverInfo = {},
1576 .conformanceVersion = {},
1577 };
1578
1579 VkPhysicalDeviceProperties2 device_properties{
1580 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
1581 .pNext = &driver,
1582 .properties = {},
1583 };
1584 physical.GetProperties2(device_properties);
1585
1586 driver_id = driver.driverID;
1587 vendor_name = driver.driverName;
1588}
1589
1590u64 Device::GetDeviceMemoryUsage() const { 963u64 Device::GetDeviceMemoryUsage() const {
1591 VkPhysicalDeviceMemoryBudgetPropertiesEXT budget; 964 VkPhysicalDeviceMemoryBudgetPropertiesEXT budget;
1592 budget.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT; 965 budget.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT;
@@ -1602,7 +975,8 @@ u64 Device::GetDeviceMemoryUsage() const {
1602void Device::CollectPhysicalMemoryInfo() { 975void Device::CollectPhysicalMemoryInfo() {
1603 VkPhysicalDeviceMemoryBudgetPropertiesEXT budget{}; 976 VkPhysicalDeviceMemoryBudgetPropertiesEXT budget{};
1604 budget.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT; 977 budget.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT;
1605 const auto mem_info = physical.GetMemoryProperties(ext_memory_budget ? &budget : nullptr); 978 const auto mem_info =
979 physical.GetMemoryProperties(extensions.memory_budget ? &budget : nullptr);
1606 const auto& mem_properties = mem_info.memoryProperties; 980 const auto& mem_properties = mem_info.memoryProperties;
1607 const size_t num_properties = mem_properties.memoryHeapCount; 981 const size_t num_properties = mem_properties.memoryHeapCount;
1608 device_access_memory = 0; 982 device_access_memory = 0;
@@ -1618,7 +992,7 @@ void Device::CollectPhysicalMemoryInfo() {
1618 if (is_heap_local) { 992 if (is_heap_local) {
1619 local_memory += mem_properties.memoryHeaps[element].size; 993 local_memory += mem_properties.memoryHeaps[element].size;
1620 } 994 }
1621 if (ext_memory_budget) { 995 if (extensions.memory_budget) {
1622 device_initial_usage += budget.heapUsage[element]; 996 device_initial_usage += budget.heapUsage[element];
1623 device_access_memory += budget.heapBudget[element]; 997 device_access_memory += budget.heapBudget[element];
1624 continue; 998 continue;
@@ -1634,7 +1008,7 @@ void Device::CollectPhysicalMemoryInfo() {
1634} 1008}
1635 1009
1636void Device::CollectToolingInfo() { 1010void Device::CollectToolingInfo() {
1637 if (!ext_tooling_info) { 1011 if (!extensions.tooling_info) {
1638 return; 1012 return;
1639 } 1013 }
1640 auto tools{physical.GetPhysicalDeviceToolProperties()}; 1014 auto tools{physical.GetPhysicalDeviceToolProperties()};
diff --git a/src/video_core/vulkan_common/vulkan_device.h b/src/video_core/vulkan_common/vulkan_device.h
index 4bc267163..0662a2d9f 100644
--- a/src/video_core/vulkan_common/vulkan_device.h
+++ b/src/video_core/vulkan_common/vulkan_device.h
@@ -3,6 +3,7 @@
3 3
4#pragma once 4#pragma once
5 5
6#include <set>
6#include <span> 7#include <span>
7#include <string> 8#include <string>
8#include <unordered_map> 9#include <unordered_map>
@@ -11,6 +12,156 @@
11#include "common/common_types.h" 12#include "common/common_types.h"
12#include "video_core/vulkan_common/vulkan_wrapper.h" 13#include "video_core/vulkan_common/vulkan_wrapper.h"
13 14
15// Define all features which may be used by the implementation here.
16// Vulkan version in the macro describes the minimum version required for feature availability.
17// If the Vulkan version is lower than the required version, the named extension is required.
18#define FOR_EACH_VK_FEATURE_1_1(FEATURE) \
19 FEATURE(EXT, SubgroupSizeControl, SUBGROUP_SIZE_CONTROL, subgroup_size_control) \
20 FEATURE(KHR, 16BitStorage, 16BIT_STORAGE, bit16_storage) \
21 FEATURE(KHR, ShaderAtomicInt64, SHADER_ATOMIC_INT64, shader_atomic_int64) \
22 FEATURE(KHR, ShaderDrawParameters, SHADER_DRAW_PARAMETERS, shader_draw_parameters) \
23 FEATURE(KHR, ShaderFloat16Int8, SHADER_FLOAT16_INT8, shader_float16_int8) \
24 FEATURE(KHR, UniformBufferStandardLayout, UNIFORM_BUFFER_STANDARD_LAYOUT, \
25 uniform_buffer_standard_layout) \
26 FEATURE(KHR, VariablePointer, VARIABLE_POINTERS, variable_pointer)
27
28#define FOR_EACH_VK_FEATURE_1_2(FEATURE) \
29 FEATURE(EXT, HostQueryReset, HOST_QUERY_RESET, host_query_reset) \
30 FEATURE(KHR, 8BitStorage, 8BIT_STORAGE, bit8_storage) \
31 FEATURE(KHR, TimelineSemaphore, TIMELINE_SEMAPHORE, timeline_semaphore)
32
33#define FOR_EACH_VK_FEATURE_1_3(FEATURE) \
34 FEATURE(EXT, ShaderDemoteToHelperInvocation, SHADER_DEMOTE_TO_HELPER_INVOCATION, \
35 shader_demote_to_helper_invocation)
36
37// Define all features which may be used by the implementation and require an extension here.
38#define FOR_EACH_VK_FEATURE_EXT(FEATURE) \
39 FEATURE(EXT, CustomBorderColor, CUSTOM_BORDER_COLOR, custom_border_color) \
40 FEATURE(EXT, DepthClipControl, DEPTH_CLIP_CONTROL, depth_clip_control) \
41 FEATURE(EXT, ExtendedDynamicState, EXTENDED_DYNAMIC_STATE, extended_dynamic_state) \
42 FEATURE(EXT, ExtendedDynamicState2, EXTENDED_DYNAMIC_STATE_2, extended_dynamic_state2) \
43 FEATURE(EXT, ExtendedDynamicState3, EXTENDED_DYNAMIC_STATE_3, extended_dynamic_state3) \
44 FEATURE(EXT, IndexTypeUint8, INDEX_TYPE_UINT8, index_type_uint8) \
45 FEATURE(EXT, LineRasterization, LINE_RASTERIZATION, line_rasterization) \
46 FEATURE(EXT, PrimitiveTopologyListRestart, PRIMITIVE_TOPOLOGY_LIST_RESTART, \
47 primitive_topology_list_restart) \
48 FEATURE(EXT, ProvokingVertex, PROVOKING_VERTEX, provoking_vertex) \
49 FEATURE(EXT, Robustness2, ROBUSTNESS_2, robustness2) \
50 FEATURE(EXT, TransformFeedback, TRANSFORM_FEEDBACK, transform_feedback) \
51 FEATURE(EXT, VertexInputDynamicState, VERTEX_INPUT_DYNAMIC_STATE, vertex_input_dynamic_state) \
52 FEATURE(KHR, PipelineExecutableProperties, PIPELINE_EXECUTABLE_PROPERTIES, \
53 pipeline_executable_properties) \
54 FEATURE(KHR, WorkgroupMemoryExplicitLayout, WORKGROUP_MEMORY_EXPLICIT_LAYOUT, \
55 workgroup_memory_explicit_layout)
56
57// Define miscellaneous extensions which may be used by the implementation here.
58#define FOR_EACH_VK_EXTENSION(EXTENSION) \
59 EXTENSION(EXT, CONSERVATIVE_RASTERIZATION, conservative_rasterization) \
60 EXTENSION(EXT, DEPTH_RANGE_UNRESTRICTED, depth_range_unrestricted) \
61 EXTENSION(EXT, MEMORY_BUDGET, memory_budget) \
62 EXTENSION(EXT, ROBUSTNESS_2, robustness_2) \
63 EXTENSION(EXT, SAMPLER_FILTER_MINMAX, sampler_filter_minmax) \
64 EXTENSION(EXT, SHADER_STENCIL_EXPORT, shader_stencil_export) \
65 EXTENSION(EXT, SHADER_VIEWPORT_INDEX_LAYER, shader_viewport_index_layer) \
66 EXTENSION(EXT, TOOLING_INFO, tooling_info) \
67 EXTENSION(EXT, VERTEX_ATTRIBUTE_DIVISOR, vertex_attribute_divisor) \
68 EXTENSION(KHR, DRAW_INDIRECT_COUNT, draw_indirect_count) \
69 EXTENSION(KHR, DRIVER_PROPERTIES, driver_properties) \
70 EXTENSION(KHR, EXTERNAL_MEMORY_FD, external_memory_fd) \
71 EXTENSION(KHR, PUSH_DESCRIPTOR, push_descriptor) \
72 EXTENSION(KHR, SAMPLER_MIRROR_CLAMP_TO_EDGE, sampler_mirror_clamp_to_edge) \
73 EXTENSION(KHR, SHADER_FLOAT_CONTROLS, shader_float_controls) \
74 EXTENSION(KHR, SPIRV_1_4, spirv_1_4) \
75 EXTENSION(KHR, SWAPCHAIN, swapchain) \
76 EXTENSION(KHR, SWAPCHAIN_MUTABLE_FORMAT, swapchain_mutable_format) \
77 EXTENSION(NV, DEVICE_DIAGNOSTICS_CONFIG, device_diagnostics_config) \
78 EXTENSION(NV, GEOMETRY_SHADER_PASSTHROUGH, geometry_shader_passthrough) \
79 EXTENSION(NV, VIEWPORT_ARRAY2, viewport_array2) \
80 EXTENSION(NV, VIEWPORT_SWIZZLE, viewport_swizzle)
81
82#define FOR_EACH_VK_EXTENSION_WIN32(EXTENSION) \
83 EXTENSION(KHR, EXTERNAL_MEMORY_WIN32, external_memory_win32)
84
85// Define extensions which must be supported.
86#define FOR_EACH_VK_MANDATORY_EXTENSION(EXTENSION_NAME) \
87 EXTENSION_NAME(VK_EXT_ROBUSTNESS_2_EXTENSION_NAME) \
88 EXTENSION_NAME(VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME) \
89 EXTENSION_NAME(VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME) \
90 EXTENSION_NAME(VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME) \
91 EXTENSION_NAME(VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME)
92
93#define FOR_EACH_VK_MANDATORY_EXTENSION_GENERIC(EXTENSION_NAME) \
94 EXTENSION_NAME(VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME)
95
96#define FOR_EACH_VK_MANDATORY_EXTENSION_WIN32(EXTENSION_NAME) \
97 EXTENSION_NAME(VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME)
98
99// Define extensions where the absence of the extension may result in a degraded experience.
100#define FOR_EACH_VK_RECOMMENDED_EXTENSION(EXTENSION_NAME) \
101 EXTENSION_NAME(VK_EXT_CONSERVATIVE_RASTERIZATION_EXTENSION_NAME) \
102 EXTENSION_NAME(VK_EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME) \
103 EXTENSION_NAME(VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME) \
104 EXTENSION_NAME(VK_EXT_EXTENDED_DYNAMIC_STATE_2_EXTENSION_NAME) \
105 EXTENSION_NAME(VK_EXT_EXTENDED_DYNAMIC_STATE_3_EXTENSION_NAME) \
106 EXTENSION_NAME(VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME) \
107 EXTENSION_NAME(VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_EXTENSION_NAME) \
108 EXTENSION_NAME(VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME) \
109 EXTENSION_NAME(VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME) \
110 EXTENSION_NAME(VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME)
111
112// Define features which must be supported.
113#define FOR_EACH_VK_MANDATORY_FEATURE(FEATURE_NAME) \
114 FEATURE_NAME(bit16_storage, storageBuffer16BitAccess) \
115 FEATURE_NAME(bit16_storage, uniformAndStorageBuffer16BitAccess) \
116 FEATURE_NAME(bit8_storage, storageBuffer8BitAccess) \
117 FEATURE_NAME(bit8_storage, uniformAndStorageBuffer8BitAccess) \
118 FEATURE_NAME(features, depthBiasClamp) \
119 FEATURE_NAME(features, depthClamp) \
120 FEATURE_NAME(features, drawIndirectFirstInstance) \
121 FEATURE_NAME(features, dualSrcBlend) \
122 FEATURE_NAME(features, fillModeNonSolid) \
123 FEATURE_NAME(features, fragmentStoresAndAtomics) \
124 FEATURE_NAME(features, geometryShader) \
125 FEATURE_NAME(features, imageCubeArray) \
126 FEATURE_NAME(features, independentBlend) \
127 FEATURE_NAME(features, largePoints) \
128 FEATURE_NAME(features, logicOp) \
129 FEATURE_NAME(features, multiDrawIndirect) \
130 FEATURE_NAME(features, multiViewport) \
131 FEATURE_NAME(features, occlusionQueryPrecise) \
132 FEATURE_NAME(features, robustBufferAccess) \
133 FEATURE_NAME(features, samplerAnisotropy) \
134 FEATURE_NAME(features, sampleRateShading) \
135 FEATURE_NAME(features, shaderClipDistance) \
136 FEATURE_NAME(features, shaderCullDistance) \
137 FEATURE_NAME(features, shaderImageGatherExtended) \
138 FEATURE_NAME(features, shaderStorageImageWriteWithoutFormat) \
139 FEATURE_NAME(features, tessellationShader) \
140 FEATURE_NAME(features, vertexPipelineStoresAndAtomics) \
141 FEATURE_NAME(features, wideLines) \
142 FEATURE_NAME(host_query_reset, hostQueryReset) \
143 FEATURE_NAME(robustness2, nullDescriptor) \
144 FEATURE_NAME(robustness2, robustBufferAccess2) \
145 FEATURE_NAME(robustness2, robustImageAccess2) \
146 FEATURE_NAME(shader_demote_to_helper_invocation, shaderDemoteToHelperInvocation) \
147 FEATURE_NAME(shader_draw_parameters, shaderDrawParameters) \
148 FEATURE_NAME(timeline_semaphore, timelineSemaphore) \
149 FEATURE_NAME(variable_pointer, variablePointers) \
150 FEATURE_NAME(variable_pointer, variablePointersStorageBuffer)
151
152// Define features where the absence of the feature may result in a degraded experience.
153#define FOR_EACH_VK_RECOMMENDED_FEATURE(FEATURE_NAME) \
154 FEATURE_NAME(custom_border_color, customBorderColors) \
155 FEATURE_NAME(extended_dynamic_state, extendedDynamicState) \
156 FEATURE_NAME(index_type_uint8, indexTypeUint8) \
157 FEATURE_NAME(primitive_topology_list_restart, primitiveTopologyListRestart) \
158 FEATURE_NAME(provoking_vertex, provokingVertexLast) \
159 FEATURE_NAME(shader_float16_int8, shaderFloat16) \
160 FEATURE_NAME(shader_float16_int8, shaderInt8) \
161 FEATURE_NAME(transform_feedback, transformFeedback) \
162 FEATURE_NAME(uniform_buffer_standard_layout, uniformBufferStandardLayout) \
163 FEATURE_NAME(vertex_input_dynamic_state, vertexInputDynamicState)
164
14namespace Vulkan { 165namespace Vulkan {
15 166
16class NsightAftermathTracker; 167class NsightAftermathTracker;
@@ -88,69 +239,69 @@ public:
88 239
89 /// Returns the current Vulkan API version provided in Vulkan-formatted version numbers. 240 /// Returns the current Vulkan API version provided in Vulkan-formatted version numbers.
90 u32 ApiVersion() const { 241 u32 ApiVersion() const {
91 return properties.apiVersion; 242 return properties.properties.apiVersion;
92 } 243 }
93 244
94 /// Returns the current driver version provided in Vulkan-formatted version numbers. 245 /// Returns the current driver version provided in Vulkan-formatted version numbers.
95 u32 GetDriverVersion() const { 246 u32 GetDriverVersion() const {
96 return properties.driverVersion; 247 return properties.properties.driverVersion;
97 } 248 }
98 249
99 /// Returns the device name. 250 /// Returns the device name.
100 std::string_view GetModelName() const { 251 std::string_view GetModelName() const {
101 return properties.deviceName; 252 return properties.properties.deviceName;
102 } 253 }
103 254
104 /// Returns the driver ID. 255 /// Returns the driver ID.
105 VkDriverIdKHR GetDriverID() const { 256 VkDriverIdKHR GetDriverID() const {
106 return driver_id; 257 return properties.driver.driverID;
107 } 258 }
108 259
109 bool ShouldBoostClocks() const; 260 bool ShouldBoostClocks() const;
110 261
111 /// Returns uniform buffer alignment requeriment. 262 /// Returns uniform buffer alignment requeriment.
112 VkDeviceSize GetUniformBufferAlignment() const { 263 VkDeviceSize GetUniformBufferAlignment() const {
113 return properties.limits.minUniformBufferOffsetAlignment; 264 return properties.properties.limits.minUniformBufferOffsetAlignment;
114 } 265 }
115 266
116 /// Returns storage alignment requeriment. 267 /// Returns storage alignment requeriment.
117 VkDeviceSize GetStorageBufferAlignment() const { 268 VkDeviceSize GetStorageBufferAlignment() const {
118 return properties.limits.minStorageBufferOffsetAlignment; 269 return properties.properties.limits.minStorageBufferOffsetAlignment;
119 } 270 }
120 271
121 /// Returns the maximum range for storage buffers. 272 /// Returns the maximum range for storage buffers.
122 VkDeviceSize GetMaxStorageBufferRange() const { 273 VkDeviceSize GetMaxStorageBufferRange() const {
123 return properties.limits.maxStorageBufferRange; 274 return properties.properties.limits.maxStorageBufferRange;
124 } 275 }
125 276
126 /// Returns the maximum size for push constants. 277 /// Returns the maximum size for push constants.
127 VkDeviceSize GetMaxPushConstantsSize() const { 278 VkDeviceSize GetMaxPushConstantsSize() const {
128 return properties.limits.maxPushConstantsSize; 279 return properties.properties.limits.maxPushConstantsSize;
129 } 280 }
130 281
131 /// Returns the maximum size for shared memory. 282 /// Returns the maximum size for shared memory.
132 u32 GetMaxComputeSharedMemorySize() const { 283 u32 GetMaxComputeSharedMemorySize() const {
133 return properties.limits.maxComputeSharedMemorySize; 284 return properties.properties.limits.maxComputeSharedMemorySize;
134 } 285 }
135 286
136 /// Returns float control properties of the device. 287 /// Returns float control properties of the device.
137 const VkPhysicalDeviceFloatControlsPropertiesKHR& FloatControlProperties() const { 288 const VkPhysicalDeviceFloatControlsPropertiesKHR& FloatControlProperties() const {
138 return float_controls; 289 return properties.float_controls;
139 } 290 }
140 291
141 /// Returns true if ASTC is natively supported. 292 /// Returns true if ASTC is natively supported.
142 bool IsOptimalAstcSupported() const { 293 bool IsOptimalAstcSupported() const {
143 return is_optimal_astc_supported; 294 return features.features.textureCompressionASTC_LDR;
144 } 295 }
145 296
146 /// Returns true if the device supports float16 natively. 297 /// Returns true if the device supports float16 natively.
147 bool IsFloat16Supported() const { 298 bool IsFloat16Supported() const {
148 return is_float16_supported; 299 return features.shader_float16_int8.shaderFloat16;
149 } 300 }
150 301
151 /// Returns true if the device supports int8 natively. 302 /// Returns true if the device supports int8 natively.
152 bool IsInt8Supported() const { 303 bool IsInt8Supported() const {
153 return is_int8_supported; 304 return features.shader_float16_int8.shaderInt8;
154 } 305 }
155 306
156 /// Returns true if the device warp size can potentially be bigger than guest's warp size. 307 /// Returns true if the device warp size can potentially be bigger than guest's warp size.
@@ -160,32 +311,32 @@ public:
160 311
161 /// Returns true if the device can be forced to use the guest warp size. 312 /// Returns true if the device can be forced to use the guest warp size.
162 bool IsGuestWarpSizeSupported(VkShaderStageFlagBits stage) const { 313 bool IsGuestWarpSizeSupported(VkShaderStageFlagBits stage) const {
163 return guest_warp_stages & stage; 314 return properties.subgroup_size_control.requiredSubgroupSizeStages & stage;
164 } 315 }
165 316
166 /// Returns the maximum number of push descriptors. 317 /// Returns the maximum number of push descriptors.
167 u32 MaxPushDescriptors() const { 318 u32 MaxPushDescriptors() const {
168 return max_push_descriptors; 319 return properties.push_descriptor.maxPushDescriptors;
169 } 320 }
170 321
171 /// Returns true if formatless image load is supported. 322 /// Returns true if formatless image load is supported.
172 bool IsFormatlessImageLoadSupported() const { 323 bool IsFormatlessImageLoadSupported() const {
173 return is_formatless_image_load_supported; 324 return features.features.shaderStorageImageReadWithoutFormat;
174 } 325 }
175 326
176 /// Returns true if shader int64 is supported. 327 /// Returns true if shader int64 is supported.
177 bool IsShaderInt64Supported() const { 328 bool IsShaderInt64Supported() const {
178 return is_shader_int64_supported; 329 return features.features.shaderInt64;
179 } 330 }
180 331
181 /// Returns true if shader int16 is supported. 332 /// Returns true if shader int16 is supported.
182 bool IsShaderInt16Supported() const { 333 bool IsShaderInt16Supported() const {
183 return is_shader_int16_supported; 334 return features.features.shaderInt16;
184 } 335 }
185 336
186 // Returns true if depth bounds is supported. 337 // Returns true if depth bounds is supported.
187 bool IsDepthBoundsSupported() const { 338 bool IsDepthBoundsSupported() const {
188 return is_depth_bounds_supported; 339 return features.features.depthBounds;
189 } 340 }
190 341
191 /// Returns true when blitting from and to depth stencil images is supported. 342 /// Returns true when blitting from and to depth stencil images is supported.
@@ -195,151 +346,151 @@ public:
195 346
196 /// Returns true if the device supports VK_NV_viewport_swizzle. 347 /// Returns true if the device supports VK_NV_viewport_swizzle.
197 bool IsNvViewportSwizzleSupported() const { 348 bool IsNvViewportSwizzleSupported() const {
198 return nv_viewport_swizzle; 349 return extensions.viewport_swizzle;
199 } 350 }
200 351
201 /// Returns true if the device supports VK_NV_viewport_array2. 352 /// Returns true if the device supports VK_NV_viewport_array2.
202 bool IsNvViewportArray2Supported() const { 353 bool IsNvViewportArray2Supported() const {
203 return nv_viewport_array2; 354 return extensions.viewport_array2;
204 } 355 }
205 356
206 /// Returns true if the device supports VK_NV_geometry_shader_passthrough. 357 /// Returns true if the device supports VK_NV_geometry_shader_passthrough.
207 bool IsNvGeometryShaderPassthroughSupported() const { 358 bool IsNvGeometryShaderPassthroughSupported() const {
208 return nv_geometry_shader_passthrough; 359 return extensions.geometry_shader_passthrough;
209 } 360 }
210 361
211 /// Returns true if the device supports VK_KHR_uniform_buffer_standard_layout. 362 /// Returns true if the device supports VK_KHR_uniform_buffer_standard_layout.
212 bool IsKhrUniformBufferStandardLayoutSupported() const { 363 bool IsKhrUniformBufferStandardLayoutSupported() const {
213 return khr_uniform_buffer_standard_layout; 364 return extensions.uniform_buffer_standard_layout;
214 } 365 }
215 366
216 /// Returns true if the device supports VK_KHR_push_descriptor. 367 /// Returns true if the device supports VK_KHR_push_descriptor.
217 bool IsKhrPushDescriptorSupported() const { 368 bool IsKhrPushDescriptorSupported() const {
218 return khr_push_descriptor; 369 return extensions.push_descriptor;
219 } 370 }
220 371
221 /// Returns true if VK_KHR_pipeline_executable_properties is enabled. 372 /// Returns true if VK_KHR_pipeline_executable_properties is enabled.
222 bool IsKhrPipelineExecutablePropertiesEnabled() const { 373 bool IsKhrPipelineExecutablePropertiesEnabled() const {
223 return khr_pipeline_executable_properties; 374 return extensions.pipeline_executable_properties;
224 } 375 }
225 376
226 /// Returns true if VK_KHR_swapchain_mutable_format is enabled. 377 /// Returns true if VK_KHR_swapchain_mutable_format is enabled.
227 bool IsKhrSwapchainMutableFormatEnabled() const { 378 bool IsKhrSwapchainMutableFormatEnabled() const {
228 return khr_swapchain_mutable_format; 379 return extensions.swapchain_mutable_format;
229 } 380 }
230 381
231 /// Returns true if the device supports VK_KHR_workgroup_memory_explicit_layout. 382 /// Returns true if the device supports VK_KHR_workgroup_memory_explicit_layout.
232 bool IsKhrWorkgroupMemoryExplicitLayoutSupported() const { 383 bool IsKhrWorkgroupMemoryExplicitLayoutSupported() const {
233 return khr_workgroup_memory_explicit_layout; 384 return extensions.workgroup_memory_explicit_layout;
234 } 385 }
235 386
236 /// Returns true if the device supports VK_EXT_primitive_topology_list_restart. 387 /// Returns true if the device supports VK_EXT_primitive_topology_list_restart.
237 bool IsTopologyListPrimitiveRestartSupported() const { 388 bool IsTopologyListPrimitiveRestartSupported() const {
238 return is_topology_list_restart_supported; 389 return features.primitive_topology_list_restart.primitiveTopologyListRestart;
239 } 390 }
240 391
241 /// Returns true if the device supports VK_EXT_primitive_topology_list_restart. 392 /// Returns true if the device supports VK_EXT_primitive_topology_list_restart.
242 bool IsPatchListPrimitiveRestartSupported() const { 393 bool IsPatchListPrimitiveRestartSupported() const {
243 return is_patch_list_restart_supported; 394 return features.primitive_topology_list_restart.primitiveTopologyPatchListRestart;
244 } 395 }
245 396
246 /// Returns true if the device supports VK_EXT_index_type_uint8. 397 /// Returns true if the device supports VK_EXT_index_type_uint8.
247 bool IsExtIndexTypeUint8Supported() const { 398 bool IsExtIndexTypeUint8Supported() const {
248 return ext_index_type_uint8; 399 return extensions.index_type_uint8;
249 } 400 }
250 401
251 /// Returns true if the device supports VK_EXT_sampler_filter_minmax. 402 /// Returns true if the device supports VK_EXT_sampler_filter_minmax.
252 bool IsExtSamplerFilterMinmaxSupported() const { 403 bool IsExtSamplerFilterMinmaxSupported() const {
253 return ext_sampler_filter_minmax; 404 return extensions.sampler_filter_minmax;
254 } 405 }
255 406
256 /// Returns true if the device supports VK_EXT_depth_range_unrestricted. 407 /// Returns true if the device supports VK_EXT_depth_range_unrestricted.
257 bool IsExtDepthRangeUnrestrictedSupported() const { 408 bool IsExtDepthRangeUnrestrictedSupported() const {
258 return ext_depth_range_unrestricted; 409 return extensions.depth_range_unrestricted;
259 } 410 }
260 411
261 /// Returns true if the device supports VK_EXT_depth_clip_control. 412 /// Returns true if the device supports VK_EXT_depth_clip_control.
262 bool IsExtDepthClipControlSupported() const { 413 bool IsExtDepthClipControlSupported() const {
263 return ext_depth_clip_control; 414 return extensions.depth_clip_control;
264 } 415 }
265 416
266 /// Returns true if the device supports VK_EXT_shader_viewport_index_layer. 417 /// Returns true if the device supports VK_EXT_shader_viewport_index_layer.
267 bool IsExtShaderViewportIndexLayerSupported() const { 418 bool IsExtShaderViewportIndexLayerSupported() const {
268 return ext_shader_viewport_index_layer; 419 return extensions.shader_viewport_index_layer;
269 } 420 }
270 421
271 /// Returns true if the device supports VK_EXT_subgroup_size_control. 422 /// Returns true if the device supports VK_EXT_subgroup_size_control.
272 bool IsExtSubgroupSizeControlSupported() const { 423 bool IsExtSubgroupSizeControlSupported() const {
273 return ext_subgroup_size_control; 424 return extensions.subgroup_size_control;
274 } 425 }
275 426
276 /// Returns true if the device supports VK_EXT_transform_feedback. 427 /// Returns true if the device supports VK_EXT_transform_feedback.
277 bool IsExtTransformFeedbackSupported() const { 428 bool IsExtTransformFeedbackSupported() const {
278 return ext_transform_feedback; 429 return extensions.transform_feedback;
279 } 430 }
280 431
281 /// Returns true if the device supports VK_EXT_custom_border_color. 432 /// Returns true if the device supports VK_EXT_custom_border_color.
282 bool IsExtCustomBorderColorSupported() const { 433 bool IsExtCustomBorderColorSupported() const {
283 return ext_custom_border_color; 434 return extensions.custom_border_color;
284 } 435 }
285 436
286 /// Returns true if the device supports VK_EXT_extended_dynamic_state. 437 /// Returns true if the device supports VK_EXT_extended_dynamic_state.
287 bool IsExtExtendedDynamicStateSupported() const { 438 bool IsExtExtendedDynamicStateSupported() const {
288 return ext_extended_dynamic_state; 439 return extensions.extended_dynamic_state;
289 } 440 }
290 441
291 /// Returns true if the device supports VK_EXT_extended_dynamic_state2. 442 /// Returns true if the device supports VK_EXT_extended_dynamic_state2.
292 bool IsExtExtendedDynamicState2Supported() const { 443 bool IsExtExtendedDynamicState2Supported() const {
293 return ext_extended_dynamic_state_2; 444 return extensions.extended_dynamic_state2;
294 } 445 }
295 446
296 bool IsExtExtendedDynamicState2ExtrasSupported() const { 447 bool IsExtExtendedDynamicState2ExtrasSupported() const {
297 return ext_extended_dynamic_state_2_extra; 448 return features.extended_dynamic_state2.extendedDynamicState2LogicOp;
298 } 449 }
299 450
300 /// Returns true if the device supports VK_EXT_extended_dynamic_state3. 451 /// Returns true if the device supports VK_EXT_extended_dynamic_state3.
301 bool IsExtExtendedDynamicState3Supported() const { 452 bool IsExtExtendedDynamicState3Supported() const {
302 return ext_extended_dynamic_state_3; 453 return extensions.extended_dynamic_state3;
303 } 454 }
304 455
305 /// Returns true if the device supports VK_EXT_extended_dynamic_state3. 456 /// Returns true if the device supports VK_EXT_extended_dynamic_state3.
306 bool IsExtExtendedDynamicState3BlendingSupported() const { 457 bool IsExtExtendedDynamicState3BlendingSupported() const {
307 return ext_extended_dynamic_state_3_blend; 458 return dynamic_state3_blending;
308 } 459 }
309 460
310 /// Returns true if the device supports VK_EXT_extended_dynamic_state3. 461 /// Returns true if the device supports VK_EXT_extended_dynamic_state3.
311 bool IsExtExtendedDynamicState3EnablesSupported() const { 462 bool IsExtExtendedDynamicState3EnablesSupported() const {
312 return ext_extended_dynamic_state_3_enables; 463 return dynamic_state3_enables;
313 } 464 }
314 465
315 /// Returns true if the device supports VK_EXT_line_rasterization. 466 /// Returns true if the device supports VK_EXT_line_rasterization.
316 bool IsExtLineRasterizationSupported() const { 467 bool IsExtLineRasterizationSupported() const {
317 return ext_line_rasterization; 468 return extensions.line_rasterization;
318 } 469 }
319 470
320 /// Returns true if the device supports VK_EXT_vertex_input_dynamic_state. 471 /// Returns true if the device supports VK_EXT_vertex_input_dynamic_state.
321 bool IsExtVertexInputDynamicStateSupported() const { 472 bool IsExtVertexInputDynamicStateSupported() const {
322 return ext_vertex_input_dynamic_state; 473 return extensions.vertex_input_dynamic_state;
323 } 474 }
324 475
325 /// Returns true if the device supports VK_EXT_shader_stencil_export. 476 /// Returns true if the device supports VK_EXT_shader_stencil_export.
326 bool IsExtShaderStencilExportSupported() const { 477 bool IsExtShaderStencilExportSupported() const {
327 return ext_shader_stencil_export; 478 return extensions.shader_stencil_export;
328 } 479 }
329 480
330 /// Returns true if the device supports VK_EXT_conservative_rasterization. 481 /// Returns true if the device supports VK_EXT_conservative_rasterization.
331 bool IsExtConservativeRasterizationSupported() const { 482 bool IsExtConservativeRasterizationSupported() const {
332 return ext_conservative_rasterization; 483 return extensions.conservative_rasterization;
333 } 484 }
334 485
335 /// Returns true if the device supports VK_EXT_provoking_vertex. 486 /// Returns true if the device supports VK_EXT_provoking_vertex.
336 bool IsExtProvokingVertexSupported() const { 487 bool IsExtProvokingVertexSupported() const {
337 return ext_provoking_vertex; 488 return extensions.provoking_vertex;
338 } 489 }
339 490
340 /// Returns true if the device supports VK_KHR_shader_atomic_int64. 491 /// Returns true if the device supports VK_KHR_shader_atomic_int64.
341 bool IsExtShaderAtomicInt64Supported() const { 492 bool IsExtShaderAtomicInt64Supported() const {
342 return ext_shader_atomic_int64; 493 return extensions.shader_atomic_int64;
343 } 494 }
344 495
345 /// Returns the minimum supported version of SPIR-V. 496 /// Returns the minimum supported version of SPIR-V.
@@ -347,7 +498,7 @@ public:
347 if (instance_version >= VK_API_VERSION_1_3) { 498 if (instance_version >= VK_API_VERSION_1_3) {
348 return 0x00010600U; 499 return 0x00010600U;
349 } 500 }
350 if (khr_spirv_1_4) { 501 if (extensions.spirv_1_4) {
351 return 0x00010400U; 502 return 0x00010400U;
352 } 503 }
353 return 0x00010000U; 504 return 0x00010000U;
@@ -365,11 +516,11 @@ public:
365 516
366 /// Returns the vendor name reported from Vulkan. 517 /// Returns the vendor name reported from Vulkan.
367 std::string_view GetVendorName() const { 518 std::string_view GetVendorName() const {
368 return vendor_name; 519 return properties.driver.driverName;
369 } 520 }
370 521
371 /// Returns the list of available extensions. 522 /// Returns the list of available extensions.
372 const std::vector<std::string>& GetAvailableExtensions() const { 523 const std::set<std::string, std::less<>>& GetAvailableExtensions() const {
373 return supported_extensions; 524 return supported_extensions;
374 } 525 }
375 526
@@ -378,7 +529,7 @@ public:
378 } 529 }
379 530
380 bool CanReportMemoryUsage() const { 531 bool CanReportMemoryUsage() const {
381 return ext_memory_budget; 532 return extensions.memory_budget;
382 } 533 }
383 534
384 u64 GetDeviceMemoryUsage() const; 535 u64 GetDeviceMemoryUsage() const;
@@ -400,36 +551,29 @@ public:
400 } 551 }
401 552
402 bool HasNullDescriptor() const { 553 bool HasNullDescriptor() const {
403 return has_null_descriptor; 554 return features.robustness2.nullDescriptor;
404 } 555 }
405 556
406 u32 GetMaxVertexInputAttributes() const { 557 u32 GetMaxVertexInputAttributes() const {
407 return max_vertex_input_attributes; 558 return properties.properties.limits.maxVertexInputAttributes;
408 } 559 }
409 560
410 u32 GetMaxVertexInputBindings() const { 561 u32 GetMaxVertexInputBindings() const {
411 return max_vertex_input_bindings; 562 return properties.properties.limits.maxVertexInputBindings;
412 } 563 }
413 564
414private: 565private:
415 /// Checks if the physical device is suitable. 566 /// Checks if the physical device is suitable and configures the object state
416 void CheckSuitability(bool requires_swapchain) const; 567 /// with all necessary info about its properties.
568 bool GetSuitability(bool requires_swapchain);
417 569
418 /// Loads extensions into a vector and stores available ones in this object. 570 // Remove extensions which have incomplete feature support.
419 std::vector<const char*> LoadExtensions(bool requires_surface); 571 void RemoveUnsuitableExtensions();
572 void RemoveExtensionIfUnsuitable(bool is_suitable, const std::string& extension_name);
420 573
421 /// Sets up queue families. 574 /// Sets up queue families.
422 void SetupFamilies(VkSurfaceKHR surface); 575 void SetupFamilies(VkSurfaceKHR surface);
423 576
424 /// Sets up device features.
425 void SetupFeatures();
426
427 /// Sets up device properties.
428 void SetupProperties();
429
430 /// Collects telemetry information from the device.
431 void CollectTelemetryParameters();
432
433 /// Collects information about attached tools. 577 /// Collects information about attached tools.
434 void CollectToolingInfo(); 578 void CollectToolingInfo();
435 579
@@ -440,91 +584,93 @@ private:
440 std::vector<VkDeviceQueueCreateInfo> GetDeviceQueueCreateInfos() const; 584 std::vector<VkDeviceQueueCreateInfo> GetDeviceQueueCreateInfos() const;
441 585
442 /// Returns true if ASTC textures are natively supported. 586 /// Returns true if ASTC textures are natively supported.
443 bool IsOptimalAstcSupported(const VkPhysicalDeviceFeatures& features) const; 587 bool ComputeIsOptimalAstcSupported() const;
444 588
445 /// Returns true if the device natively supports blitting depth stencil images. 589 /// Returns true if the device natively supports blitting depth stencil images.
446 bool TestDepthStencilBlits() const; 590 bool TestDepthStencilBlits() const;
447 591
448 VkInstance instance; ///< Vulkan instance. 592private:
449 vk::DeviceDispatch dld; ///< Device function pointers. 593 VkInstance instance; ///< Vulkan instance.
450 vk::PhysicalDevice physical; ///< Physical device. 594 vk::DeviceDispatch dld; ///< Device function pointers.
451 VkPhysicalDeviceProperties properties; ///< Device properties. 595 vk::PhysicalDevice physical; ///< Physical device.
452 VkPhysicalDeviceFloatControlsPropertiesKHR float_controls{}; ///< Float control properties. 596 vk::Device logical; ///< Logical device.
453 vk::Device logical; ///< Logical device. 597 vk::Queue graphics_queue; ///< Main graphics queue.
454 vk::Queue graphics_queue; ///< Main graphics queue. 598 vk::Queue present_queue; ///< Main present queue.
455 vk::Queue present_queue; ///< Main present queue. 599 u32 instance_version{}; ///< Vulkan instance version.
456 u32 instance_version{}; ///< Vulkan onstance version. 600 u32 graphics_family{}; ///< Main graphics queue family index.
457 u32 graphics_family{}; ///< Main graphics queue family index. 601 u32 present_family{}; ///< Main present queue family index.
458 u32 present_family{}; ///< Main present queue family index. 602
459 VkDriverIdKHR driver_id{}; ///< Driver ID. 603 struct Extensions {
460 VkShaderStageFlags guest_warp_stages{}; ///< Stages where the guest warp size can be forced. 604#define EXTENSION(prefix, macro_name, var_name) bool var_name{};
461 u64 device_access_memory{}; ///< Total size of device local memory in bytes. 605#define FEATURE(prefix, struct_name, macro_name, var_name) bool var_name{};
462 u32 max_push_descriptors{}; ///< Maximum number of push descriptors 606
463 u32 sets_per_pool{}; ///< Sets per Description Pool 607 FOR_EACH_VK_FEATURE_1_1(FEATURE);
464 bool is_optimal_astc_supported{}; ///< Support for native ASTC. 608 FOR_EACH_VK_FEATURE_1_2(FEATURE);
465 bool is_float16_supported{}; ///< Support for float16 arithmetic. 609 FOR_EACH_VK_FEATURE_1_3(FEATURE);
466 bool is_int8_supported{}; ///< Support for int8 arithmetic. 610 FOR_EACH_VK_FEATURE_EXT(FEATURE);
467 bool is_warp_potentially_bigger{}; ///< Host warp size can be bigger than guest. 611 FOR_EACH_VK_EXTENSION(EXTENSION);
468 bool is_formatless_image_load_supported{}; ///< Support for shader image read without format. 612 FOR_EACH_VK_EXTENSION_WIN32(EXTENSION);
469 bool is_depth_bounds_supported{}; ///< Support for depth bounds. 613
470 bool is_shader_float64_supported{}; ///< Support for float64. 614#undef EXTENSION
471 bool is_shader_int64_supported{}; ///< Support for int64. 615#undef FEATURE
472 bool is_shader_int16_supported{}; ///< Support for int16. 616 };
473 bool is_shader_storage_image_multisample{}; ///< Support for image operations on MSAA images. 617
474 bool is_blit_depth_stencil_supported{}; ///< Support for blitting from and to depth stencil. 618 struct Features {
475 bool is_topology_list_restart_supported{}; ///< Support for primitive restart with list 619#define FEATURE_CORE(prefix, struct_name, macro_name, var_name) \
476 ///< topologies. 620 VkPhysicalDevice##struct_name##Features var_name{};
477 bool is_patch_list_restart_supported{}; ///< Support for primitive restart with list patch. 621#define FEATURE_EXT(prefix, struct_name, macro_name, var_name) \
478 bool is_integrated{}; ///< Is GPU an iGPU. 622 VkPhysicalDevice##struct_name##Features##prefix var_name{};
479 bool is_virtual{}; ///< Is GPU a virtual GPU. 623
480 bool is_non_gpu{}; ///< Is SoftwareRasterizer, FPGA, non-GPU device. 624 FOR_EACH_VK_FEATURE_1_1(FEATURE_CORE);
481 bool nv_viewport_swizzle{}; ///< Support for VK_NV_viewport_swizzle. 625 FOR_EACH_VK_FEATURE_1_2(FEATURE_CORE);
482 bool nv_viewport_array2{}; ///< Support for VK_NV_viewport_array2. 626 FOR_EACH_VK_FEATURE_1_3(FEATURE_CORE);
483 bool nv_geometry_shader_passthrough{}; ///< Support for VK_NV_geometry_shader_passthrough. 627 FOR_EACH_VK_FEATURE_EXT(FEATURE_EXT);
484 bool khr_draw_indirect_count{}; ///< Support for VK_KHR_draw_indirect_count. 628
485 bool khr_uniform_buffer_standard_layout{}; ///< Support for scalar uniform buffer layouts. 629#undef FEATURE_CORE
486 bool khr_spirv_1_4{}; ///< Support for VK_KHR_spirv_1_4. 630#undef FEATURE_EXT
487 bool khr_workgroup_memory_explicit_layout{}; ///< Support for explicit workgroup layouts. 631
488 bool khr_push_descriptor{}; ///< Support for VK_KHR_push_descritor. 632 VkPhysicalDeviceFeatures features{};
489 bool khr_pipeline_executable_properties{}; ///< Support for executable properties. 633 };
490 bool khr_swapchain_mutable_format{}; ///< Support for VK_KHR_swapchain_mutable_format. 634
491 bool ext_index_type_uint8{}; ///< Support for VK_EXT_index_type_uint8. 635 struct Properties {
492 bool ext_sampler_filter_minmax{}; ///< Support for VK_EXT_sampler_filter_minmax. 636 VkPhysicalDeviceDriverProperties driver{};
493 bool ext_depth_clip_control{}; ///< Support for VK_EXT_depth_clip_control 637 VkPhysicalDeviceFloatControlsProperties float_controls{};
494 bool ext_depth_range_unrestricted{}; ///< Support for VK_EXT_depth_range_unrestricted. 638 VkPhysicalDevicePushDescriptorPropertiesKHR push_descriptor{};
495 bool ext_shader_viewport_index_layer{}; ///< Support for VK_EXT_shader_viewport_index_layer. 639 VkPhysicalDeviceSubgroupSizeControlProperties subgroup_size_control{};
496 bool ext_tooling_info{}; ///< Support for VK_EXT_tooling_info. 640 VkPhysicalDeviceTransformFeedbackPropertiesEXT transform_feedback{};
497 bool ext_subgroup_size_control{}; ///< Support for VK_EXT_subgroup_size_control. 641
498 bool ext_transform_feedback{}; ///< Support for VK_EXT_transform_feedback. 642 VkPhysicalDeviceProperties properties{};
499 bool ext_custom_border_color{}; ///< Support for VK_EXT_custom_border_color. 643 };
500 bool ext_extended_dynamic_state{}; ///< Support for VK_EXT_extended_dynamic_state. 644
501 bool ext_extended_dynamic_state_2{}; ///< Support for VK_EXT_extended_dynamic_state2. 645 Extensions extensions{};
502 bool ext_extended_dynamic_state_2_extra{}; ///< Support for VK_EXT_extended_dynamic_state2. 646 Features features{};
503 bool ext_extended_dynamic_state_3{}; ///< Support for VK_EXT_extended_dynamic_state3. 647 Properties properties{};
504 bool ext_extended_dynamic_state_3_blend{}; ///< Support for VK_EXT_extended_dynamic_state3. 648
505 bool ext_extended_dynamic_state_3_enables{}; ///< Support for VK_EXT_extended_dynamic_state3. 649 VkPhysicalDeviceFeatures2 features2{};
506 bool ext_line_rasterization{}; ///< Support for VK_EXT_line_rasterization. 650 VkPhysicalDeviceProperties2 properties2{};
507 bool ext_vertex_input_dynamic_state{}; ///< Support for VK_EXT_vertex_input_dynamic_state. 651
508 bool ext_shader_stencil_export{}; ///< Support for VK_EXT_shader_stencil_export. 652 // Misc features
509 bool ext_shader_atomic_int64{}; ///< Support for VK_KHR_shader_atomic_int64. 653 bool is_optimal_astc_supported{}; ///< Support for all guest ASTC formats.
510 bool ext_conservative_rasterization{}; ///< Support for VK_EXT_conservative_rasterization. 654 bool is_blit_depth_stencil_supported{}; ///< Support for blitting from and to depth stencil.
511 bool ext_provoking_vertex{}; ///< Support for VK_EXT_provoking_vertex. 655 bool is_warp_potentially_bigger{}; ///< Host warp size can be bigger than guest.
512 bool ext_memory_budget{}; ///< Support for VK_EXT_memory_budget. 656 bool is_integrated{}; ///< Is GPU an iGPU.
513 bool nv_device_diagnostics_config{}; ///< Support for VK_NV_device_diagnostics_config. 657 bool is_virtual{}; ///< Is GPU a virtual GPU.
514 bool has_broken_cube_compatibility{}; ///< Has broken cube compatiblity bit 658 bool is_non_gpu{}; ///< Is SoftwareRasterizer, FPGA, non-GPU device.
515 bool has_renderdoc{}; ///< Has RenderDoc attached 659 bool has_broken_cube_compatibility{}; ///< Has broken cube compatiblity bit
516 bool has_nsight_graphics{}; ///< Has Nsight Graphics attached 660 bool has_renderdoc{}; ///< Has RenderDoc attached
517 bool supports_d24_depth{}; ///< Supports D24 depth buffers. 661 bool has_nsight_graphics{}; ///< Has Nsight Graphics attached
518 bool cant_blit_msaa{}; ///< Does not support MSAA<->MSAA blitting. 662 bool supports_d24_depth{}; ///< Supports D24 depth buffers.
519 bool must_emulate_bgr565{}; ///< Emulates BGR565 by swizzling RGB565 format. 663 bool cant_blit_msaa{}; ///< Does not support MSAA<->MSAA blitting.
520 bool has_null_descriptor{}; ///< Has support for null descriptors. 664 bool must_emulate_bgr565{}; ///< Emulates BGR565 by swizzling RGB565 format.
521 u32 max_vertex_input_attributes{}; ///< Max vertex input attributes in pipeline 665 bool dynamic_state3_blending{}; ///< Has all blending features of dynamic_state3.
522 u32 max_vertex_input_bindings{}; ///< Max vertex input buffers in pipeline 666 bool dynamic_state3_enables{}; ///< Has all enables features of dynamic_state3.
667 u64 device_access_memory{}; ///< Total size of device local memory in bytes.
668 u32 sets_per_pool{}; ///< Sets per Description Pool
523 669
524 // Telemetry parameters 670 // Telemetry parameters
525 std::string vendor_name; ///< Device's driver name. 671 std::set<std::string, std::less<>> supported_extensions; ///< Reported Vulkan extensions.
526 std::vector<std::string> supported_extensions; ///< Reported Vulkan extensions. 672 std::set<std::string, std::less<>> loaded_extensions; ///< Loaded Vulkan extensions.
527 std::vector<size_t> valid_heap_memory; ///< Heaps used. 673 std::vector<size_t> valid_heap_memory; ///< Heaps used.
528 674
529 /// Format properties dictionary. 675 /// Format properties dictionary.
530 std::unordered_map<VkFormat, VkFormatProperties> format_properties; 676 std::unordered_map<VkFormat, VkFormatProperties> format_properties;
diff --git a/src/video_core/vulkan_common/vulkan_wrapper.cpp b/src/video_core/vulkan_common/vulkan_wrapper.cpp
index 61be1fce1..486d4dfaf 100644
--- a/src/video_core/vulkan_common/vulkan_wrapper.cpp
+++ b/src/video_core/vulkan_common/vulkan_wrapper.cpp
@@ -96,8 +96,8 @@ void Load(VkDevice device, DeviceDispatch& dld) noexcept {
96 X(vkCmdDrawIndexed); 96 X(vkCmdDrawIndexed);
97 X(vkCmdDrawIndirect); 97 X(vkCmdDrawIndirect);
98 X(vkCmdDrawIndexedIndirect); 98 X(vkCmdDrawIndexedIndirect);
99 X(vkCmdDrawIndirectCountKHR); 99 X(vkCmdDrawIndirectCount);
100 X(vkCmdDrawIndexedIndirectCountKHR); 100 X(vkCmdDrawIndexedIndirectCount);
101 X(vkCmdEndQuery); 101 X(vkCmdEndQuery);
102 X(vkCmdEndRenderPass); 102 X(vkCmdEndRenderPass);
103 X(vkCmdEndTransformFeedbackEXT); 103 X(vkCmdEndTransformFeedbackEXT);
@@ -221,6 +221,12 @@ void Load(VkDevice device, DeviceDispatch& dld) noexcept {
221 if (!dld.vkResetQueryPool) { 221 if (!dld.vkResetQueryPool) {
222 Proc(dld.vkResetQueryPool, dld, "vkResetQueryPoolEXT", device); 222 Proc(dld.vkResetQueryPool, dld, "vkResetQueryPoolEXT", device);
223 } 223 }
224
225 // Support for draw indirect with count is optional in Vulkan 1.2
226 if (!dld.vkCmdDrawIndirectCount) {
227 Proc(dld.vkCmdDrawIndirectCount, dld, "vkCmdDrawIndirectCountKHR", device);
228 Proc(dld.vkCmdDrawIndexedIndirectCount, dld, "vkCmdDrawIndexedIndirectCountKHR", device);
229 }
224#undef X 230#undef X
225} 231}
226 232
diff --git a/src/video_core/vulkan_common/vulkan_wrapper.h b/src/video_core/vulkan_common/vulkan_wrapper.h
index 412779b51..e86f661cb 100644
--- a/src/video_core/vulkan_common/vulkan_wrapper.h
+++ b/src/video_core/vulkan_common/vulkan_wrapper.h
@@ -215,8 +215,8 @@ struct DeviceDispatch : InstanceDispatch {
215 PFN_vkCmdDrawIndexed vkCmdDrawIndexed{}; 215 PFN_vkCmdDrawIndexed vkCmdDrawIndexed{};
216 PFN_vkCmdDrawIndirect vkCmdDrawIndirect{}; 216 PFN_vkCmdDrawIndirect vkCmdDrawIndirect{};
217 PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect{}; 217 PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect{};
218 PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR{}; 218 PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount{};
219 PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR{}; 219 PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount{};
220 PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT{}; 220 PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT{};
221 PFN_vkCmdEndQuery vkCmdEndQuery{}; 221 PFN_vkCmdEndQuery vkCmdEndQuery{};
222 PFN_vkCmdEndRenderPass vkCmdEndRenderPass{}; 222 PFN_vkCmdEndRenderPass vkCmdEndRenderPass{};
@@ -1065,15 +1065,15 @@ public:
1065 1065
1066 void DrawIndirectCount(VkBuffer src_buffer, VkDeviceSize src_offset, VkBuffer count_buffer, 1066 void DrawIndirectCount(VkBuffer src_buffer, VkDeviceSize src_offset, VkBuffer count_buffer,
1067 VkDeviceSize count_offset, u32 draw_count, u32 stride) const noexcept { 1067 VkDeviceSize count_offset, u32 draw_count, u32 stride) const noexcept {
1068 dld->vkCmdDrawIndirectCountKHR(handle, src_buffer, src_offset, count_buffer, count_offset, 1068 dld->vkCmdDrawIndirectCount(handle, src_buffer, src_offset, count_buffer, count_offset,
1069 draw_count, stride); 1069 draw_count, stride);
1070 } 1070 }
1071 1071
1072 void DrawIndexedIndirectCount(VkBuffer src_buffer, VkDeviceSize src_offset, 1072 void DrawIndexedIndirectCount(VkBuffer src_buffer, VkDeviceSize src_offset,
1073 VkBuffer count_buffer, VkDeviceSize count_offset, u32 draw_count, 1073 VkBuffer count_buffer, VkDeviceSize count_offset, u32 draw_count,
1074 u32 stride) const noexcept { 1074 u32 stride) const noexcept {
1075 dld->vkCmdDrawIndexedIndirectCountKHR(handle, src_buffer, src_offset, count_buffer, 1075 dld->vkCmdDrawIndexedIndirectCount(handle, src_buffer, src_offset, count_buffer,
1076 count_offset, draw_count, stride); 1076 count_offset, draw_count, stride);
1077 } 1077 }
1078 1078
1079 void ClearAttachments(Span<VkClearAttachment> attachments, 1079 void ClearAttachments(Span<VkClearAttachment> attachments,
diff --git a/src/yuzu/Info.plist b/src/yuzu/Info.plist
index 0eb377926..f05f3186c 100644
--- a/src/yuzu/Info.plist
+++ b/src/yuzu/Info.plist
@@ -34,6 +34,8 @@ SPDX-License-Identifier: GPL-2.0-or-later
34 <string></string> 34 <string></string>
35 <key>CSResourcesFileMapped</key> 35 <key>CSResourcesFileMapped</key>
36 <true/> 36 <true/>
37 <key>LSApplicationCategoryType</key>
38 <string>public.app-category.games</string>
37 <key>LSRequiresCarbon</key> 39 <key>LSRequiresCarbon</key>
38 <true/> 40 <true/>
39 <key>NSHumanReadableCopyright</key> 41 <key>NSHumanReadableCopyright</key>
diff --git a/src/yuzu/bootmanager.cpp b/src/yuzu/bootmanager.cpp
index 3d560f303..d65991734 100644
--- a/src/yuzu/bootmanager.cpp
+++ b/src/yuzu/bootmanager.cpp
@@ -96,9 +96,9 @@ void EmuThread::run() {
96 m_is_running.store(false); 96 m_is_running.store(false);
97 m_is_running.notify_all(); 97 m_is_running.notify_all();
98 98
99 emit DebugModeEntered(); 99 EmulationPaused(lk);
100 Common::CondvarWait(m_should_run_cv, lk, stop_token, [&] { return m_should_run; }); 100 Common::CondvarWait(m_should_run_cv, lk, stop_token, [&] { return m_should_run; });
101 emit DebugModeLeft(); 101 EmulationResumed(lk);
102 } 102 }
103 } 103 }
104 104
@@ -111,6 +111,21 @@ void EmuThread::run() {
111#endif 111#endif
112} 112}
113 113
114// Unlock while emitting signals so that the main thread can
115// continue pumping events.
116
117void EmuThread::EmulationPaused(std::unique_lock<std::mutex>& lk) {
118 lk.unlock();
119 emit DebugModeEntered();
120 lk.lock();
121}
122
123void EmuThread::EmulationResumed(std::unique_lock<std::mutex>& lk) {
124 lk.unlock();
125 emit DebugModeLeft();
126 lk.lock();
127}
128
114#ifdef HAS_OPENGL 129#ifdef HAS_OPENGL
115class OpenGLSharedContext : public Core::Frontend::GraphicsContext { 130class OpenGLSharedContext : public Core::Frontend::GraphicsContext {
116public: 131public:
diff --git a/src/yuzu/bootmanager.h b/src/yuzu/bootmanager.h
index eca16b313..092c6206f 100644
--- a/src/yuzu/bootmanager.h
+++ b/src/yuzu/bootmanager.h
@@ -92,6 +92,10 @@ public:
92 } 92 }
93 93
94private: 94private:
95 void EmulationPaused(std::unique_lock<std::mutex>& lk);
96 void EmulationResumed(std::unique_lock<std::mutex>& lk);
97
98private:
95 Core::System& m_system; 99 Core::System& m_system;
96 100
97 std::stop_source m_stop_source; 101 std::stop_source m_stop_source;
diff --git a/src/yuzu/configuration/configure_graphics_advanced.cpp b/src/yuzu/configuration/configure_graphics_advanced.cpp
index fdf8485ce..cc0155a2c 100644
--- a/src/yuzu/configuration/configure_graphics_advanced.cpp
+++ b/src/yuzu/configuration/configure_graphics_advanced.cpp
@@ -22,6 +22,7 @@ ConfigureGraphicsAdvanced::~ConfigureGraphicsAdvanced() = default;
22void ConfigureGraphicsAdvanced::SetConfiguration() { 22void ConfigureGraphicsAdvanced::SetConfiguration() {
23 const bool runtime_lock = !system.IsPoweredOn(); 23 const bool runtime_lock = !system.IsPoweredOn();
24 ui->use_vsync->setEnabled(runtime_lock); 24 ui->use_vsync->setEnabled(runtime_lock);
25 ui->renderer_force_max_clock->setEnabled(runtime_lock);
25 ui->use_asynchronous_shaders->setEnabled(runtime_lock); 26 ui->use_asynchronous_shaders->setEnabled(runtime_lock);
26 ui->anisotropic_filtering_combobox->setEnabled(runtime_lock); 27 ui->anisotropic_filtering_combobox->setEnabled(runtime_lock);
27 28
@@ -40,12 +41,12 @@ void ConfigureGraphicsAdvanced::SetConfiguration() {
40 Settings::values.max_anisotropy.GetValue()); 41 Settings::values.max_anisotropy.GetValue());
41 } else { 42 } else {
42 ConfigurationShared::SetPerGameSetting(ui->gpu_accuracy, &Settings::values.gpu_accuracy); 43 ConfigurationShared::SetPerGameSetting(ui->gpu_accuracy, &Settings::values.gpu_accuracy);
43 ConfigurationShared::SetPerGameSetting(ui->renderer_force_max_clock,
44 &Settings::values.renderer_force_max_clock);
45 ConfigurationShared::SetPerGameSetting(ui->anisotropic_filtering_combobox, 44 ConfigurationShared::SetPerGameSetting(ui->anisotropic_filtering_combobox,
46 &Settings::values.max_anisotropy); 45 &Settings::values.max_anisotropy);
47 ConfigurationShared::SetHighlight(ui->label_gpu_accuracy, 46 ConfigurationShared::SetHighlight(ui->label_gpu_accuracy,
48 !Settings::values.gpu_accuracy.UsingGlobal()); 47 !Settings::values.gpu_accuracy.UsingGlobal());
48 ConfigurationShared::SetHighlight(ui->renderer_force_max_clock,
49 !Settings::values.renderer_force_max_clock.UsingGlobal());
49 ConfigurationShared::SetHighlight(ui->af_label, 50 ConfigurationShared::SetHighlight(ui->af_label,
50 !Settings::values.max_anisotropy.UsingGlobal()); 51 !Settings::values.max_anisotropy.UsingGlobal());
51 } 52 }
diff --git a/src/yuzu/main.cpp b/src/yuzu/main.cpp
index c55f81c2f..571eacf9f 100644
--- a/src/yuzu/main.cpp
+++ b/src/yuzu/main.cpp
@@ -1839,9 +1839,11 @@ void GMainWindow::OnEmulationStopTimeExpired() {
1839 1839
1840void GMainWindow::OnEmulationStopped() { 1840void GMainWindow::OnEmulationStopped() {
1841 shutdown_timer.stop(); 1841 shutdown_timer.stop();
1842 emu_thread->disconnect(); 1842 if (emu_thread) {
1843 emu_thread->wait(); 1843 emu_thread->disconnect();
1844 emu_thread = nullptr; 1844 emu_thread->wait();
1845 emu_thread.reset();
1846 }
1845 1847
1846 if (shutdown_dialog) { 1848 if (shutdown_dialog) {
1847 shutdown_dialog->deleteLater(); 1849 shutdown_dialog->deleteLater();
@@ -3029,6 +3031,8 @@ void GMainWindow::OnStopGame() {
3029 3031
3030 if (OnShutdownBegin()) { 3032 if (OnShutdownBegin()) {
3031 OnShutdownBeginDialog(); 3033 OnShutdownBeginDialog();
3034 } else {
3035 OnEmulationStopped();
3032 } 3036 }
3033} 3037}
3034 3038
@@ -3726,15 +3730,36 @@ void GMainWindow::UpdateWindowTitle(std::string_view title_name, std::string_vie
3726 } 3730 }
3727} 3731}
3728 3732
3733std::string GMainWindow::CreateTASFramesString(
3734 std::array<size_t, InputCommon::TasInput::PLAYER_NUMBER> frames) const {
3735 std::string string = "";
3736 size_t maxPlayerIndex = 0;
3737 for (size_t i = 0; i < frames.size(); i++) {
3738 if (frames[i] != 0) {
3739 if (maxPlayerIndex != 0)
3740 string += ", ";
3741 while (maxPlayerIndex++ != i)
3742 string += "0, ";
3743 string += std::to_string(frames[i]);
3744 }
3745 }
3746 return string;
3747}
3748
3729QString GMainWindow::GetTasStateDescription() const { 3749QString GMainWindow::GetTasStateDescription() const {
3730 auto [tas_status, current_tas_frame, total_tas_frames] = input_subsystem->GetTas()->GetStatus(); 3750 auto [tas_status, current_tas_frame, total_tas_frames] = input_subsystem->GetTas()->GetStatus();
3751 std::string tas_frames_string = CreateTASFramesString(total_tas_frames);
3731 switch (tas_status) { 3752 switch (tas_status) {
3732 case InputCommon::TasInput::TasState::Running: 3753 case InputCommon::TasInput::TasState::Running:
3733 return tr("TAS state: Running %1/%2").arg(current_tas_frame).arg(total_tas_frames); 3754 return tr("TAS state: Running %1/%2")
3755 .arg(current_tas_frame)
3756 .arg(QString::fromStdString(tas_frames_string));
3734 case InputCommon::TasInput::TasState::Recording: 3757 case InputCommon::TasInput::TasState::Recording:
3735 return tr("TAS state: Recording %1").arg(total_tas_frames); 3758 return tr("TAS state: Recording %1").arg(total_tas_frames[0]);
3736 case InputCommon::TasInput::TasState::Stopped: 3759 case InputCommon::TasInput::TasState::Stopped:
3737 return tr("TAS state: Idle %1/%2").arg(current_tas_frame).arg(total_tas_frames); 3760 return tr("TAS state: Idle %1/%2")
3761 .arg(current_tas_frame)
3762 .arg(QString::fromStdString(tas_frames_string));
3738 default: 3763 default:
3739 return tr("TAS State: Invalid"); 3764 return tr("TAS State: Invalid");
3740 } 3765 }
diff --git a/src/yuzu/main.h b/src/yuzu/main.h
index f25ce65a8..0f61abc7a 100644
--- a/src/yuzu/main.h
+++ b/src/yuzu/main.h
@@ -12,6 +12,7 @@
12 12
13#include "common/announce_multiplayer_room.h" 13#include "common/announce_multiplayer_room.h"
14#include "common/common_types.h" 14#include "common/common_types.h"
15#include "input_common/drivers/tas_input.h"
15#include "yuzu/compatibility_list.h" 16#include "yuzu/compatibility_list.h"
16#include "yuzu/hotkeys.h" 17#include "yuzu/hotkeys.h"
17 18
@@ -266,6 +267,9 @@ private:
266 void changeEvent(QEvent* event) override; 267 void changeEvent(QEvent* event) override;
267 void closeEvent(QCloseEvent* event) override; 268 void closeEvent(QCloseEvent* event) override;
268 269
270 std::string CreateTASFramesString(
271 std::array<size_t, InputCommon::TasInput::PLAYER_NUMBER> frames) const;
272
269#ifdef __unix__ 273#ifdef __unix__
270 void SetupSigInterrupts(); 274 void SetupSigInterrupts();
271 static void HandleSigInterrupt(int); 275 static void HandleSigInterrupt(int);