author    Ameer  2020-07-04 00:59:40 -0400
committer Ameer  2020-07-04 00:59:40 -0400
commit    f829932ed191ad469df01342191bf2725e8a20bb (patch)
tree      0ae185ce3ef43ef9b085aae7b9ad5abb04e3d239 /src/video_core/renderer_vulkan
parent    Fix for always firing triggers on some controllers, trigger threshold more un... (diff)
parent    Merge pull request #4218 from ogniK5377/opus-external (diff)
Fix merge conflicts?
Diffstat (limited to 'src/video_core/renderer_vulkan')
-rw-r--r--  src/video_core/renderer_vulkan/maxwell_to_vk.cpp        | 144
-rw-r--r--  src/video_core/renderer_vulkan/renderer_vulkan.cpp      |  28
-rw-r--r--  src/video_core/renderer_vulkan/vk_buffer_cache.cpp      |  89
-rw-r--r--  src/video_core/renderer_vulkan/vk_buffer_cache.h        |  29
-rw-r--r--  src/video_core/renderer_vulkan/vk_rasterizer.cpp        |  95
-rw-r--r--  src/video_core/renderer_vulkan/vk_rasterizer.h          |   5
-rw-r--r--  src/video_core/renderer_vulkan/vk_scheduler.cpp         |   2
-rw-r--r--  src/video_core/renderer_vulkan/vk_stream_buffer.h       |   6
-rw-r--r--  src/video_core/renderer_vulkan/vk_update_descriptor.cpp |  36
-rw-r--r--  src/video_core/renderer_vulkan/vk_update_descriptor.h   |  32
-rw-r--r--  src/video_core/renderer_vulkan/wrapper.cpp              |  19
-rw-r--r--  src/video_core/renderer_vulkan/wrapper.h                |   6
12 files changed, 289 insertions(+), 202 deletions(-)
diff --git a/src/video_core/renderer_vulkan/maxwell_to_vk.cpp b/src/video_core/renderer_vulkan/maxwell_to_vk.cpp
index 1f2b6734b..d7f1ae89f 100644
--- a/src/video_core/renderer_vulkan/maxwell_to_vk.cpp
+++ b/src/video_core/renderer_vulkan/maxwell_to_vk.cpp
@@ -294,6 +294,28 @@ VkPrimitiveTopology PrimitiveTopology([[maybe_unused]] const VKDevice& device,
 
 VkFormat VertexFormat(Maxwell::VertexAttribute::Type type, Maxwell::VertexAttribute::Size size) {
     switch (type) {
+    case Maxwell::VertexAttribute::Type::UnsignedNorm:
+        switch (size) {
+        case Maxwell::VertexAttribute::Size::Size_8:
+            return VK_FORMAT_R8_UNORM;
+        case Maxwell::VertexAttribute::Size::Size_8_8:
+            return VK_FORMAT_R8G8_UNORM;
+        case Maxwell::VertexAttribute::Size::Size_8_8_8:
+            return VK_FORMAT_R8G8B8_UNORM;
+        case Maxwell::VertexAttribute::Size::Size_8_8_8_8:
+            return VK_FORMAT_R8G8B8A8_UNORM;
+        case Maxwell::VertexAttribute::Size::Size_16:
+            return VK_FORMAT_R16_UNORM;
+        case Maxwell::VertexAttribute::Size::Size_16_16:
+            return VK_FORMAT_R16G16_UNORM;
+        case Maxwell::VertexAttribute::Size::Size_16_16_16:
+            return VK_FORMAT_R16G16B16_UNORM;
+        case Maxwell::VertexAttribute::Size::Size_16_16_16_16:
+            return VK_FORMAT_R16G16B16A16_UNORM;
+        case Maxwell::VertexAttribute::Size::Size_10_10_10_2:
+            return VK_FORMAT_A2B10G10R10_UNORM_PACK32;
+        }
+        break;
     case Maxwell::VertexAttribute::Type::SignedNorm:
         switch (size) {
         case Maxwell::VertexAttribute::Size::Size_8:
@@ -314,62 +336,50 @@ VkFormat VertexFormat(Maxwell::VertexAttribute::Type type, Maxwell::VertexAttrib
             return VK_FORMAT_R16G16B16A16_SNORM;
         case Maxwell::VertexAttribute::Size::Size_10_10_10_2:
             return VK_FORMAT_A2B10G10R10_SNORM_PACK32;
-        default:
-            break;
         }
         break;
-    case Maxwell::VertexAttribute::Type::UnsignedNorm:
+    case Maxwell::VertexAttribute::Type::UnsignedScaled:
         switch (size) {
         case Maxwell::VertexAttribute::Size::Size_8:
-            return VK_FORMAT_R8_UNORM;
+            return VK_FORMAT_R8_USCALED;
         case Maxwell::VertexAttribute::Size::Size_8_8:
-            return VK_FORMAT_R8G8_UNORM;
+            return VK_FORMAT_R8G8_USCALED;
         case Maxwell::VertexAttribute::Size::Size_8_8_8:
-            return VK_FORMAT_R8G8B8_UNORM;
+            return VK_FORMAT_R8G8B8_USCALED;
         case Maxwell::VertexAttribute::Size::Size_8_8_8_8:
-            return VK_FORMAT_R8G8B8A8_UNORM;
+            return VK_FORMAT_R8G8B8A8_USCALED;
         case Maxwell::VertexAttribute::Size::Size_16:
-            return VK_FORMAT_R16_UNORM;
+            return VK_FORMAT_R16_USCALED;
         case Maxwell::VertexAttribute::Size::Size_16_16:
-            return VK_FORMAT_R16G16_UNORM;
+            return VK_FORMAT_R16G16_USCALED;
         case Maxwell::VertexAttribute::Size::Size_16_16_16:
-            return VK_FORMAT_R16G16B16_UNORM;
+            return VK_FORMAT_R16G16B16_USCALED;
         case Maxwell::VertexAttribute::Size::Size_16_16_16_16:
-            return VK_FORMAT_R16G16B16A16_UNORM;
+            return VK_FORMAT_R16G16B16A16_USCALED;
         case Maxwell::VertexAttribute::Size::Size_10_10_10_2:
-            return VK_FORMAT_A2B10G10R10_UNORM_PACK32;
-        default:
-            break;
+            return VK_FORMAT_A2B10G10R10_USCALED_PACK32;
         }
         break;
-    case Maxwell::VertexAttribute::Type::SignedInt:
+    case Maxwell::VertexAttribute::Type::SignedScaled:
         switch (size) {
         case Maxwell::VertexAttribute::Size::Size_8:
-            return VK_FORMAT_R8_SINT;
+            return VK_FORMAT_R8_SSCALED;
         case Maxwell::VertexAttribute::Size::Size_8_8:
-            return VK_FORMAT_R8G8_SINT;
+            return VK_FORMAT_R8G8_SSCALED;
         case Maxwell::VertexAttribute::Size::Size_8_8_8:
-            return VK_FORMAT_R8G8B8_SINT;
+            return VK_FORMAT_R8G8B8_SSCALED;
         case Maxwell::VertexAttribute::Size::Size_8_8_8_8:
-            return VK_FORMAT_R8G8B8A8_SINT;
+            return VK_FORMAT_R8G8B8A8_SSCALED;
         case Maxwell::VertexAttribute::Size::Size_16:
-            return VK_FORMAT_R16_SINT;
+            return VK_FORMAT_R16_SSCALED;
         case Maxwell::VertexAttribute::Size::Size_16_16:
-            return VK_FORMAT_R16G16_SINT;
+            return VK_FORMAT_R16G16_SSCALED;
         case Maxwell::VertexAttribute::Size::Size_16_16_16:
-            return VK_FORMAT_R16G16B16_SINT;
+            return VK_FORMAT_R16G16B16_SSCALED;
         case Maxwell::VertexAttribute::Size::Size_16_16_16_16:
-            return VK_FORMAT_R16G16B16A16_SINT;
-        case Maxwell::VertexAttribute::Size::Size_32:
-            return VK_FORMAT_R32_SINT;
-        case Maxwell::VertexAttribute::Size::Size_32_32:
-            return VK_FORMAT_R32G32_SINT;
-        case Maxwell::VertexAttribute::Size::Size_32_32_32:
-            return VK_FORMAT_R32G32B32_SINT;
-        case Maxwell::VertexAttribute::Size::Size_32_32_32_32:
-            return VK_FORMAT_R32G32B32A32_SINT;
-        default:
-            break;
+            return VK_FORMAT_R16G16B16A16_SSCALED;
+        case Maxwell::VertexAttribute::Size::Size_10_10_10_2:
+            return VK_FORMAT_A2B10G10R10_SSCALED_PACK32;
         }
         break;
     case Maxwell::VertexAttribute::Type::UnsignedInt:
@@ -398,56 +408,50 @@ VkFormat VertexFormat(Maxwell::VertexAttribute::Type type, Maxwell::VertexAttrib
             return VK_FORMAT_R32G32B32_UINT;
         case Maxwell::VertexAttribute::Size::Size_32_32_32_32:
             return VK_FORMAT_R32G32B32A32_UINT;
-        default:
-            break;
+        case Maxwell::VertexAttribute::Size::Size_10_10_10_2:
+            return VK_FORMAT_A2B10G10R10_UINT_PACK32;
         }
         break;
-    case Maxwell::VertexAttribute::Type::UnsignedScaled:
+    case Maxwell::VertexAttribute::Type::SignedInt:
         switch (size) {
         case Maxwell::VertexAttribute::Size::Size_8:
-            return VK_FORMAT_R8_USCALED;
+            return VK_FORMAT_R8_SINT;
         case Maxwell::VertexAttribute::Size::Size_8_8:
-            return VK_FORMAT_R8G8_USCALED;
+            return VK_FORMAT_R8G8_SINT;
         case Maxwell::VertexAttribute::Size::Size_8_8_8:
-            return VK_FORMAT_R8G8B8_USCALED;
+            return VK_FORMAT_R8G8B8_SINT;
         case Maxwell::VertexAttribute::Size::Size_8_8_8_8:
-            return VK_FORMAT_R8G8B8A8_USCALED;
+            return VK_FORMAT_R8G8B8A8_SINT;
         case Maxwell::VertexAttribute::Size::Size_16:
-            return VK_FORMAT_R16_USCALED;
+            return VK_FORMAT_R16_SINT;
         case Maxwell::VertexAttribute::Size::Size_16_16:
-            return VK_FORMAT_R16G16_USCALED;
+            return VK_FORMAT_R16G16_SINT;
         case Maxwell::VertexAttribute::Size::Size_16_16_16:
-            return VK_FORMAT_R16G16B16_USCALED;
+            return VK_FORMAT_R16G16B16_SINT;
         case Maxwell::VertexAttribute::Size::Size_16_16_16_16:
-            return VK_FORMAT_R16G16B16A16_USCALED;
-        default:
-            break;
+            return VK_FORMAT_R16G16B16A16_SINT;
+        case Maxwell::VertexAttribute::Size::Size_32:
+            return VK_FORMAT_R32_SINT;
+        case Maxwell::VertexAttribute::Size::Size_32_32:
+            return VK_FORMAT_R32G32_SINT;
+        case Maxwell::VertexAttribute::Size::Size_32_32_32:
+            return VK_FORMAT_R32G32B32_SINT;
+        case Maxwell::VertexAttribute::Size::Size_32_32_32_32:
+            return VK_FORMAT_R32G32B32A32_SINT;
+        case Maxwell::VertexAttribute::Size::Size_10_10_10_2:
+            return VK_FORMAT_A2B10G10R10_SINT_PACK32;
         }
         break;
-    case Maxwell::VertexAttribute::Type::SignedScaled:
+    case Maxwell::VertexAttribute::Type::Float:
         switch (size) {
-        case Maxwell::VertexAttribute::Size::Size_8:
-            return VK_FORMAT_R8_SSCALED;
-        case Maxwell::VertexAttribute::Size::Size_8_8:
-            return VK_FORMAT_R8G8_SSCALED;
-        case Maxwell::VertexAttribute::Size::Size_8_8_8:
-            return VK_FORMAT_R8G8B8_SSCALED;
-        case Maxwell::VertexAttribute::Size::Size_8_8_8_8:
-            return VK_FORMAT_R8G8B8A8_SSCALED;
         case Maxwell::VertexAttribute::Size::Size_16:
-            return VK_FORMAT_R16_SSCALED;
+            return VK_FORMAT_R16_SFLOAT;
         case Maxwell::VertexAttribute::Size::Size_16_16:
-            return VK_FORMAT_R16G16_SSCALED;
+            return VK_FORMAT_R16G16_SFLOAT;
         case Maxwell::VertexAttribute::Size::Size_16_16_16:
-            return VK_FORMAT_R16G16B16_SSCALED;
+            return VK_FORMAT_R16G16B16_SFLOAT;
         case Maxwell::VertexAttribute::Size::Size_16_16_16_16:
-            return VK_FORMAT_R16G16B16A16_SSCALED;
-        default:
-            break;
-        }
-        break;
-    case Maxwell::VertexAttribute::Type::Float:
-        switch (size) {
+            return VK_FORMAT_R16G16B16A16_SFLOAT;
         case Maxwell::VertexAttribute::Size::Size_32:
             return VK_FORMAT_R32_SFLOAT;
         case Maxwell::VertexAttribute::Size::Size_32_32:
@@ -456,16 +460,6 @@ VkFormat VertexFormat(Maxwell::VertexAttribute::Type type, Maxwell::VertexAttrib
             return VK_FORMAT_R32G32B32_SFLOAT;
         case Maxwell::VertexAttribute::Size::Size_32_32_32_32:
             return VK_FORMAT_R32G32B32A32_SFLOAT;
-        case Maxwell::VertexAttribute::Size::Size_16:
-            return VK_FORMAT_R16_SFLOAT;
-        case Maxwell::VertexAttribute::Size::Size_16_16:
-            return VK_FORMAT_R16G16_SFLOAT;
-        case Maxwell::VertexAttribute::Size::Size_16_16_16:
-            return VK_FORMAT_R16G16B16_SFLOAT;
-        case Maxwell::VertexAttribute::Size::Size_16_16_16_16:
-            return VK_FORMAT_R16G16B16A16_SFLOAT;
-        default:
-            break;
         }
         break;
     }
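
For reference, a hypothetical call site of the remapped translation function might look like the following; the attribute values are illustrative and the Maxwell/MaxwellToVK declarations from the yuzu headers above are assumed, so this is a sketch rather than code from this commit.

// Illustrative sketch: with the new UnsignedNorm case, an 8-bit RGBA
// normalized attribute now resolves to a concrete Vulkan format instead of
// falling through the switch.
const VkFormat format = MaxwellToVK::VertexFormat(
    Maxwell::VertexAttribute::Type::UnsignedNorm,
    Maxwell::VertexAttribute::Size::Size_8_8_8_8);
// format == VK_FORMAT_R8G8B8A8_UNORM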
diff --git a/src/video_core/renderer_vulkan/renderer_vulkan.cpp b/src/video_core/renderer_vulkan/renderer_vulkan.cpp
index cd9673d1f..2d9b18ed9 100644
--- a/src/video_core/renderer_vulkan/renderer_vulkan.cpp
+++ b/src/video_core/renderer_vulkan/renderer_vulkan.cpp
@@ -155,11 +155,31 @@ vk::Instance CreateInstance(Common::DynamicLibrary& library, vk::InstanceDispatc
         }
     }
 
-    static constexpr std::array layers_data{"VK_LAYER_LUNARG_standard_validation"};
-    vk::Span<const char*> layers = layers_data;
-    if (!enable_layers) {
-        layers = {};
+    std::vector<const char*> layers;
+    layers.reserve(1);
+    if (enable_layers) {
+        layers.push_back("VK_LAYER_KHRONOS_validation");
+    }
+
+    const std::optional layer_properties = vk::EnumerateInstanceLayerProperties(dld);
+    if (!layer_properties) {
+        LOG_ERROR(Render_Vulkan, "Failed to query layer properties, disabling layers");
+        layers.clear();
+    }
+
+    for (auto layer_it = layers.begin(); layer_it != layers.end();) {
+        const char* const layer = *layer_it;
+        const auto it = std::find_if(
+            layer_properties->begin(), layer_properties->end(),
+            [layer](const VkLayerProperties& prop) { return !std::strcmp(layer, prop.layerName); });
+        if (it == layer_properties->end()) {
+            LOG_ERROR(Render_Vulkan, "Layer {} not available, removing it", layer);
+            layer_it = layers.erase(layer_it);
+        } else {
+            ++layer_it;
+        }
     }
+
     vk::Instance instance = vk::Instance::Create(layers, extensions, dld);
     if (!instance) {
         LOG_ERROR(Render_Vulkan, "Failed to create Vulkan instance");
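
The layer-filtering idiom above can be restated against the plain Vulkan C API. The following is a minimal standalone sketch with illustrative names, not yuzu's actual code:

#include <cstring>
#include <vector>
#include <vulkan/vulkan.h>

// Drop any requested layer the loader does not report, instead of letting
// vkCreateInstance fail later with VK_ERROR_LAYER_NOT_PRESENT.
std::vector<const char*> FilterSupportedLayers(std::vector<const char*> requested) {
    uint32_t count = 0;
    if (vkEnumerateInstanceLayerProperties(&count, nullptr) != VK_SUCCESS) {
        return {}; // Query failed: run without layers, as the code above does.
    }
    std::vector<VkLayerProperties> available(count);
    if (vkEnumerateInstanceLayerProperties(&count, available.data()) != VK_SUCCESS) {
        return {};
    }
    for (auto it = requested.begin(); it != requested.end();) {
        const char* const name = *it;
        bool found = false;
        for (const VkLayerProperties& prop : available) {
            found = found || std::strcmp(name, prop.layerName) == 0;
        }
        it = found ? it + 1 : requested.erase(it);
    }
    return requested;
}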
diff --git a/src/video_core/renderer_vulkan/vk_buffer_cache.cpp b/src/video_core/renderer_vulkan/vk_buffer_cache.cpp
index 1fde38328..2be38d419 100644
--- a/src/video_core/renderer_vulkan/vk_buffer_cache.cpp
+++ b/src/video_core/renderer_vulkan/vk_buffer_cache.cpp
@@ -37,9 +37,9 @@ std::unique_ptr<VKStreamBuffer> CreateStreamBuffer(const VKDevice& device, VKSch
 
 } // Anonymous namespace
 
-Buffer::Buffer(const VKDevice& device, VKMemoryManager& memory_manager, VAddr cpu_addr,
-               std::size_t size)
-    : VideoCommon::BufferBlock{cpu_addr, size} {
+Buffer::Buffer(const VKDevice& device, VKMemoryManager& memory_manager, VKScheduler& scheduler_,
+               VKStagingBufferPool& staging_pool_, VAddr cpu_addr, std::size_t size)
+    : VideoCommon::BufferBlock{cpu_addr, size}, scheduler{scheduler_}, staging_pool{staging_pool_} {
     VkBufferCreateInfo ci;
     ci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
     ci.pNext = nullptr;
@@ -56,40 +56,15 @@ Buffer::Buffer(const VKDevice& device, VKMemoryManager& memory_manager, VAddr cp
 
 Buffer::~Buffer() = default;
 
-VKBufferCache::VKBufferCache(VideoCore::RasterizerInterface& rasterizer, Core::System& system,
-                             const VKDevice& device, VKMemoryManager& memory_manager,
-                             VKScheduler& scheduler, VKStagingBufferPool& staging_pool)
-    : VideoCommon::BufferCache<Buffer, VkBuffer, VKStreamBuffer>{rasterizer, system,
-                                                                 CreateStreamBuffer(device,
-                                                                                    scheduler)},
-      device{device}, memory_manager{memory_manager}, scheduler{scheduler}, staging_pool{
-                                                                                staging_pool} {}
-
-VKBufferCache::~VKBufferCache() = default;
-
-std::shared_ptr<Buffer> VKBufferCache::CreateBlock(VAddr cpu_addr, std::size_t size) {
-    return std::make_shared<Buffer>(device, memory_manager, cpu_addr, size);
-}
-
-VkBuffer VKBufferCache::GetEmptyBuffer(std::size_t size) {
-    size = std::max(size, std::size_t(4));
-    const auto& empty = staging_pool.GetUnusedBuffer(size, false);
-    scheduler.RequestOutsideRenderPassOperationContext();
-    scheduler.Record([size, buffer = *empty.handle](vk::CommandBuffer cmdbuf) {
-        cmdbuf.FillBuffer(buffer, 0, size, 0);
-    });
-    return *empty.handle;
-}
-
-void VKBufferCache::UploadBlockData(const Buffer& buffer, std::size_t offset, std::size_t size,
-                                    const u8* data) {
+void Buffer::Upload(std::size_t offset, std::size_t size, const u8* data) {
     const auto& staging = staging_pool.GetUnusedBuffer(size, true);
     std::memcpy(staging.commit->Map(size), data, size);
 
     scheduler.RequestOutsideRenderPassOperationContext();
-    scheduler.Record([staging = *staging.handle, buffer = buffer.Handle(), offset,
-                      size](vk::CommandBuffer cmdbuf) {
-        cmdbuf.CopyBuffer(staging, buffer, VkBufferCopy{0, offset, size});
+
+    const VkBuffer handle = Handle();
+    scheduler.Record([staging = *staging.handle, handle, offset, size](vk::CommandBuffer cmdbuf) {
+        cmdbuf.CopyBuffer(staging, handle, VkBufferCopy{0, offset, size});
 
         VkBufferMemoryBarrier barrier;
         barrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
@@ -98,7 +73,7 @@ void VKBufferCache::UploadBlockData(const Buffer& buffer, std::size_t offset, st
         barrier.dstAccessMask = UPLOAD_ACCESS_BARRIERS;
         barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
         barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
-        barrier.buffer = buffer;
+        barrier.buffer = handle;
         barrier.offset = offset;
         barrier.size = size;
         cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_TRANSFER_BIT, UPLOAD_PIPELINE_STAGE, 0, {},
@@ -106,12 +81,12 @@ void VKBufferCache::UploadBlockData(const Buffer& buffer, std::size_t offset, st
     });
 }
 
-void VKBufferCache::DownloadBlockData(const Buffer& buffer, std::size_t offset, std::size_t size,
-                                      u8* data) {
+void Buffer::Download(std::size_t offset, std::size_t size, u8* data) {
     const auto& staging = staging_pool.GetUnusedBuffer(size, true);
     scheduler.RequestOutsideRenderPassOperationContext();
-    scheduler.Record([staging = *staging.handle, buffer = buffer.Handle(), offset,
-                      size](vk::CommandBuffer cmdbuf) {
+
+    const VkBuffer handle = Handle();
+    scheduler.Record([staging = *staging.handle, handle, offset, size](vk::CommandBuffer cmdbuf) {
         VkBufferMemoryBarrier barrier;
         barrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
         barrier.pNext = nullptr;
@@ -119,7 +94,7 @@ void VKBufferCache::DownloadBlockData(const Buffer& buffer, std::size_t offset,
         barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
         barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
         barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
-        barrier.buffer = buffer;
+        barrier.buffer = handle;
         barrier.offset = offset;
         barrier.size = size;
 
@@ -127,17 +102,19 @@ void VKBufferCache::DownloadBlockData(const Buffer& buffer, std::size_t offset,
                                  VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT |
                                  VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
                              VK_PIPELINE_STAGE_TRANSFER_BIT, 0, {}, barrier, {});
-        cmdbuf.CopyBuffer(buffer, staging, VkBufferCopy{offset, 0, size});
+        cmdbuf.CopyBuffer(handle, staging, VkBufferCopy{offset, 0, size});
     });
     scheduler.Finish();
 
     std::memcpy(data, staging.commit->Map(size), size);
 }
 
-void VKBufferCache::CopyBlock(const Buffer& src, const Buffer& dst, std::size_t src_offset,
-                              std::size_t dst_offset, std::size_t size) {
+void Buffer::CopyFrom(const Buffer& src, std::size_t src_offset, std::size_t dst_offset,
+                      std::size_t size) {
     scheduler.RequestOutsideRenderPassOperationContext();
-    scheduler.Record([src_buffer = src.Handle(), dst_buffer = dst.Handle(), src_offset, dst_offset,
+
+    const VkBuffer dst_buffer = Handle();
+    scheduler.Record([src_buffer = src.Handle(), dst_buffer, src_offset, dst_offset,
                       size](vk::CommandBuffer cmdbuf) {
         cmdbuf.CopyBuffer(src_buffer, dst_buffer, VkBufferCopy{src_offset, dst_offset, size});
 
@@ -165,4 +142,30 @@ void VKBufferCache::CopyBlock(const Buffer& src, const Buffer& dst, std::size_t
     });
 }
 
+VKBufferCache::VKBufferCache(VideoCore::RasterizerInterface& rasterizer, Core::System& system,
+                             const VKDevice& device, VKMemoryManager& memory_manager,
+                             VKScheduler& scheduler, VKStagingBufferPool& staging_pool)
+    : VideoCommon::BufferCache<Buffer, VkBuffer, VKStreamBuffer>{rasterizer, system,
+                                                                 CreateStreamBuffer(device,
+                                                                                    scheduler)},
+      device{device}, memory_manager{memory_manager}, scheduler{scheduler}, staging_pool{
+                                                                                staging_pool} {}
+
+VKBufferCache::~VKBufferCache() = default;
+
+std::shared_ptr<Buffer> VKBufferCache::CreateBlock(VAddr cpu_addr, std::size_t size) {
+    return std::make_shared<Buffer>(device, memory_manager, scheduler, staging_pool, cpu_addr,
+                                    size);
+}
+
+VKBufferCache::BufferInfo VKBufferCache::GetEmptyBuffer(std::size_t size) {
+    size = std::max(size, std::size_t(4));
+    const auto& empty = staging_pool.GetUnusedBuffer(size, false);
+    scheduler.RequestOutsideRenderPassOperationContext();
+    scheduler.Record([size, buffer = *empty.handle](vk::CommandBuffer cmdbuf) {
+        cmdbuf.FillBuffer(buffer, 0, size, 0);
+    });
+    return {*empty.handle, 0, 0};
+}
+
 } // namespace Vulkan
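
Spelled out against the raw Vulkan API, the upload path that Buffer::Upload records is a staging copy followed by a buffer memory barrier. A minimal sketch, assuming cmdbuf, staging, dst, offset and size already exist; the access and stage masks below stand in for the UPLOAD_ACCESS_BARRIERS and UPLOAD_PIPELINE_STAGE constants used above:

// Copy from the CPU-visible staging buffer into the device-local block.
const VkBufferCopy region{0, offset, size}; // srcOffset, dstOffset, size
vkCmdCopyBuffer(cmdbuf, staging, dst, 1, &region);

// Make the transfer write visible to subsequent reads of that range.
VkBufferMemoryBarrier barrier{};
barrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
barrier.buffer = dst;
barrier.offset = offset;
barrier.size = size;
vkCmdPipelineBarrier(cmdbuf, VK_PIPELINE_STAGE_TRANSFER_BIT,
                     VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0, nullptr, 1, &barrier,
                     0, nullptr);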
diff --git a/src/video_core/renderer_vulkan/vk_buffer_cache.h b/src/video_core/renderer_vulkan/vk_buffer_cache.h
index 9ebbef835..991ee451c 100644
--- a/src/video_core/renderer_vulkan/vk_buffer_cache.h
+++ b/src/video_core/renderer_vulkan/vk_buffer_cache.h
@@ -25,15 +25,29 @@ class VKScheduler;
 
 class Buffer final : public VideoCommon::BufferBlock {
 public:
-    explicit Buffer(const VKDevice& device, VKMemoryManager& memory_manager, VAddr cpu_addr,
-                    std::size_t size);
+    explicit Buffer(const VKDevice& device, VKMemoryManager& memory_manager, VKScheduler& scheduler,
+                    VKStagingBufferPool& staging_pool, VAddr cpu_addr, std::size_t size);
     ~Buffer();
 
+    void Upload(std::size_t offset, std::size_t size, const u8* data);
+
+    void Download(std::size_t offset, std::size_t size, u8* data);
+
+    void CopyFrom(const Buffer& src, std::size_t src_offset, std::size_t dst_offset,
+                  std::size_t size);
+
     VkBuffer Handle() const {
         return *buffer.handle;
     }
 
+    u64 Address() const {
+        return 0;
+    }
+
 private:
+    VKScheduler& scheduler;
+    VKStagingBufferPool& staging_pool;
+
     VKBuffer buffer;
 };
 
@@ -44,20 +58,11 @@ public:
                           VKScheduler& scheduler, VKStagingBufferPool& staging_pool);
     ~VKBufferCache();
 
-    VkBuffer GetEmptyBuffer(std::size_t size) override;
+    BufferInfo GetEmptyBuffer(std::size_t size) override;
 
 protected:
     std::shared_ptr<Buffer> CreateBlock(VAddr cpu_addr, std::size_t size) override;
 
-    void UploadBlockData(const Buffer& buffer, std::size_t offset, std::size_t size,
-                         const u8* data) override;
-
-    void DownloadBlockData(const Buffer& buffer, std::size_t offset, std::size_t size,
-                           u8* data) override;
-
-    void CopyBlock(const Buffer& src, const Buffer& dst, std::size_t src_offset,
-                   std::size_t dst_offset, std::size_t size) override;
-
 private:
     const VKDevice& device;
     VKMemoryManager& memory_manager;
diff --git a/src/video_core/renderer_vulkan/vk_rasterizer.cpp b/src/video_core/renderer_vulkan/vk_rasterizer.cpp
index a77fa35c3..a8d94eac3 100644
--- a/src/video_core/renderer_vulkan/vk_rasterizer.cpp
+++ b/src/video_core/renderer_vulkan/vk_rasterizer.cpp
@@ -143,6 +143,49 @@ Tegra::Texture::FullTextureInfo GetTextureInfo(const Engine& engine, const Entry
     }
 }
 
+/// @brief Determine if an attachment to be updated has to preserve contents
+/// @param is_clear True when a clear is being executed
+/// @param regs 3D registers
+/// @return True when the contents have to be preserved
+bool HasToPreserveColorContents(bool is_clear, const Maxwell& regs) {
+    if (!is_clear) {
+        return true;
+    }
+    // First we have to make sure all clear masks are enabled.
+    if (!regs.clear_buffers.R || !regs.clear_buffers.G || !regs.clear_buffers.B ||
+        !regs.clear_buffers.A) {
+        return true;
+    }
+    // If scissors are disabled, the whole screen is cleared
+    if (!regs.clear_flags.scissor) {
+        return false;
+    }
+    // Then we have to confirm scissor testing clears the whole image
+    const std::size_t index = regs.clear_buffers.RT;
+    const auto& scissor = regs.scissor_test[0];
+    return scissor.min_x > 0 || scissor.min_y > 0 || scissor.max_x < regs.rt[index].width ||
+           scissor.max_y < regs.rt[index].height;
+}
+
+/// @brief Determine if a depth attachment to be updated has to preserve contents
+/// @param is_clear True when a clear is being executed
+/// @param regs 3D registers
+/// @return True when the contents have to be preserved
+bool HasToPreserveDepthContents(bool is_clear, const Maxwell& regs) {
+    // If we are not clearing, the contents have to be preserved
+    if (!is_clear) {
+        return true;
+    }
+    // For depth stencil clears we only have to confirm the scissor test covers the whole image
+    if (!regs.clear_flags.scissor) {
+        return false;
+    }
+    // Make sure the clear covers the whole image
+    const auto& scissor = regs.scissor_test[0];
+    return scissor.min_x > 0 || scissor.min_y > 0 || scissor.max_x < regs.zeta_width ||
+           scissor.max_y < regs.zeta_height;
+}
+
 } // Anonymous namespace
 
 class BufferBindings final {
@@ -344,7 +387,7 @@ void RasterizerVulkan::Draw(bool is_indexed, bool is_instanced) {
 
     buffer_cache.Unmap();
 
-    const Texceptions texceptions = UpdateAttachments();
+    const Texceptions texceptions = UpdateAttachments(false);
     SetupImageTransitions(texceptions, color_attachments, zeta_attachment);
 
     key.renderpass_params = GetRenderPassParams(texceptions);
@@ -400,7 +443,7 @@ void RasterizerVulkan::Clear() {
         return;
     }
 
-    [[maybe_unused]] const auto texceptions = UpdateAttachments();
+    [[maybe_unused]] const auto texceptions = UpdateAttachments(true);
     DEBUG_ASSERT(texceptions.none());
     SetupImageTransitions(0, color_attachments, zeta_attachment);
 
@@ -677,9 +720,12 @@ void RasterizerVulkan::FlushWork() {
     draw_counter = 0;
 }
 
-RasterizerVulkan::Texceptions RasterizerVulkan::UpdateAttachments() {
+RasterizerVulkan::Texceptions RasterizerVulkan::UpdateAttachments(bool is_clear) {
     MICROPROFILE_SCOPE(Vulkan_RenderTargets);
-    auto& dirty = system.GPU().Maxwell3D().dirty.flags;
+    auto& maxwell3d = system.GPU().Maxwell3D();
+    auto& dirty = maxwell3d.dirty.flags;
+    auto& regs = maxwell3d.regs;
+
     const bool update_rendertargets = dirty[VideoCommon::Dirty::RenderTargets];
     dirty[VideoCommon::Dirty::RenderTargets] = false;
 
@@ -688,7 +734,8 @@ RasterizerVulkan::Texceptions RasterizerVulkan::UpdateAttachments() {
     Texceptions texceptions;
     for (std::size_t rt = 0; rt < Maxwell::NumRenderTargets; ++rt) {
         if (update_rendertargets) {
-            color_attachments[rt] = texture_cache.GetColorBufferSurface(rt, true);
+            const bool preserve_contents = HasToPreserveColorContents(is_clear, regs);
+            color_attachments[rt] = texture_cache.GetColorBufferSurface(rt, preserve_contents);
         }
         if (color_attachments[rt] && WalkAttachmentOverlaps(*color_attachments[rt])) {
             texceptions[rt] = true;
@@ -696,7 +743,8 @@ RasterizerVulkan::Texceptions RasterizerVulkan::UpdateAttachments() {
     }
 
     if (update_rendertargets) {
-        zeta_attachment = texture_cache.GetDepthBufferSurface(true);
+        const bool preserve_contents = HasToPreserveDepthContents(is_clear, regs);
+        zeta_attachment = texture_cache.GetDepthBufferSurface(preserve_contents);
     }
     if (zeta_attachment && WalkAttachmentOverlaps(*zeta_attachment)) {
         texceptions[ZETA_TEXCEPTION_INDEX] = true;
@@ -870,10 +918,10 @@ void RasterizerVulkan::BeginTransformFeedback() {
     UNIMPLEMENTED_IF(binding.buffer_offset != 0);
 
     const GPUVAddr gpu_addr = binding.Address();
-    const auto size = static_cast<VkDeviceSize>(binding.buffer_size);
-    const auto [buffer, offset] = buffer_cache.UploadMemory(gpu_addr, size, 4, true);
+    const VkDeviceSize size = static_cast<VkDeviceSize>(binding.buffer_size);
+    const auto info = buffer_cache.UploadMemory(gpu_addr, size, 4, true);
 
-    scheduler.Record([buffer = buffer, offset = offset, size](vk::CommandBuffer cmdbuf) {
+    scheduler.Record([buffer = info.handle, offset = info.offset, size](vk::CommandBuffer cmdbuf) {
         cmdbuf.BindTransformFeedbackBuffersEXT(0, 1, &buffer, &offset, &size);
         cmdbuf.BeginTransformFeedbackEXT(0, 0, nullptr, nullptr);
     });
@@ -925,8 +973,8 @@ void RasterizerVulkan::SetupVertexArrays(FixedPipelineState::VertexInput& vertex
             buffer_bindings.AddVertexBinding(DefaultBuffer(), 0);
             continue;
         }
-        const auto [buffer, offset] = buffer_cache.UploadMemory(start, size);
-        buffer_bindings.AddVertexBinding(buffer, offset);
+        const auto info = buffer_cache.UploadMemory(start, size);
+        buffer_bindings.AddVertexBinding(info.handle, info.offset);
     }
 }
 
@@ -948,7 +996,9 @@ void RasterizerVulkan::SetupIndexBuffer(BufferBindings& buffer_bindings, DrawPar
             break;
         }
         const GPUVAddr gpu_addr = regs.index_array.IndexStart();
-        auto [buffer, offset] = buffer_cache.UploadMemory(gpu_addr, CalculateIndexBufferSize());
+        const auto info = buffer_cache.UploadMemory(gpu_addr, CalculateIndexBufferSize());
+        VkBuffer buffer = info.handle;
+        u64 offset = info.offset;
         std::tie(buffer, offset) = quad_indexed_pass.Assemble(
             regs.index_array.format, params.num_vertices, params.base_vertex, buffer, offset);
 
@@ -962,7 +1012,9 @@ void RasterizerVulkan::SetupIndexBuffer(BufferBindings& buffer_bindings, DrawPar
             break;
         }
         const GPUVAddr gpu_addr = regs.index_array.IndexStart();
-        auto [buffer, offset] = buffer_cache.UploadMemory(gpu_addr, CalculateIndexBufferSize());
+        const auto info = buffer_cache.UploadMemory(gpu_addr, CalculateIndexBufferSize());
+        VkBuffer buffer = info.handle;
+        u64 offset = info.offset;
 
         auto format = regs.index_array.format;
         const bool is_uint8 = format == Maxwell::IndexFormat::UnsignedByte;
@@ -1109,10 +1161,9 @@ void RasterizerVulkan::SetupConstBuffer(const ConstBufferEntry& entry,
         Common::AlignUp(CalculateConstBufferSize(entry, buffer), 4 * sizeof(float));
     ASSERT(size <= MaxConstbufferSize);
 
-    const auto [buffer_handle, offset] =
+    const auto info =
         buffer_cache.UploadMemory(buffer.address, size, device.GetUniformBufferAlignment());
-
-    update_descriptor_queue.AddBuffer(buffer_handle, offset, size);
+    update_descriptor_queue.AddBuffer(info.handle, info.offset, size);
 }
 
 void RasterizerVulkan::SetupGlobalBuffer(const GlobalBufferEntry& entry, GPUVAddr address) {
@@ -1126,14 +1177,14 @@ void RasterizerVulkan::SetupGlobalBuffer(const GlobalBufferEntry& entry, GPUVAdd
         // Note: Do *not* use DefaultBuffer() here, storage buffers can be written breaking the
         // default buffer.
         static constexpr std::size_t dummy_size = 4;
-        const auto buffer = buffer_cache.GetEmptyBuffer(dummy_size);
-        update_descriptor_queue.AddBuffer(buffer, 0, dummy_size);
+        const auto info = buffer_cache.GetEmptyBuffer(dummy_size);
+        update_descriptor_queue.AddBuffer(info.handle, info.offset, dummy_size);
         return;
     }
 
-    const auto [buffer, offset] = buffer_cache.UploadMemory(
+    const auto info = buffer_cache.UploadMemory(
         actual_addr, size, device.GetStorageBufferAlignment(), entry.IsWritten());
-    update_descriptor_queue.AddBuffer(buffer, offset, size);
+    update_descriptor_queue.AddBuffer(info.handle, info.offset, size);
 }
 
 void RasterizerVulkan::SetupUniformTexels(const Tegra::Texture::TICEntry& tic,
@@ -1154,7 +1205,7 @@ void RasterizerVulkan::SetupTexture(const Tegra::Texture::FullTextureInfo& textu
     const auto sampler = sampler_cache.GetSampler(texture.tsc);
     update_descriptor_queue.AddSampledImage(sampler, image_view);
 
-    const auto image_layout = update_descriptor_queue.GetLastImageLayout();
+    VkImageLayout* const image_layout = update_descriptor_queue.LastImageLayout();
     *image_layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
     sampled_views.push_back(ImageView{std::move(view), image_layout});
 }
@@ -1180,7 +1231,7 @@ void RasterizerVulkan::SetupImage(const Tegra::Texture::TICEntry& tic, const Ima
         view->GetImageView(tic.x_source, tic.y_source, tic.z_source, tic.w_source);
     update_descriptor_queue.AddImage(image_view);
 
-    const auto image_layout = update_descriptor_queue.GetLastImageLayout();
+    VkImageLayout* const image_layout = update_descriptor_queue.LastImageLayout();
     *image_layout = VK_IMAGE_LAYOUT_GENERAL;
     image_views.push_back(ImageView{std::move(view), image_layout});
 }
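
The scissor checks in HasToPreserveColorContents and HasToPreserveDepthContents both reduce to one question: does the clear touch every pixel? A standalone restatement of that predicate, with illustrative names:

struct ScissorRect {
    int min_x, min_y, max_x, max_y;
};

// A scissored clear may skip preserving the old contents only when it covers
// the full render target; partial coverage forces a preserving load.
bool ClearCoversWholeImage(const ScissorRect& scissor, int width, int height) {
    return scissor.min_x <= 0 && scissor.min_y <= 0 && scissor.max_x >= width &&
           scissor.max_y >= height;
}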
diff --git a/src/video_core/renderer_vulkan/vk_rasterizer.h b/src/video_core/renderer_vulkan/vk_rasterizer.h
index c8c187606..83e00e7e9 100644
--- a/src/video_core/renderer_vulkan/vk_rasterizer.h
+++ b/src/video_core/renderer_vulkan/vk_rasterizer.h
@@ -159,7 +159,10 @@ private:
 
     void FlushWork();
 
-    Texceptions UpdateAttachments();
+    /// @brief Updates the currently bound attachments
+    /// @param is_clear True when the framebuffer is updated as a clear
+    /// @return Bitfield of attachments being used as sampled textures
+    Texceptions UpdateAttachments(bool is_clear);
 
     std::tuple<VkFramebuffer, VkExtent2D> ConfigureFramebuffers(VkRenderPass renderpass);
 
diff --git a/src/video_core/renderer_vulkan/vk_scheduler.cpp b/src/video_core/renderer_vulkan/vk_scheduler.cpp
index 82ec9180e..56524e6f3 100644
--- a/src/video_core/renderer_vulkan/vk_scheduler.cpp
+++ b/src/video_core/renderer_vulkan/vk_scheduler.cpp
@@ -9,6 +9,7 @@
 #include <utility>
 
 #include "common/microprofile.h"
+#include "common/thread.h"
 #include "video_core/renderer_vulkan/vk_device.h"
 #include "video_core/renderer_vulkan/vk_query_cache.h"
 #include "video_core/renderer_vulkan/vk_resource_manager.h"
@@ -133,6 +134,7 @@ void VKScheduler::BindGraphicsPipeline(VkPipeline pipeline) {
 }
 
 void VKScheduler::WorkerThread() {
+    Common::SetCurrentThreadPriority(Common::ThreadPriority::High);
     std::unique_lock lock{mutex};
     do {
         cv.wait(lock, [this] { return !chunk_queue.Empty() || quit; });
diff --git a/src/video_core/renderer_vulkan/vk_stream_buffer.h b/src/video_core/renderer_vulkan/vk_stream_buffer.h
index c765c60a0..689f0d276 100644
--- a/src/video_core/renderer_vulkan/vk_stream_buffer.h
+++ b/src/video_core/renderer_vulkan/vk_stream_buffer.h
@@ -35,10 +35,14 @@ public:
     /// Ensures that "size" bytes of memory are available to the GPU, potentially recording a copy.
     void Unmap(u64 size);
 
-    VkBuffer Handle() const {
+    VkBuffer Handle() const noexcept {
         return *buffer;
     }
 
+    u64 Address() const noexcept {
+        return 0;
+    }
+
 private:
     struct Watch final {
         VKFenceWatch fence;
diff --git a/src/video_core/renderer_vulkan/vk_update_descriptor.cpp b/src/video_core/renderer_vulkan/vk_update_descriptor.cpp
index 681ecde98..351c048d2 100644
--- a/src/video_core/renderer_vulkan/vk_update_descriptor.cpp
+++ b/src/video_core/renderer_vulkan/vk_update_descriptor.cpp
@@ -24,35 +24,25 @@ void VKUpdateDescriptorQueue::TickFrame() {
 }
 
 void VKUpdateDescriptorQueue::Acquire() {
-    entries.clear();
-}
+    // Minimum number of entries required.
+    // This is the maximum number of entries a single draw call might use.
+    static constexpr std::size_t MIN_ENTRIES = 0x400;
 
-void VKUpdateDescriptorQueue::Send(VkDescriptorUpdateTemplateKHR update_template,
-                                   VkDescriptorSet set) {
-    if (payload.size() + entries.size() >= payload.max_size()) {
+    if (payload.size() + MIN_ENTRIES >= payload.max_size()) {
         LOG_WARNING(Render_Vulkan, "Payload overflow, waiting for worker thread");
         scheduler.WaitWorker();
         payload.clear();
     }
+    upload_start = &*payload.end();
+}
 
-    // TODO(Rodrigo): Rework to write the payload directly
-    const auto payload_start = payload.data() + payload.size();
-    for (const auto& entry : entries) {
-        if (const auto image = std::get_if<VkDescriptorImageInfo>(&entry)) {
-            payload.push_back(*image);
-        } else if (const auto buffer = std::get_if<VkDescriptorBufferInfo>(&entry)) {
-            payload.push_back(*buffer);
-        } else if (const auto texel = std::get_if<VkBufferView>(&entry)) {
-            payload.push_back(*texel);
-        } else {
-            UNREACHABLE();
-        }
-    }
-
-    scheduler.Record(
-        [payload_start, set, update_template, logical = &device.GetLogical()](vk::CommandBuffer) {
-            logical->UpdateDescriptorSet(set, update_template, payload_start);
-        });
+void VKUpdateDescriptorQueue::Send(VkDescriptorUpdateTemplateKHR update_template,
+                                   VkDescriptorSet set) {
+    const void* const data = upload_start;
+    const vk::Device* const logical = &device.GetLogical();
+    scheduler.Record([data, logical, set, update_template](vk::CommandBuffer) {
+        logical->UpdateDescriptorSet(set, update_template, data);
+    });
 }
 
 } // namespace Vulkan
diff --git a/src/video_core/renderer_vulkan/vk_update_descriptor.h b/src/video_core/renderer_vulkan/vk_update_descriptor.h
index cc7e3dff4..945320c72 100644
--- a/src/video_core/renderer_vulkan/vk_update_descriptor.h
+++ b/src/video_core/renderer_vulkan/vk_update_descriptor.h
@@ -15,17 +15,13 @@ namespace Vulkan {
 class VKDevice;
 class VKScheduler;
 
-class DescriptorUpdateEntry {
-public:
-    explicit DescriptorUpdateEntry() {}
-
-    DescriptorUpdateEntry(VkDescriptorImageInfo image) : image{image} {}
+struct DescriptorUpdateEntry {
+    DescriptorUpdateEntry(VkDescriptorImageInfo image_) : image{image_} {}
 
-    DescriptorUpdateEntry(VkDescriptorBufferInfo buffer) : buffer{buffer} {}
+    DescriptorUpdateEntry(VkDescriptorBufferInfo buffer_) : buffer{buffer_} {}
 
-    DescriptorUpdateEntry(VkBufferView texel_buffer) : texel_buffer{texel_buffer} {}
+    DescriptorUpdateEntry(VkBufferView texel_buffer_) : texel_buffer{texel_buffer_} {}
 
-private:
     union {
         VkDescriptorImageInfo image;
         VkDescriptorBufferInfo buffer;
@@ -45,32 +41,34 @@ public:
     void Send(VkDescriptorUpdateTemplateKHR update_template, VkDescriptorSet set);
 
     void AddSampledImage(VkSampler sampler, VkImageView image_view) {
-        entries.emplace_back(VkDescriptorImageInfo{sampler, image_view, {}});
+        payload.emplace_back(VkDescriptorImageInfo{sampler, image_view, {}});
     }
 
     void AddImage(VkImageView image_view) {
-        entries.emplace_back(VkDescriptorImageInfo{{}, image_view, {}});
+        payload.emplace_back(VkDescriptorImageInfo{{}, image_view, {}});
     }
 
     void AddBuffer(VkBuffer buffer, u64 offset, std::size_t size) {
-        entries.emplace_back(VkDescriptorBufferInfo{buffer, offset, size});
+        payload.emplace_back(VkDescriptorBufferInfo{buffer, offset, size});
     }
 
     void AddTexelBuffer(VkBufferView texel_buffer) {
-        entries.emplace_back(texel_buffer);
+        payload.emplace_back(texel_buffer);
     }
 
-    VkImageLayout* GetLastImageLayout() {
-        return &std::get<VkDescriptorImageInfo>(entries.back()).imageLayout;
+    VkImageLayout* LastImageLayout() {
+        return &payload.back().image.imageLayout;
     }
 
-private:
-    using Variant = std::variant<VkDescriptorImageInfo, VkDescriptorBufferInfo, VkBufferView>;
+    const VkImageLayout* LastImageLayout() const {
+        return &payload.back().image.imageLayout;
+    }
 
+private:
     const VKDevice& device;
     VKScheduler& scheduler;
 
-    boost::container::static_vector<Variant, 0x400> entries;
+    const DescriptorUpdateEntry* upload_start = nullptr;
     boost::container::static_vector<DescriptorUpdateEntry, 0x10000> payload;
 };
 
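
Caching upload_start across the Add*() calls relies on a property of boost::container::static_vector: its storage is inline, so appending entries never relocates earlier ones, and a pointer taken in Acquire() is still valid when Send() hands it to the update template (a plain std::vector would invalidate it on growth). A minimal demonstration of that guarantee; note that data() + size() is the strictly well-defined spelling of the position &*payload.end() computes above:

#include <boost/container/static_vector.hpp>

void PointerStabilityDemo() {
    boost::container::static_vector<int, 4> payload;
    const int* const upload_start = payload.data() + payload.size();
    payload.push_back(1);
    payload.push_back(2);
    // upload_start still points at the first of the two entries just pushed.
}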
diff --git a/src/video_core/renderer_vulkan/wrapper.cpp b/src/video_core/renderer_vulkan/wrapper.cpp
index 2ce9b0626..0d485a662 100644
--- a/src/video_core/renderer_vulkan/wrapper.cpp
+++ b/src/video_core/renderer_vulkan/wrapper.cpp
@@ -153,7 +153,8 @@ void Load(VkDevice device, DeviceDispatch& dld) noexcept {
 
 bool Load(InstanceDispatch& dld) noexcept {
 #define X(name) Proc(dld.name, dld, #name)
-    return X(vkCreateInstance) && X(vkEnumerateInstanceExtensionProperties);
+    return X(vkCreateInstance) && X(vkEnumerateInstanceExtensionProperties) &&
+           X(vkEnumerateInstanceLayerProperties);
 #undef X
 }
 
@@ -725,8 +726,7 @@ bool PhysicalDevice::GetSurfaceSupportKHR(u32 queue_family_index, VkSurfaceKHR s
     return supported == VK_TRUE;
 }
 
-VkSurfaceCapabilitiesKHR PhysicalDevice::GetSurfaceCapabilitiesKHR(VkSurfaceKHR surface) const
-    noexcept {
+VkSurfaceCapabilitiesKHR PhysicalDevice::GetSurfaceCapabilitiesKHR(VkSurfaceKHR surface) const {
     VkSurfaceCapabilitiesKHR capabilities;
     Check(dld->vkGetPhysicalDeviceSurfaceCapabilitiesKHR(physical_device, surface, &capabilities));
     return capabilities;
@@ -771,4 +771,17 @@ std::optional<std::vector<VkExtensionProperties>> EnumerateInstanceExtensionProp
     return properties;
 }
 
+std::optional<std::vector<VkLayerProperties>> EnumerateInstanceLayerProperties(
+    const InstanceDispatch& dld) {
+    u32 num;
+    if (dld.vkEnumerateInstanceLayerProperties(&num, nullptr) != VK_SUCCESS) {
+        return std::nullopt;
+    }
+    std::vector<VkLayerProperties> properties(num);
+    if (dld.vkEnumerateInstanceLayerProperties(&num, properties.data()) != VK_SUCCESS) {
+        return std::nullopt;
+    }
+    return properties;
+}
+
 } // namespace Vulkan::vk
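
A hypothetical caller of the new wrapper, mirroring how CreateInstance uses it above; the std::optional return distinguishes a failed query from an empty layer list, and dld is assumed to be a loaded vk::InstanceDispatch:

if (const auto layers = vk::EnumerateInstanceLayerProperties(dld)) {
    for (const VkLayerProperties& prop : *layers) {
        // Inspect prop.layerName, prop.specVersion, ...
    }
} else {
    // The loader query itself failed; proceed as if no layers exist.
}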
diff --git a/src/video_core/renderer_vulkan/wrapper.h b/src/video_core/renderer_vulkan/wrapper.h
index 98937a77a..d56fdb3f9 100644
--- a/src/video_core/renderer_vulkan/wrapper.h
+++ b/src/video_core/renderer_vulkan/wrapper.h
@@ -141,6 +141,7 @@ struct InstanceDispatch {
     PFN_vkCreateInstance vkCreateInstance;
     PFN_vkDestroyInstance vkDestroyInstance;
     PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties;
+    PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties;
 
     PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT;
     PFN_vkCreateDevice vkCreateDevice;
@@ -779,7 +780,7 @@ public:
 
     bool GetSurfaceSupportKHR(u32 queue_family_index, VkSurfaceKHR) const;
 
-    VkSurfaceCapabilitiesKHR GetSurfaceCapabilitiesKHR(VkSurfaceKHR) const noexcept;
+    VkSurfaceCapabilitiesKHR GetSurfaceCapabilitiesKHR(VkSurfaceKHR) const;
 
     std::vector<VkSurfaceFormatKHR> GetSurfaceFormatsKHR(VkSurfaceKHR) const;
 
@@ -996,4 +997,7 @@ private:
 std::optional<std::vector<VkExtensionProperties>> EnumerateInstanceExtensionProperties(
     const InstanceDispatch& dld);
 
+std::optional<std::vector<VkLayerProperties>> EnumerateInstanceLayerProperties(
+    const InstanceDispatch& dld);
+
 } // namespace Vulkan::vk