diff options
| author | 2020-07-16 16:59:35 -0400 | |
|---|---|---|
| committer | 2020-07-16 16:59:39 -0400 | |
| commit | a66a0a6a53bb5404551966cbd67bdf77f0683ae6 (patch) | |
| tree | 528bb23a46a1ecd361572afc572a77cfc850ff2e /src | |
| parent | Merge pull request #4327 from lioncash/desig2 (diff) | |
| download | yuzu-a66a0a6a53bb5404551966cbd67bdf77f0683ae6.tar.gz yuzu-a66a0a6a53bb5404551966cbd67bdf77f0683ae6.tar.xz yuzu-a66a0a6a53bb5404551966cbd67bdf77f0683ae6.zip | |
vk_buffer_cache: Make use of designated initializers where applicable
Note: An array within CopyFrom() cannot be converted over yet, as it
ICEs MSVC when converted over.
Diffstat (limited to 'src')
| -rw-r--r-- | src/video_core/renderer_vulkan/vk_buffer_cache.cpp | 63 |
1 file changed, 33 insertions, 30 deletions
diff --git a/src/video_core/renderer_vulkan/vk_buffer_cache.cpp b/src/video_core/renderer_vulkan/vk_buffer_cache.cpp index 2be38d419..1d2f8b557 100644 --- a/src/video_core/renderer_vulkan/vk_buffer_cache.cpp +++ b/src/video_core/renderer_vulkan/vk_buffer_cache.cpp | |||
| @@ -39,16 +39,17 @@ std::unique_ptr<VKStreamBuffer> CreateStreamBuffer(const VKDevice& device, VKSch | |||
| 39 | 39 | ||
| 40 | Buffer::Buffer(const VKDevice& device, VKMemoryManager& memory_manager, VKScheduler& scheduler_, | 40 | Buffer::Buffer(const VKDevice& device, VKMemoryManager& memory_manager, VKScheduler& scheduler_, |
| 41 | VKStagingBufferPool& staging_pool_, VAddr cpu_addr, std::size_t size) | 41 | VKStagingBufferPool& staging_pool_, VAddr cpu_addr, std::size_t size) |
| 42 | : VideoCommon::BufferBlock{cpu_addr, size}, scheduler{scheduler_}, staging_pool{staging_pool_} { | 42 | : BufferBlock{cpu_addr, size}, scheduler{scheduler_}, staging_pool{staging_pool_} { |
| 43 | VkBufferCreateInfo ci; | 43 | const VkBufferCreateInfo ci{ |
| 44 | ci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO; | 44 | .sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, |
| 45 | ci.pNext = nullptr; | 45 | .pNext = nullptr, |
| 46 | ci.flags = 0; | 46 | .flags = 0, |
| 47 | ci.size = static_cast<VkDeviceSize>(size); | 47 | .size = static_cast<VkDeviceSize>(size), |
| 48 | ci.usage = BUFFER_USAGE | VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT; | 48 | .usage = BUFFER_USAGE | VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT, |
| 49 | ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE; | 49 | .sharingMode = VK_SHARING_MODE_EXCLUSIVE, |
| 50 | ci.queueFamilyIndexCount = 0; | 50 | .queueFamilyIndexCount = 0, |
| 51 | ci.pQueueFamilyIndices = nullptr; | 51 | .pQueueFamilyIndices = nullptr, |
| 52 | }; | ||
| 52 | 53 | ||
| 53 | buffer.handle = device.GetLogical().CreateBuffer(ci); | 54 | buffer.handle = device.GetLogical().CreateBuffer(ci); |
| 54 | buffer.commit = memory_manager.Commit(buffer.handle, false); | 55 | buffer.commit = memory_manager.Commit(buffer.handle, false); |
| @@ -66,16 +67,17 @@ void Buffer::Upload(std::size_t offset, std::size_t size, const u8* data) { | |||
| 66 | scheduler.Record([staging = *staging.handle, handle, offset, size](vk::CommandBuffer cmdbuf) { | 67 | scheduler.Record([staging = *staging.handle, handle, offset, size](vk::CommandBuffer cmdbuf) { |
| 67 | cmdbuf.CopyBuffer(staging, handle, VkBufferCopy{0, offset, size}); | 68 | cmdbuf.CopyBuffer(staging, handle, VkBufferCopy{0, offset, size}); |
| 68 | 69 | ||
| 69 | VkBufferMemoryBarrier barrier; | 70 | const VkBufferMemoryBarrier barrier{ |
| 70 | barrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER; | 71 | .sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER, |
| 71 | barrier.pNext = nullptr; | 72 | .pNext = nullptr, |
| 72 | barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT; | 73 | .srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT, |
| 73 | barrier.dstAccessMask = UPLOAD_ACCESS_BARRIERS; | 74 | .dstAccessMask = UPLOAD_ACCESS_BARRIERS, |
| 74 | barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; | 75 | .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED, |
| 75 | barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; | 76 | .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED, |
| 76 | barrier.buffer = handle; | 77 | .buffer = handle, |
| 77 | barrier.offset = offset; | 78 | .offset = offset, |
| 78 | barrier.size = size; | 79 | .size = size, |
| 80 | }; | ||
| 79 | cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_TRANSFER_BIT, UPLOAD_PIPELINE_STAGE, 0, {}, | 81 | cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_TRANSFER_BIT, UPLOAD_PIPELINE_STAGE, 0, {}, |
| 80 | barrier, {}); | 82 | barrier, {}); |
| 81 | }); | 83 | }); |
| @@ -87,16 +89,17 @@ void Buffer::Download(std::size_t offset, std::size_t size, u8* data) { | |||
| 87 | 89 | ||
| 88 | const VkBuffer handle = Handle(); | 90 | const VkBuffer handle = Handle(); |
| 89 | scheduler.Record([staging = *staging.handle, handle, offset, size](vk::CommandBuffer cmdbuf) { | 91 | scheduler.Record([staging = *staging.handle, handle, offset, size](vk::CommandBuffer cmdbuf) { |
| 90 | VkBufferMemoryBarrier barrier; | 92 | const VkBufferMemoryBarrier barrier{ |
| 91 | barrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER; | 93 | .sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER, |
| 92 | barrier.pNext = nullptr; | 94 | .pNext = nullptr, |
| 93 | barrier.srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT; | 95 | .srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT, |
| 94 | barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT; | 96 | .dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT, |
| 95 | barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; | 97 | .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED, |
| 96 | barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; | 98 | .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED, |
| 97 | barrier.buffer = handle; | 99 | .buffer = handle, |
| 98 | barrier.offset = offset; | 100 | .offset = offset, |
| 99 | barrier.size = size; | 101 | .size = size, |
| 102 | }; | ||
| 100 | 103 | ||
| 101 | cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | | 104 | cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | |
| 102 | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | | 105 | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | |