path: root/src/video_core/renderer_vulkan
author    Lioncash 2020-12-05 11:40:14 -0500
committer Lioncash 2020-12-05 16:02:23 -0500
commit    f95602f15207851b849c57e2a2dd313a087b2493 (patch)
tree      ed6122ab0a30de177acb2a59dffc8109232870ec /src/video_core/renderer_vulkan
parent    Merge pull request #5133 from lioncash/video-shadow2 (diff)
video_core: Resolve more variable shadowing scenarios pt.3
Cleans out the remaining occurrences of variable shadowing and turns any further shadowing into a compiler error.
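The rename pattern applied throughout this diff is uniform: constructor parameters that shadow the data members they initialize gain a trailing underscore (cpu_addr becomes cpu_addr_, size becomes size_, and so on). The sketch below is a minimal illustration of that pattern only; the class and member names are hypothetical, not repository code, and the exact warning flags yuzu's build scripts use to enforce this live outside this directory. GCC's -Wshadow reports the "before" form, and -Werror=shadow promotes it to a hard error.

// Minimal illustration only; hypothetical names, not repository code.
// Build with, e.g.: g++ -std=c++17 -Wshadow -c shadow_example.cpp
#include <cstddef>

class ShadowedBuffer {
public:
    // 'size' shadows the member 'size'; GCC's -Wshadow warns here, and
    // -Werror=shadow turns the warning into a compile error.
    explicit ShadowedBuffer(std::size_t size) : size{size} {}

private:
    std::size_t size;
};

class CleanBuffer {
public:
    // A trailing underscore on the parameter removes the shadowing, which is
    // the convention applied throughout the changes below (cpu_addr_, size_, ...).
    explicit CleanBuffer(std::size_t size_) : size{size_} {}

private:
    std::size_t size;
};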
Diffstat (limited to 'src/video_core/renderer_vulkan')
-rw-r--r--  src/video_core/renderer_vulkan/vk_buffer_cache.cpp   | 110
-rw-r--r--  src/video_core/renderer_vulkan/vk_buffer_cache.h     | 16
-rw-r--r--  src/video_core/renderer_vulkan/vk_command_pool.cpp   | 4
-rw-r--r--  src/video_core/renderer_vulkan/vk_command_pool.h     | 2
-rw-r--r--  src/video_core/renderer_vulkan/vk_pipeline_cache.cpp | 19
-rw-r--r--  src/video_core/renderer_vulkan/vk_pipeline_cache.h   | 20
-rw-r--r--  src/video_core/renderer_vulkan/vk_query_cache.cpp    | 22
-rw-r--r--  src/video_core/renderer_vulkan/vk_query_cache.h      | 4
-rw-r--r--  src/video_core/renderer_vulkan/vk_rasterizer.cpp     | 14
-rw-r--r--  src/video_core/renderer_vulkan/vk_rasterizer.h       | 10
-rw-r--r--  src/video_core/renderer_vulkan/vk_texture_cache.cpp  | 8
-rw-r--r--  src/video_core/renderer_vulkan/vk_texture_cache.h    | 9
12 files changed, 119 insertions, 119 deletions
diff --git a/src/video_core/renderer_vulkan/vk_buffer_cache.cpp b/src/video_core/renderer_vulkan/vk_buffer_cache.cpp
index d9d3da9ea..444d3fb93 100644
--- a/src/video_core/renderer_vulkan/vk_buffer_cache.cpp
+++ b/src/video_core/renderer_vulkan/vk_buffer_cache.cpp
@@ -38,13 +38,13 @@ std::unique_ptr<VKStreamBuffer> CreateStreamBuffer(const VKDevice& device, VKSch
 } // Anonymous namespace
 
 Buffer::Buffer(const VKDevice& device, VKMemoryManager& memory_manager, VKScheduler& scheduler_,
-               VKStagingBufferPool& staging_pool_, VAddr cpu_addr, std::size_t size)
-    : BufferBlock{cpu_addr, size}, scheduler{scheduler_}, staging_pool{staging_pool_} {
+               VKStagingBufferPool& staging_pool_, VAddr cpu_addr_, std::size_t size_)
+    : BufferBlock{cpu_addr_, size_}, scheduler{scheduler_}, staging_pool{staging_pool_} {
     const VkBufferCreateInfo ci{
         .sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
         .pNext = nullptr,
         .flags = 0,
-        .size = static_cast<VkDeviceSize>(size),
+        .size = static_cast<VkDeviceSize>(size_),
         .usage = BUFFER_USAGE | VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT,
         .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
         .queueFamilyIndexCount = 0,
@@ -57,69 +57,71 @@ Buffer::Buffer(const VKDevice& device, VKMemoryManager& memory_manager, VKSchedu
 
 Buffer::~Buffer() = default;
 
-void Buffer::Upload(std::size_t offset, std::size_t size, const u8* data) {
-    const auto& staging = staging_pool.GetUnusedBuffer(size, true);
-    std::memcpy(staging.commit->Map(size), data, size);
+void Buffer::Upload(std::size_t offset, std::size_t data_size, const u8* data) {
+    const auto& staging = staging_pool.GetUnusedBuffer(data_size, true);
+    std::memcpy(staging.commit->Map(data_size), data, data_size);
 
     scheduler.RequestOutsideRenderPassOperationContext();
 
     const VkBuffer handle = Handle();
-    scheduler.Record([staging = *staging.handle, handle, offset, size](vk::CommandBuffer cmdbuf) {
-        cmdbuf.CopyBuffer(staging, handle, VkBufferCopy{0, offset, size});
-
-        const VkBufferMemoryBarrier barrier{
-            .sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
-            .pNext = nullptr,
-            .srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT,
-            .dstAccessMask = UPLOAD_ACCESS_BARRIERS,
-            .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
-            .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
-            .buffer = handle,
-            .offset = offset,
-            .size = size,
-        };
-        cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_TRANSFER_BIT, UPLOAD_PIPELINE_STAGE, 0, {},
-                               barrier, {});
-    });
+    scheduler.Record(
+        [staging = *staging.handle, handle, offset, data_size](vk::CommandBuffer cmdbuf) {
+            cmdbuf.CopyBuffer(staging, handle, VkBufferCopy{0, offset, data_size});
+
+            const VkBufferMemoryBarrier barrier{
+                .sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
+                .pNext = nullptr,
+                .srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT,
+                .dstAccessMask = UPLOAD_ACCESS_BARRIERS,
+                .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
+                .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
+                .buffer = handle,
+                .offset = offset,
+                .size = data_size,
+            };
+            cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_TRANSFER_BIT, UPLOAD_PIPELINE_STAGE, 0, {},
+                                   barrier, {});
+        });
 }
 
-void Buffer::Download(std::size_t offset, std::size_t size, u8* data) {
-    const auto& staging = staging_pool.GetUnusedBuffer(size, true);
+void Buffer::Download(std::size_t offset, std::size_t data_size, u8* data) {
+    const auto& staging = staging_pool.GetUnusedBuffer(data_size, true);
     scheduler.RequestOutsideRenderPassOperationContext();
 
     const VkBuffer handle = Handle();
-    scheduler.Record([staging = *staging.handle, handle, offset, size](vk::CommandBuffer cmdbuf) {
-        const VkBufferMemoryBarrier barrier{
-            .sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
-            .pNext = nullptr,
-            .srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT,
-            .dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT,
-            .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
-            .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
-            .buffer = handle,
-            .offset = offset,
-            .size = size,
-        };
-
-        cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_VERTEX_SHADER_BIT |
-                                   VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT |
-                                   VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
-                               VK_PIPELINE_STAGE_TRANSFER_BIT, 0, {}, barrier, {});
-        cmdbuf.CopyBuffer(handle, staging, VkBufferCopy{offset, 0, size});
-    });
+    scheduler.Record(
+        [staging = *staging.handle, handle, offset, data_size](vk::CommandBuffer cmdbuf) {
+            const VkBufferMemoryBarrier barrier{
+                .sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
+                .pNext = nullptr,
+                .srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT,
+                .dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT,
+                .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
+                .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
+                .buffer = handle,
+                .offset = offset,
+                .size = data_size,
+            };
+
+            cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_VERTEX_SHADER_BIT |
+                                       VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT |
+                                       VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
+                                   VK_PIPELINE_STAGE_TRANSFER_BIT, 0, {}, barrier, {});
+            cmdbuf.CopyBuffer(handle, staging, VkBufferCopy{offset, 0, data_size});
+        });
     scheduler.Finish();
 
-    std::memcpy(data, staging.commit->Map(size), size);
+    std::memcpy(data, staging.commit->Map(data_size), data_size);
 }
 
 void Buffer::CopyFrom(const Buffer& src, std::size_t src_offset, std::size_t dst_offset,
-                      std::size_t size) {
+                      std::size_t copy_size) {
     scheduler.RequestOutsideRenderPassOperationContext();
 
     const VkBuffer dst_buffer = Handle();
     scheduler.Record([src_buffer = src.Handle(), dst_buffer, src_offset, dst_offset,
-                      size](vk::CommandBuffer cmdbuf) {
-        cmdbuf.CopyBuffer(src_buffer, dst_buffer, VkBufferCopy{src_offset, dst_offset, size});
+                      copy_size](vk::CommandBuffer cmdbuf) {
+        cmdbuf.CopyBuffer(src_buffer, dst_buffer, VkBufferCopy{src_offset, dst_offset, copy_size});
 
         std::array<VkBufferMemoryBarrier, 2> barriers;
         barriers[0].sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
@@ -130,7 +132,7 @@ void Buffer::CopyFrom(const Buffer& src, std::size_t src_offset, std::size_t dst
         barriers[0].dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
         barriers[0].buffer = src_buffer;
         barriers[0].offset = src_offset;
-        barriers[0].size = size;
+        barriers[0].size = copy_size;
         barriers[1].sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
         barriers[1].pNext = nullptr;
         barriers[1].srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
@@ -139,19 +141,17 @@ void Buffer::CopyFrom(const Buffer& src, std::size_t src_offset, std::size_t dst
         barriers[1].dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
         barriers[1].buffer = dst_buffer;
         barriers[1].offset = dst_offset;
-        barriers[1].size = size;
+        barriers[1].size = copy_size;
         cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_TRANSFER_BIT, UPLOAD_PIPELINE_STAGE, 0, {},
                                barriers, {});
     });
 }
 
-VKBufferCache::VKBufferCache(VideoCore::RasterizerInterface& rasterizer,
-                             Tegra::MemoryManager& gpu_memory, Core::Memory::Memory& cpu_memory,
+VKBufferCache::VKBufferCache(VideoCore::RasterizerInterface& rasterizer_,
+                             Tegra::MemoryManager& gpu_memory_, Core::Memory::Memory& cpu_memory_,
                              const VKDevice& device_, VKMemoryManager& memory_manager_,
                              VKScheduler& scheduler_, VKStagingBufferPool& staging_pool_)
-    : VideoCommon::BufferCache<Buffer, VkBuffer, VKStreamBuffer>{rasterizer, gpu_memory, cpu_memory,
-                                                                 CreateStreamBuffer(device_,
-                                                                                    scheduler_)},
+    : BufferCache{rasterizer_, gpu_memory_, cpu_memory_, CreateStreamBuffer(device_, scheduler_)},
       device{device_}, memory_manager{memory_manager_}, scheduler{scheduler_}, staging_pool{
                                                                                    staging_pool_} {}
 
diff --git a/src/video_core/renderer_vulkan/vk_buffer_cache.h b/src/video_core/renderer_vulkan/vk_buffer_cache.h
index 7fb5ceedf..6008b8373 100644
--- a/src/video_core/renderer_vulkan/vk_buffer_cache.h
+++ b/src/video_core/renderer_vulkan/vk_buffer_cache.h
@@ -22,15 +22,15 @@ class VKScheduler;
 class Buffer final : public VideoCommon::BufferBlock {
 public:
     explicit Buffer(const VKDevice& device, VKMemoryManager& memory_manager, VKScheduler& scheduler,
-                    VKStagingBufferPool& staging_pool, VAddr cpu_addr, std::size_t size);
+                    VKStagingBufferPool& staging_pool, VAddr cpu_addr_, std::size_t size_);
     ~Buffer();
 
-    void Upload(std::size_t offset, std::size_t size, const u8* data);
+    void Upload(std::size_t offset, std::size_t data_size, const u8* data);
 
-    void Download(std::size_t offset, std::size_t size, u8* data);
+    void Download(std::size_t offset, std::size_t data_size, u8* data);
 
     void CopyFrom(const Buffer& src, std::size_t src_offset, std::size_t dst_offset,
-                  std::size_t size);
+                  std::size_t copy_size);
 
     VkBuffer Handle() const {
         return *buffer.handle;
@@ -49,10 +49,10 @@ private:
 
 class VKBufferCache final : public VideoCommon::BufferCache<Buffer, VkBuffer, VKStreamBuffer> {
 public:
-    explicit VKBufferCache(VideoCore::RasterizerInterface& rasterizer,
-                           Tegra::MemoryManager& gpu_memory, Core::Memory::Memory& cpu_memory,
-                           const VKDevice& device, VKMemoryManager& memory_manager,
-                           VKScheduler& scheduler, VKStagingBufferPool& staging_pool);
+    explicit VKBufferCache(VideoCore::RasterizerInterface& rasterizer_,
+                           Tegra::MemoryManager& gpu_memory_, Core::Memory::Memory& cpu_memory_,
+                           const VKDevice& device_, VKMemoryManager& memory_manager_,
+                           VKScheduler& scheduler_, VKStagingBufferPool& staging_pool_);
     ~VKBufferCache();
 
     BufferInfo GetEmptyBuffer(std::size_t size) override;
diff --git a/src/video_core/renderer_vulkan/vk_command_pool.cpp b/src/video_core/renderer_vulkan/vk_command_pool.cpp
index 256a39148..8f7d6410e 100644
--- a/src/video_core/renderer_vulkan/vk_command_pool.cpp
+++ b/src/video_core/renderer_vulkan/vk_command_pool.cpp
@@ -17,8 +17,8 @@ struct CommandPool::Pool {
     vk::CommandBuffers cmdbufs;
 };
 
-CommandPool::CommandPool(MasterSemaphore& master_semaphore, const VKDevice& device_)
-    : ResourcePool(master_semaphore, COMMAND_BUFFER_POOL_SIZE), device{device_} {}
+CommandPool::CommandPool(MasterSemaphore& master_semaphore_, const VKDevice& device_)
+    : ResourcePool(master_semaphore_, COMMAND_BUFFER_POOL_SIZE), device{device_} {}
 
 CommandPool::~CommandPool() = default;
 
diff --git a/src/video_core/renderer_vulkan/vk_command_pool.h b/src/video_core/renderer_vulkan/vk_command_pool.h
index 33655eca4..62a7ce3f1 100644
--- a/src/video_core/renderer_vulkan/vk_command_pool.h
+++ b/src/video_core/renderer_vulkan/vk_command_pool.h
@@ -17,7 +17,7 @@ class VKDevice;
 
 class CommandPool final : public ResourcePool {
 public:
-    explicit CommandPool(MasterSemaphore& master_semaphore, const VKDevice& device_);
+    explicit CommandPool(MasterSemaphore& master_semaphore_, const VKDevice& device_);
     ~CommandPool() override;
 
     void Allocate(size_t begin, size_t end) override;
diff --git a/src/video_core/renderer_vulkan/vk_pipeline_cache.cpp b/src/video_core/renderer_vulkan/vk_pipeline_cache.cpp
index df7e8c864..39e58a56f 100644
--- a/src/video_core/renderer_vulkan/vk_pipeline_cache.cpp
+++ b/src/video_core/renderer_vulkan/vk_pipeline_cache.cpp
@@ -136,26 +136,25 @@ bool ComputePipelineCacheKey::operator==(const ComputePipelineCacheKey& rhs) con
     return std::memcmp(&rhs, this, sizeof *this) == 0;
 }
 
-Shader::Shader(Tegra::Engines::ConstBufferEngineInterface& engine, Tegra::Engines::ShaderType stage,
-               GPUVAddr gpu_addr_, VAddr cpu_addr, VideoCommon::Shader::ProgramCode program_code_,
-               u32 main_offset)
-    : gpu_addr(gpu_addr_), program_code(std::move(program_code_)), registry(stage, engine),
-      shader_ir(program_code, main_offset, compiler_settings, registry),
+Shader::Shader(Tegra::Engines::ConstBufferEngineInterface& engine_, ShaderType stage_,
+               GPUVAddr gpu_addr_, VAddr cpu_addr_, ProgramCode program_code_, u32 main_offset_)
+    : gpu_addr(gpu_addr_), program_code(std::move(program_code_)), registry(stage_, engine_),
+      shader_ir(program_code, main_offset_, compiler_settings, registry),
       entries(GenerateShaderEntries(shader_ir)) {}
 
 Shader::~Shader() = default;
 
-VKPipelineCache::VKPipelineCache(RasterizerVulkan& rasterizer, Tegra::GPU& gpu_,
+VKPipelineCache::VKPipelineCache(RasterizerVulkan& rasterizer_, Tegra::GPU& gpu_,
                                  Tegra::Engines::Maxwell3D& maxwell3d_,
                                  Tegra::Engines::KeplerCompute& kepler_compute_,
                                  Tegra::MemoryManager& gpu_memory_, const VKDevice& device_,
                                  VKScheduler& scheduler_, VKDescriptorPool& descriptor_pool_,
                                  VKUpdateDescriptorQueue& update_descriptor_queue_,
                                  VKRenderPassCache& renderpass_cache_)
-    : VideoCommon::ShaderCache<Shader>{rasterizer}, gpu{gpu_}, maxwell3d{maxwell3d_},
-      kepler_compute{kepler_compute_}, gpu_memory{gpu_memory_}, device{device_},
-      scheduler{scheduler_}, descriptor_pool{descriptor_pool_},
-      update_descriptor_queue{update_descriptor_queue_}, renderpass_cache{renderpass_cache_} {}
+    : ShaderCache{rasterizer_}, gpu{gpu_}, maxwell3d{maxwell3d_}, kepler_compute{kepler_compute_},
+      gpu_memory{gpu_memory_}, device{device_}, scheduler{scheduler_},
+      descriptor_pool{descriptor_pool_}, update_descriptor_queue{update_descriptor_queue_},
+      renderpass_cache{renderpass_cache_} {}
 
 VKPipelineCache::~VKPipelineCache() = default;
 
diff --git a/src/video_core/renderer_vulkan/vk_pipeline_cache.h b/src/video_core/renderer_vulkan/vk_pipeline_cache.h
index e558e6658..9e1f8fcbb 100644
--- a/src/video_core/renderer_vulkan/vk_pipeline_cache.h
+++ b/src/video_core/renderer_vulkan/vk_pipeline_cache.h
@@ -84,9 +84,9 @@ namespace Vulkan {
 
 class Shader {
 public:
-    explicit Shader(Tegra::Engines::ConstBufferEngineInterface& engine,
-                    Tegra::Engines::ShaderType stage, GPUVAddr gpu_addr, VAddr cpu_addr,
-                    VideoCommon::Shader::ProgramCode program_code, u32 main_offset);
+    explicit Shader(Tegra::Engines::ConstBufferEngineInterface& engine_,
+                    Tegra::Engines::ShaderType stage_, GPUVAddr gpu_addr, VAddr cpu_addr_,
+                    VideoCommon::Shader::ProgramCode program_code, u32 main_offset_);
     ~Shader();
 
     GPUVAddr GetGpuAddr() const {
@@ -119,13 +119,13 @@ private:
 
 class VKPipelineCache final : public VideoCommon::ShaderCache<Shader> {
 public:
-    explicit VKPipelineCache(RasterizerVulkan& rasterizer, Tegra::GPU& gpu,
-                             Tegra::Engines::Maxwell3D& maxwell3d,
-                             Tegra::Engines::KeplerCompute& kepler_compute,
-                             Tegra::MemoryManager& gpu_memory, const VKDevice& device,
-                             VKScheduler& scheduler, VKDescriptorPool& descriptor_pool,
-                             VKUpdateDescriptorQueue& update_descriptor_queue,
-                             VKRenderPassCache& renderpass_cache);
+    explicit VKPipelineCache(RasterizerVulkan& rasterizer_, Tegra::GPU& gpu_,
+                             Tegra::Engines::Maxwell3D& maxwell3d_,
+                             Tegra::Engines::KeplerCompute& kepler_compute_,
+                             Tegra::MemoryManager& gpu_memory_, const VKDevice& device_,
+                             VKScheduler& scheduler_, VKDescriptorPool& descriptor_pool_,
+                             VKUpdateDescriptorQueue& update_descriptor_queue_,
+                             VKRenderPassCache& renderpass_cache_);
     ~VKPipelineCache() override;
 
     std::array<Shader*, Maxwell::MaxShaderProgram> GetShaders();
diff --git a/src/video_core/renderer_vulkan/vk_query_cache.cpp b/src/video_core/renderer_vulkan/vk_query_cache.cpp
index 6fa071737..038760de3 100644
--- a/src/video_core/renderer_vulkan/vk_query_cache.cpp
+++ b/src/video_core/renderer_vulkan/vk_query_cache.cpp
@@ -69,12 +69,10 @@ void QueryPool::Reserve(std::pair<VkQueryPool, u32> query) {
 VKQueryCache::VKQueryCache(VideoCore::RasterizerInterface& rasterizer_,
                            Tegra::Engines::Maxwell3D& maxwell3d_, Tegra::MemoryManager& gpu_memory_,
                            const VKDevice& device_, VKScheduler& scheduler_)
-    : QueryCacheBase<VKQueryCache, CachedQuery, CounterStream, HostCounter>{rasterizer_, maxwell3d_,
-                                                                            gpu_memory_},
-      device{device_}, scheduler{scheduler_}, query_pools{
-                                                  QueryPool{device_, scheduler_,
-                                                            QueryType::SamplesPassed},
-      } {}
+    : QueryCacheBase{rasterizer_, maxwell3d_, gpu_memory_}, device{device_}, scheduler{scheduler_},
+      query_pools{
+          QueryPool{device_, scheduler_, QueryType::SamplesPassed},
+      } {}
 
 VKQueryCache::~VKQueryCache() {
     // TODO(Rodrigo): This is a hack to destroy all HostCounter instances before the base class
@@ -97,8 +95,8 @@ void VKQueryCache::Reserve(QueryType type, std::pair<VkQueryPool, u32> query) {
 
 HostCounter::HostCounter(VKQueryCache& cache_, std::shared_ptr<HostCounter> dependency_,
                          QueryType type_)
-    : HostCounterBase<VKQueryCache, HostCounter>{std::move(dependency_)}, cache{cache_},
-      type{type_}, query{cache_.AllocateQuery(type_)}, tick{cache_.Scheduler().CurrentTick()} {
+    : HostCounterBase{std::move(dependency_)}, cache{cache_}, type{type_},
+      query{cache_.AllocateQuery(type_)}, tick{cache_.Scheduler().CurrentTick()} {
     const vk::Device* logical = &cache_.Device().GetLogical();
     cache_.Scheduler().Record([logical, query = query](vk::CommandBuffer cmdbuf) {
         logical->ResetQueryPoolEXT(query.first, query.second, 1);
@@ -119,18 +117,20 @@ u64 HostCounter::BlockingQuery() const {
     if (tick >= cache.Scheduler().CurrentTick()) {
         cache.Scheduler().Flush();
     }
+
     u64 data;
-    const VkResult result = cache.Device().GetLogical().GetQueryResults(
+    const VkResult query_result = cache.Device().GetLogical().GetQueryResults(
         query.first, query.second, 1, sizeof(data), &data, sizeof(data),
         VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT);
-    switch (result) {
+
+    switch (query_result) {
     case VK_SUCCESS:
         return data;
     case VK_ERROR_DEVICE_LOST:
         cache.Device().ReportLoss();
         [[fallthrough]];
     default:
-        throw vk::Exception(result);
+        throw vk::Exception(query_result);
     }
 }
 
diff --git a/src/video_core/renderer_vulkan/vk_query_cache.h b/src/video_core/renderer_vulkan/vk_query_cache.h
index 201fca888..837fe9ebf 100644
--- a/src/video_core/renderer_vulkan/vk_query_cache.h
+++ b/src/video_core/renderer_vulkan/vk_query_cache.h
@@ -95,8 +95,8 @@ private:
 
 class CachedQuery : public VideoCommon::CachedQueryBase<HostCounter> {
 public:
-    explicit CachedQuery(VKQueryCache&, VideoCore::QueryType, VAddr cpu_addr, u8* host_ptr)
-        : VideoCommon::CachedQueryBase<HostCounter>{cpu_addr, host_ptr} {}
+    explicit CachedQuery(VKQueryCache&, VideoCore::QueryType, VAddr cpu_addr_, u8* host_ptr_)
+        : CachedQueryBase{cpu_addr_, host_ptr_} {}
 };
 
 } // namespace Vulkan
diff --git a/src/video_core/renderer_vulkan/vk_rasterizer.cpp b/src/video_core/renderer_vulkan/vk_rasterizer.cpp
index 560386081..f93986aab 100644
--- a/src/video_core/renderer_vulkan/vk_rasterizer.cpp
+++ b/src/video_core/renderer_vulkan/vk_rasterizer.cpp
@@ -128,12 +128,12 @@ Tegra::Texture::FullTextureInfo GetTextureInfo(const Engine& engine, const Entry
             const u32 offset_2 = entry.secondary_offset;
             const u32 handle_1 = engine.AccessConstBuffer32(stage_type, buffer_1, offset_1);
             const u32 handle_2 = engine.AccessConstBuffer32(stage_type, buffer_2, offset_2);
-            return engine.GetTextureInfo(handle_1 | handle_2);
+            return engine.GetTextureInfo(Tegra::Texture::TextureHandle{handle_1 | handle_2});
         }
     }
     if (entry.is_bindless) {
         const auto tex_handle = engine.AccessConstBuffer32(stage_type, entry.buffer, entry.offset);
-        return engine.GetTextureInfo(tex_handle);
+        return engine.GetTextureInfo(Tegra::Texture::TextureHandle{tex_handle});
     }
     const auto& gpu_profile = engine.AccessGuestDriverProfile();
     const u32 entry_offset = static_cast<u32>(index * gpu_profile.GetTextureHandlerSize());
@@ -380,12 +380,12 @@ void RasterizerVulkan::DrawParameters::Draw(vk::CommandBuffer cmdbuf) const {
     }
 }
 
-RasterizerVulkan::RasterizerVulkan(Core::Frontend::EmuWindow& emu_window, Tegra::GPU& gpu_,
+RasterizerVulkan::RasterizerVulkan(Core::Frontend::EmuWindow& emu_window_, Tegra::GPU& gpu_,
                                    Tegra::MemoryManager& gpu_memory_,
-                                   Core::Memory::Memory& cpu_memory, VKScreenInfo& screen_info_,
+                                   Core::Memory::Memory& cpu_memory_, VKScreenInfo& screen_info_,
                                    const VKDevice& device_, VKMemoryManager& memory_manager_,
                                    StateTracker& state_tracker_, VKScheduler& scheduler_)
-    : RasterizerAccelerated(cpu_memory), gpu(gpu_), gpu_memory(gpu_memory_),
+    : RasterizerAccelerated(cpu_memory_), gpu(gpu_), gpu_memory(gpu_memory_),
       maxwell3d(gpu.Maxwell3D()), kepler_compute(gpu.KeplerCompute()), screen_info(screen_info_),
       device(device_), memory_manager(memory_manager_), state_tracker(state_tracker_),
       scheduler(scheduler_), staging_pool(device, memory_manager, scheduler),
@@ -397,11 +397,11 @@ RasterizerVulkan::RasterizerVulkan(Core::Frontend::EmuWindow& emu_window, Tegra:
       texture_cache(*this, maxwell3d, gpu_memory, device, memory_manager, scheduler, staging_pool),
       pipeline_cache(*this, gpu, maxwell3d, kepler_compute, gpu_memory, device, scheduler,
                      descriptor_pool, update_descriptor_queue, renderpass_cache),
-      buffer_cache(*this, gpu_memory, cpu_memory, device, memory_manager, scheduler, staging_pool),
+      buffer_cache(*this, gpu_memory, cpu_memory_, device, memory_manager, scheduler, staging_pool),
       sampler_cache(device), query_cache(*this, maxwell3d, gpu_memory, device, scheduler),
       fence_manager(*this, gpu, gpu_memory, texture_cache, buffer_cache, query_cache, device,
                     scheduler),
-      wfi_event(device.GetLogical().CreateEvent()), async_shaders(emu_window) {
+      wfi_event(device.GetLogical().CreateEvent()), async_shaders(emu_window_) {
     scheduler.SetQueryCache(query_cache);
     if (device.UseAsynchronousShaders()) {
         async_shaders.AllocateWorkers();
diff --git a/src/video_core/renderer_vulkan/vk_rasterizer.h b/src/video_core/renderer_vulkan/vk_rasterizer.h
index 1789fb285..30ec58eb4 100644
--- a/src/video_core/renderer_vulkan/vk_rasterizer.h
+++ b/src/video_core/renderer_vulkan/vk_rasterizer.h
@@ -105,11 +105,11 @@ struct ImageView {
 
 class RasterizerVulkan final : public VideoCore::RasterizerAccelerated {
 public:
-    explicit RasterizerVulkan(Core::Frontend::EmuWindow& emu_window, Tegra::GPU& gpu,
-                              Tegra::MemoryManager& gpu_memory, Core::Memory::Memory& cpu_memory,
-                              VKScreenInfo& screen_info, const VKDevice& device,
-                              VKMemoryManager& memory_manager, StateTracker& state_tracker,
-                              VKScheduler& scheduler);
+    explicit RasterizerVulkan(Core::Frontend::EmuWindow& emu_window_, Tegra::GPU& gpu_,
+                              Tegra::MemoryManager& gpu_memory_, Core::Memory::Memory& cpu_memory_,
+                              VKScreenInfo& screen_info_, const VKDevice& device_,
+                              VKMemoryManager& memory_manager_, StateTracker& state_tracker_,
+                              VKScheduler& scheduler_);
     ~RasterizerVulkan() override;
 
     void Draw(bool is_indexed, bool is_instanced) override;
diff --git a/src/video_core/renderer_vulkan/vk_texture_cache.cpp b/src/video_core/renderer_vulkan/vk_texture_cache.cpp
index 64649699f..1ff109880 100644
--- a/src/video_core/renderer_vulkan/vk_texture_cache.cpp
+++ b/src/video_core/renderer_vulkan/vk_texture_cache.cpp
@@ -489,12 +489,12 @@ VkImageView CachedSurfaceView::GetAttachment() {
     return *render_target;
 }
 
-VKTextureCache::VKTextureCache(VideoCore::RasterizerInterface& rasterizer,
-                               Tegra::Engines::Maxwell3D& maxwell3d,
-                               Tegra::MemoryManager& gpu_memory, const VKDevice& device_,
+VKTextureCache::VKTextureCache(VideoCore::RasterizerInterface& rasterizer_,
+                               Tegra::Engines::Maxwell3D& maxwell3d_,
+                               Tegra::MemoryManager& gpu_memory_, const VKDevice& device_,
                                VKMemoryManager& memory_manager_, VKScheduler& scheduler_,
                                VKStagingBufferPool& staging_pool_)
-    : TextureCache(rasterizer, maxwell3d, gpu_memory, device_.IsOptimalAstcSupported()),
+    : TextureCache(rasterizer_, maxwell3d_, gpu_memory_, device_.IsOptimalAstcSupported()),
       device{device_}, memory_manager{memory_manager_}, scheduler{scheduler_}, staging_pool{
                                                                                    staging_pool_} {}
 
diff --git a/src/video_core/renderer_vulkan/vk_texture_cache.h b/src/video_core/renderer_vulkan/vk_texture_cache.h
index 06880f228..1c632bd2c 100644
--- a/src/video_core/renderer_vulkan/vk_texture_cache.h
+++ b/src/video_core/renderer_vulkan/vk_texture_cache.h
@@ -193,10 +193,11 @@ private:
 
 class VKTextureCache final : public TextureCacheBase {
 public:
-    explicit VKTextureCache(VideoCore::RasterizerInterface& rasterizer,
-                            Tegra::Engines::Maxwell3D& maxwell3d, Tegra::MemoryManager& gpu_memory,
-                            const VKDevice& device, VKMemoryManager& memory_manager,
-                            VKScheduler& scheduler, VKStagingBufferPool& staging_pool);
+    explicit VKTextureCache(VideoCore::RasterizerInterface& rasterizer_,
+                            Tegra::Engines::Maxwell3D& maxwell3d_,
+                            Tegra::MemoryManager& gpu_memory_, const VKDevice& device_,
+                            VKMemoryManager& memory_manager_, VKScheduler& scheduler_,
+                            VKStagingBufferPool& staging_pool_);
     ~VKTextureCache();
 
 private: