| author | 2020-07-16 17:32:12 -0400 |
|---|---|
| committer | 2020-07-16 17:32:12 -0400 |
| commit | 5330ca396d593757c78c271341691eb33c62d909 |
| tree | 48a882bf589f15b1c5874d775dce692ab108ec08 |
| parent | vk_compute_pass: Make use of designated initializers where applicable |
vk_compute_pipeline: Make use of designated initializers where applicable
| -rw-r--r-- | src/video_core/renderer_vulkan/vk_compute_pipeline.cpp | 131 |
1 file changed, 68 insertions(+), 63 deletions(-)
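For context on the pattern applied in the diff below: instead of declaring a Vulkan create-info struct and assigning each member one statement at a time, the struct is aggregate-initialized in a single expression using C++20 designated initializers, so every member is named explicitly at the point of construction. A minimal sketch of the before/after shape, using a hypothetical `BindingDesc` aggregate (not a real Vulkan or yuzu type) so it compiles standalone:

```cpp
// Illustrative sketch only: BindingDesc is a stand-in aggregate, not a type from
// the yuzu codebase, so the snippet builds without the Vulkan headers (C++20).
#include <cstdint>
#include <vector>

struct BindingDesc {
    std::uint32_t binding;
    std::uint32_t descriptorCount;
    const void* pImmutableSamplers;
};

int main() {
    std::vector<BindingDesc> bindings;

    // Before: append a value-initialized element, then assign each member by hand.
    BindingDesc& entry = bindings.emplace_back();
    entry.binding = 0;
    entry.descriptorCount = 1;
    entry.pImmutableSamplers = nullptr;

    // After: C++20 designated initializers name each member at the point of
    // construction; any member left out is value-initialized rather than left
    // to a later assignment.
    bindings.push_back({
        .binding = 1,
        .descriptorCount = 1,
        .pImmutableSamplers = nullptr,
    });

    return 0;
}
```

The same shape recurs throughout the diff, including the nested initialization of the `stage` member inside `VkComputePipelineCreateInfo`.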
diff --git a/src/video_core/renderer_vulkan/vk_compute_pipeline.cpp b/src/video_core/renderer_vulkan/vk_compute_pipeline.cpp
index 281bf9ac3..ed9d2991c 100644
--- a/src/video_core/renderer_vulkan/vk_compute_pipeline.cpp
+++ b/src/video_core/renderer_vulkan/vk_compute_pipeline.cpp
@@ -43,12 +43,13 @@ vk::DescriptorSetLayout VKComputePipeline::CreateDescriptorSetLayout() const {
     const auto add_bindings = [&](VkDescriptorType descriptor_type, std::size_t num_entries) {
         // TODO(Rodrigo): Maybe make individual bindings here?
         for (u32 bindpoint = 0; bindpoint < static_cast<u32>(num_entries); ++bindpoint) {
-            VkDescriptorSetLayoutBinding& entry = bindings.emplace_back();
-            entry.binding = binding++;
-            entry.descriptorType = descriptor_type;
-            entry.descriptorCount = 1;
-            entry.stageFlags = VK_SHADER_STAGE_COMPUTE_BIT;
-            entry.pImmutableSamplers = nullptr;
+            bindings.push_back({
+                .binding = binding++,
+                .descriptorType = descriptor_type,
+                .descriptorCount = 1,
+                .stageFlags = VK_SHADER_STAGE_COMPUTE_BIT,
+                .pImmutableSamplers = nullptr,
+            });
         }
     };
     add_bindings(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, entries.const_buffers.size());
@@ -58,25 +59,25 @@ vk::DescriptorSetLayout VKComputePipeline::CreateDescriptorSetLayout() const {
     add_bindings(VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, entries.storage_texels.size());
     add_bindings(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, entries.images.size());
 
-    VkDescriptorSetLayoutCreateInfo ci;
-    ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
-    ci.pNext = nullptr;
-    ci.flags = 0;
-    ci.bindingCount = static_cast<u32>(bindings.size());
-    ci.pBindings = bindings.data();
-    return device.GetLogical().CreateDescriptorSetLayout(ci);
+    return device.GetLogical().CreateDescriptorSetLayout({
+        .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
+        .pNext = nullptr,
+        .flags = 0,
+        .bindingCount = static_cast<u32>(bindings.size()),
+        .pBindings = bindings.data(),
+    });
 }
 
 vk::PipelineLayout VKComputePipeline::CreatePipelineLayout() const {
-    VkPipelineLayoutCreateInfo ci;
-    ci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
-    ci.pNext = nullptr;
-    ci.flags = 0;
-    ci.setLayoutCount = 1;
-    ci.pSetLayouts = descriptor_set_layout.address();
-    ci.pushConstantRangeCount = 0;
-    ci.pPushConstantRanges = nullptr;
-    return device.GetLogical().CreatePipelineLayout(ci);
+    return device.GetLogical().CreatePipelineLayout({
+        .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
+        .pNext = nullptr,
+        .flags = 0,
+        .setLayoutCount = 1,
+        .pSetLayouts = descriptor_set_layout.address(),
+        .pushConstantRangeCount = 0,
+        .pPushConstantRanges = nullptr,
+    });
 }
 
 vk::DescriptorUpdateTemplateKHR VKComputePipeline::CreateDescriptorUpdateTemplate() const {
@@ -89,59 +90,63 @@ vk::DescriptorUpdateTemplateKHR VKComputePipeline::CreateDescriptorUpdateTemplat
         return {};
     }
 
-    VkDescriptorUpdateTemplateCreateInfoKHR ci;
-    ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR;
-    ci.pNext = nullptr;
-    ci.flags = 0;
-    ci.descriptorUpdateEntryCount = static_cast<u32>(template_entries.size());
-    ci.pDescriptorUpdateEntries = template_entries.data();
-    ci.templateType = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR;
-    ci.descriptorSetLayout = *descriptor_set_layout;
-    ci.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
-    ci.pipelineLayout = *layout;
-    ci.set = DESCRIPTOR_SET;
-    return device.GetLogical().CreateDescriptorUpdateTemplateKHR(ci);
+    return device.GetLogical().CreateDescriptorUpdateTemplateKHR({
+        .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR,
+        .pNext = nullptr,
+        .flags = 0,
+        .descriptorUpdateEntryCount = static_cast<u32>(template_entries.size()),
+        .pDescriptorUpdateEntries = template_entries.data(),
+        .templateType = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR,
+        .descriptorSetLayout = *descriptor_set_layout,
+        .pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
+        .pipelineLayout = *layout,
+        .set = DESCRIPTOR_SET,
+    });
 }
 
 vk::ShaderModule VKComputePipeline::CreateShaderModule(const std::vector<u32>& code) const {
     device.SaveShader(code);
 
-    VkShaderModuleCreateInfo ci;
-    ci.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
-    ci.pNext = nullptr;
-    ci.flags = 0;
-    ci.codeSize = code.size() * sizeof(u32);
-    ci.pCode = code.data();
-    return device.GetLogical().CreateShaderModule(ci);
+    return device.GetLogical().CreateShaderModule({
+        .sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO,
+        .pNext = nullptr,
+        .flags = 0,
+        .codeSize = code.size() * sizeof(u32),
+        .pCode = code.data(),
+    });
 }
 
 vk::Pipeline VKComputePipeline::CreatePipeline() const {
-    VkComputePipelineCreateInfo ci;
-    VkPipelineShaderStageCreateInfo& stage_ci = ci.stage;
-    stage_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
-    stage_ci.pNext = nullptr;
-    stage_ci.flags = 0;
-    stage_ci.stage = VK_SHADER_STAGE_COMPUTE_BIT;
-    stage_ci.module = *shader_module;
-    stage_ci.pName = "main";
-    stage_ci.pSpecializationInfo = nullptr;
-
-    VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT subgroup_size_ci;
-    subgroup_size_ci.sType =
-        VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT;
-    subgroup_size_ci.pNext = nullptr;
-    subgroup_size_ci.requiredSubgroupSize = GuestWarpSize;
+
+    VkComputePipelineCreateInfo ci{
+        .sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
+        .pNext = nullptr,
+        .flags = 0,
+        .stage =
+            {
+                .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
+                .pNext = nullptr,
+                .flags = 0,
+                .stage = VK_SHADER_STAGE_COMPUTE_BIT,
+                .module = *shader_module,
+                .pName = "main",
+                .pSpecializationInfo = nullptr,
+            },
+        .layout = *layout,
+        .basePipelineHandle = nullptr,
+        .basePipelineIndex = 0,
+    };
+
+    const VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT subgroup_size_ci{
+        .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT,
+        .pNext = nullptr,
+        .requiredSubgroupSize = GuestWarpSize,
+    };
 
     if (entries.uses_warps && device.IsGuestWarpSizeSupported(VK_SHADER_STAGE_COMPUTE_BIT)) {
-        stage_ci.pNext = &subgroup_size_ci;
+        ci.stage.pNext = &subgroup_size_ci;
     }
 
-    ci.sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
-    ci.pNext = nullptr;
-    ci.flags = 0;
-    ci.layout = *layout;
-    ci.basePipelineHandle = nullptr;
-    ci.basePipelineIndex = 0;
     return device.GetLogical().CreateComputePipeline(ci);
 }
 