author:    2020-07-01 02:28:53 -0300
committer: 2020-07-13 01:44:23 -0300
commit:    fbc232426d8ff739b0028fb5b41fb5124f7e1792
tree:      f3e99246a2b398151b409a1d331f27780a9a28ef /src/video_core/renderer_vulkan
parent:    video_core: Fix DXT4 and RGB565
video_core: Rearrange pixel format names
Normalizes pixel format names to match Vulkan names. Prior to this
commit, pixel format names followed no convention, leading to confusion
and potential bugs.
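For illustration, a few of the renames this commit makes (old name, new name, and the VkFormat each entry maps to in tex_format_tuples), copied from the comments in the diff below. The small printing program is only a sketch, not yuzu code:

```cpp
#include <cstdio>

// A few of the renames, copied from the comments in the diff below:
// old ad-hoc name -> new VkFormat-style name (VkFormat it maps to).
constexpr const char* kRenames[][3] = {
    {"ABGR8U", "A8B8G8R8_UNORM", "VK_FORMAT_A8B8G8R8_UNORM_PACK32"},
    {"RGBA16F", "R16G16B16A16_FLOAT", "VK_FORMAT_R16G16B16A16_SFLOAT"},
    {"DXT1", "BC1_RGBA_UNORM", "VK_FORMAT_BC1_RGBA_UNORM_BLOCK"},
    {"Z24S8", "D24_UNORM_S8_UINT", "VK_FORMAT_D24_UNORM_S8_UINT"},
};

int main() {
    for (const auto& r : kRenames) {
        std::printf("%-8s -> %-18s (%s)\n", r[0], r[1], r[2]);
    }
}
```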
Diffstat (limited to 'src/video_core/renderer_vulkan'):

 src/video_core/renderer_vulkan/maxwell_to_vk.cpp    | 184
 src/video_core/renderer_vulkan/vk_blit_screen.cpp   |   4
 src/video_core/renderer_vulkan/vk_texture_cache.cpp |  10

3 files changed, 99 insertions, 99 deletions
```diff
diff --git a/src/video_core/renderer_vulkan/maxwell_to_vk.cpp b/src/video_core/renderer_vulkan/maxwell_to_vk.cpp
index 57c3822c2..f8c77f4fa 100644
--- a/src/video_core/renderer_vulkan/maxwell_to_vk.cpp
+++ b/src/video_core/renderer_vulkan/maxwell_to_vk.cpp
@@ -117,101 +117,101 @@ struct FormatTuple {
     VkFormat format; ///< Vulkan format
     int usage = 0;   ///< Describes image format usage
 } constexpr tex_format_tuples[] = {
-    {VK_FORMAT_A8B8G8R8_UNORM_PACK32, Attachable | Storage}, // ABGR8U
+    {VK_FORMAT_A8B8G8R8_UNORM_PACK32, Attachable | Storage}, // A8B8G8R8_UNORM
-    {VK_FORMAT_A8B8G8R8_SNORM_PACK32, Attachable | Storage}, // ABGR8S
+    {VK_FORMAT_A8B8G8R8_SNORM_PACK32, Attachable | Storage}, // A8B8G8R8_SNORM
-    {VK_FORMAT_A8B8G8R8_SINT_PACK32, Attachable | Storage}, // ABGR8I
+    {VK_FORMAT_A8B8G8R8_SINT_PACK32, Attachable | Storage}, // A8B8G8R8_SINT
-    {VK_FORMAT_A8B8G8R8_UINT_PACK32, Attachable | Storage}, // ABGR8UI
+    {VK_FORMAT_A8B8G8R8_UINT_PACK32, Attachable | Storage}, // A8B8G8R8_UINT
-    {VK_FORMAT_R5G6B5_UNORM_PACK16, Attachable}, // B5G6R5U
+    {VK_FORMAT_R5G6B5_UNORM_PACK16, Attachable}, // R5G6B5_UNORM
-    {VK_FORMAT_B5G6R5_UNORM_PACK16, Attachable}, // R5G6B5U
+    {VK_FORMAT_B5G6R5_UNORM_PACK16, Attachable}, // B5G6R5_UNORM
-    {VK_FORMAT_A1R5G5B5_UNORM_PACK16, Attachable}, // B5G5R5A1U
+    {VK_FORMAT_A1R5G5B5_UNORM_PACK16, Attachable}, // A1R5G5B5_UNORM
-    {VK_FORMAT_A2B10G10R10_UNORM_PACK32, Attachable | Storage}, // A2B10G10R10U
+    {VK_FORMAT_A2B10G10R10_UNORM_PACK32, Attachable | Storage}, // A2B10G10R10_UNORM
-    {VK_FORMAT_A2B10G10R10_UINT_PACK32, Attachable | Storage}, // A2B10G10R10UI
+    {VK_FORMAT_A2B10G10R10_UINT_PACK32, Attachable | Storage}, // A2B10G10R10_UINT
-    {VK_FORMAT_A1R5G5B5_UNORM_PACK16, Attachable}, // A1B5G5R5U (flipped with swizzle)
+    {VK_FORMAT_A1R5G5B5_UNORM_PACK16, Attachable}, // A1B5G5R5_UNORM (flipped with swizzle)
-    {VK_FORMAT_R8_UNORM, Attachable | Storage}, // R8U
+    {VK_FORMAT_R8_UNORM, Attachable | Storage}, // R8_UNORM
-    {VK_FORMAT_R8_SNORM, Attachable | Storage}, // R8S
+    {VK_FORMAT_R8_SNORM, Attachable | Storage}, // R8_SNORM
-    {VK_FORMAT_R8_SINT, Attachable | Storage}, // R8I
+    {VK_FORMAT_R8_SINT, Attachable | Storage}, // R8_SINT
-    {VK_FORMAT_R8_UINT, Attachable | Storage}, // R8UI
+    {VK_FORMAT_R8_UINT, Attachable | Storage}, // R8_UINT
-    {VK_FORMAT_R16G16B16A16_SFLOAT, Attachable | Storage}, // RGBA16F
+    {VK_FORMAT_R16G16B16A16_SFLOAT, Attachable | Storage}, // R16G16B16A16_FLOAT
-    {VK_FORMAT_R16G16B16A16_UNORM, Attachable | Storage}, // RGBA16U
+    {VK_FORMAT_R16G16B16A16_UNORM, Attachable | Storage}, // R16G16B16A16_UNORM
-    {VK_FORMAT_R16G16B16A16_SNORM, Attachable | Storage}, // RGBA16S
+    {VK_FORMAT_R16G16B16A16_SNORM, Attachable | Storage}, // R16G16B16A16_SNORM
-    {VK_FORMAT_R16G16B16A16_SINT, Attachable | Storage}, // RGBA16I
+    {VK_FORMAT_R16G16B16A16_SINT, Attachable | Storage}, // R16G16B16A16_SINT
-    {VK_FORMAT_R16G16B16A16_UINT, Attachable | Storage}, // RGBA16UI
+    {VK_FORMAT_R16G16B16A16_UINT, Attachable | Storage}, // R16G16B16A16_UINT
-    {VK_FORMAT_B10G11R11_UFLOAT_PACK32, Attachable | Storage}, // R11FG11FB10F
+    {VK_FORMAT_B10G11R11_UFLOAT_PACK32, Attachable | Storage}, // B10G11R11_FLOAT
-    {VK_FORMAT_R32G32B32A32_UINT, Attachable | Storage}, // RGBA32UI
+    {VK_FORMAT_R32G32B32A32_UINT, Attachable | Storage}, // R32G32B32A32_UINT
-    {VK_FORMAT_BC1_RGBA_UNORM_BLOCK}, // DXT1
+    {VK_FORMAT_BC1_RGBA_UNORM_BLOCK}, // BC1_RGBA_UNORM
-    {VK_FORMAT_BC2_UNORM_BLOCK}, // DXT23
+    {VK_FORMAT_BC2_UNORM_BLOCK}, // BC2_UNORM
-    {VK_FORMAT_BC3_UNORM_BLOCK}, // DXT45
+    {VK_FORMAT_BC3_UNORM_BLOCK}, // BC3_UNORM
-    {VK_FORMAT_BC4_UNORM_BLOCK}, // DXN1UNORM
+    {VK_FORMAT_BC4_UNORM_BLOCK}, // BC4_UNORM
-    {VK_FORMAT_BC4_SNORM_BLOCK}, // DXN1SNORM
+    {VK_FORMAT_BC4_SNORM_BLOCK}, // BC4_SNORM
-    {VK_FORMAT_BC5_UNORM_BLOCK}, // DXN2UNORM
+    {VK_FORMAT_BC5_UNORM_BLOCK}, // BC5_UNORM
-    {VK_FORMAT_BC5_SNORM_BLOCK}, // DXN2SNORM
+    {VK_FORMAT_BC5_SNORM_BLOCK}, // BC5_SNORM
-    {VK_FORMAT_BC7_UNORM_BLOCK}, // BC7U
+    {VK_FORMAT_BC7_UNORM_BLOCK}, // BC7_UNORM
-    {VK_FORMAT_BC6H_UFLOAT_BLOCK}, // BC6H_UF16
+    {VK_FORMAT_BC6H_UFLOAT_BLOCK}, // BC6H_UFLOAT
-    {VK_FORMAT_BC6H_SFLOAT_BLOCK}, // BC6H_SF16
+    {VK_FORMAT_BC6H_SFLOAT_BLOCK}, // BC6H_SFLOAT
-    {VK_FORMAT_ASTC_4x4_UNORM_BLOCK}, // ASTC_2D_4X4
+    {VK_FORMAT_ASTC_4x4_UNORM_BLOCK}, // ASTC_2D_4X4_UNORM
-    {VK_FORMAT_B8G8R8A8_UNORM, Attachable}, // BGRA8
+    {VK_FORMAT_B8G8R8A8_UNORM, Attachable}, // B8G8R8A8_UNORM
-    {VK_FORMAT_R32G32B32A32_SFLOAT, Attachable | Storage}, // RGBA32F
+    {VK_FORMAT_R32G32B32A32_SFLOAT, Attachable | Storage}, // R32G32B32A32_FLOAT
-    {VK_FORMAT_R32G32B32A32_SINT, Attachable | Storage}, // RGBA32I
+    {VK_FORMAT_R32G32B32A32_SINT, Attachable | Storage}, // R32G32B32A32_SINT
-    {VK_FORMAT_R32G32_SFLOAT, Attachable | Storage}, // RG32F
+    {VK_FORMAT_R32G32_SFLOAT, Attachable | Storage}, // R32G32_FLOAT
-    {VK_FORMAT_R32G32_SINT, Attachable | Storage}, // RG32I
+    {VK_FORMAT_R32G32_SINT, Attachable | Storage}, // R32G32_SINT
-    {VK_FORMAT_R32_SFLOAT, Attachable | Storage}, // R32F
+    {VK_FORMAT_R32_SFLOAT, Attachable | Storage}, // R32_FLOAT
-    {VK_FORMAT_R16_SFLOAT, Attachable | Storage}, // R16F
+    {VK_FORMAT_R16_SFLOAT, Attachable | Storage}, // R16_FLOAT
-    {VK_FORMAT_R16_UNORM, Attachable | Storage}, // R16U
+    {VK_FORMAT_R16_UNORM, Attachable | Storage}, // R16_UNORM
-    {VK_FORMAT_UNDEFINED}, // R16S
+    {VK_FORMAT_UNDEFINED}, // R16_SNORM
-    {VK_FORMAT_R16_UINT, Attachable | Storage}, // R16UI
+    {VK_FORMAT_R16_UINT, Attachable | Storage}, // R16_UINT
-    {VK_FORMAT_UNDEFINED}, // R16I
+    {VK_FORMAT_UNDEFINED}, // R16_SINT
-    {VK_FORMAT_R16G16_UNORM, Attachable | Storage}, // RG16
+    {VK_FORMAT_R16G16_UNORM, Attachable | Storage}, // R16G16_UNORM
-    {VK_FORMAT_R16G16_SFLOAT, Attachable | Storage}, // RG16F
+    {VK_FORMAT_R16G16_SFLOAT, Attachable | Storage}, // R16G16_FLOAT
-    {VK_FORMAT_UNDEFINED}, // RG16UI
+    {VK_FORMAT_UNDEFINED}, // R16G16_UINT
-    {VK_FORMAT_UNDEFINED}, // RG16I
+    {VK_FORMAT_UNDEFINED}, // R16G16_SINT
-    {VK_FORMAT_R16G16_SNORM, Attachable | Storage}, // RG16S
+    {VK_FORMAT_R16G16_SNORM, Attachable | Storage}, // R16G16_SNORM
-    {VK_FORMAT_UNDEFINED}, // RGB32F
+    {VK_FORMAT_UNDEFINED}, // R32G32B32_FLOAT
-    {VK_FORMAT_R8G8B8A8_SRGB, Attachable}, // RGBA8_SRGB
+    {VK_FORMAT_R8G8B8A8_SRGB, Attachable}, // A8B8G8R8_SRGB
-    {VK_FORMAT_R8G8_UNORM, Attachable | Storage}, // RG8U
+    {VK_FORMAT_R8G8_UNORM, Attachable | Storage}, // R8G8_UNORM
-    {VK_FORMAT_R8G8_SNORM, Attachable | Storage}, // RG8S
+    {VK_FORMAT_R8G8_SNORM, Attachable | Storage}, // R8G8_SNORM
-    {VK_FORMAT_R8G8_SINT, Attachable | Storage}, // RG8I
+    {VK_FORMAT_R8G8_SINT, Attachable | Storage}, // R8G8_SINT
-    {VK_FORMAT_R8G8_UINT, Attachable | Storage}, // RG8UI
+    {VK_FORMAT_R8G8_UINT, Attachable | Storage}, // R8G8_UINT
-    {VK_FORMAT_R32G32_UINT, Attachable | Storage}, // RG32UI
+    {VK_FORMAT_R32G32_UINT, Attachable | Storage}, // R32G32_UINT
-    {VK_FORMAT_UNDEFINED}, // RGBX16F
+    {VK_FORMAT_UNDEFINED}, // R16G16B16X16_FLOAT
-    {VK_FORMAT_R32_UINT, Attachable | Storage}, // R32UI
+    {VK_FORMAT_R32_UINT, Attachable | Storage}, // R32_UINT
-    {VK_FORMAT_R32_SINT, Attachable | Storage}, // R32I
+    {VK_FORMAT_R32_SINT, Attachable | Storage}, // R32_SINT
-    {VK_FORMAT_ASTC_8x8_UNORM_BLOCK}, // ASTC_2D_8X8
+    {VK_FORMAT_ASTC_8x8_UNORM_BLOCK}, // ASTC_2D_8X8_UNORM
-    {VK_FORMAT_UNDEFINED}, // ASTC_2D_8X5
+    {VK_FORMAT_UNDEFINED}, // ASTC_2D_8X5_UNORM
-    {VK_FORMAT_UNDEFINED}, // ASTC_2D_5X4
+    {VK_FORMAT_UNDEFINED}, // ASTC_2D_5X4_UNORM
-    {VK_FORMAT_B8G8R8A8_SRGB, Attachable}, // BGRA8_SRGB
+    {VK_FORMAT_B8G8R8A8_SRGB, Attachable}, // B8G8R8A8_SRGB
-    {VK_FORMAT_BC1_RGBA_SRGB_BLOCK}, // DXT1_SRGB
+    {VK_FORMAT_BC1_RGBA_SRGB_BLOCK}, // BC1_RGBA_SRGB
-    {VK_FORMAT_BC2_SRGB_BLOCK}, // DXT23_SRGB
+    {VK_FORMAT_BC2_SRGB_BLOCK}, // BC2_SRGB
-    {VK_FORMAT_BC3_SRGB_BLOCK}, // DXT45_SRGB
+    {VK_FORMAT_BC3_SRGB_BLOCK}, // BC3_SRGB
-    {VK_FORMAT_BC7_SRGB_BLOCK}, // BC7U_SRGB
+    {VK_FORMAT_BC7_SRGB_BLOCK}, // BC7_SRGB
-    {VK_FORMAT_R4G4B4A4_UNORM_PACK16, Attachable}, // R4G4B4A4U
+    {VK_FORMAT_R4G4B4A4_UNORM_PACK16, Attachable}, // A4B4G4R4_UNORM
     {VK_FORMAT_ASTC_4x4_SRGB_BLOCK}, // ASTC_2D_4X4_SRGB
     {VK_FORMAT_ASTC_8x8_SRGB_BLOCK}, // ASTC_2D_8X8_SRGB
     {VK_FORMAT_ASTC_8x5_SRGB_BLOCK}, // ASTC_2D_8X5_SRGB
     {VK_FORMAT_ASTC_5x4_SRGB_BLOCK}, // ASTC_2D_5X4_SRGB
-    {VK_FORMAT_ASTC_5x5_UNORM_BLOCK}, // ASTC_2D_5X5
+    {VK_FORMAT_ASTC_5x5_UNORM_BLOCK}, // ASTC_2D_5X5_UNORM
     {VK_FORMAT_ASTC_5x5_SRGB_BLOCK}, // ASTC_2D_5X5_SRGB
-    {VK_FORMAT_ASTC_10x8_UNORM_BLOCK}, // ASTC_2D_10X8
+    {VK_FORMAT_ASTC_10x8_UNORM_BLOCK}, // ASTC_2D_10X8_UNORM
     {VK_FORMAT_ASTC_10x8_SRGB_BLOCK}, // ASTC_2D_10X8_SRGB
-    {VK_FORMAT_ASTC_6x6_UNORM_BLOCK}, // ASTC_2D_6X6
+    {VK_FORMAT_ASTC_6x6_UNORM_BLOCK}, // ASTC_2D_6X6_UNORM
     {VK_FORMAT_ASTC_6x6_SRGB_BLOCK}, // ASTC_2D_6X6_SRGB
-    {VK_FORMAT_ASTC_10x10_UNORM_BLOCK}, // ASTC_2D_10X10
+    {VK_FORMAT_ASTC_10x10_UNORM_BLOCK}, // ASTC_2D_10X10_UNORM
     {VK_FORMAT_ASTC_10x10_SRGB_BLOCK}, // ASTC_2D_10X10_SRGB
-    {VK_FORMAT_ASTC_12x12_UNORM_BLOCK}, // ASTC_2D_12X12
+    {VK_FORMAT_ASTC_12x12_UNORM_BLOCK}, // ASTC_2D_12X12_UNORM
     {VK_FORMAT_ASTC_12x12_SRGB_BLOCK}, // ASTC_2D_12X12_SRGB
-    {VK_FORMAT_ASTC_8x6_UNORM_BLOCK}, // ASTC_2D_8X6
+    {VK_FORMAT_ASTC_8x6_UNORM_BLOCK}, // ASTC_2D_8X6_UNORM
     {VK_FORMAT_ASTC_8x6_SRGB_BLOCK}, // ASTC_2D_8X6_SRGB
-    {VK_FORMAT_ASTC_6x5_UNORM_BLOCK}, // ASTC_2D_6X5
+    {VK_FORMAT_ASTC_6x5_UNORM_BLOCK}, // ASTC_2D_6X5_UNORM
     {VK_FORMAT_ASTC_6x5_SRGB_BLOCK}, // ASTC_2D_6X5_SRGB
-    {VK_FORMAT_E5B9G9R9_UFLOAT_PACK32}, // E5B9G9R9F
+    {VK_FORMAT_E5B9G9R9_UFLOAT_PACK32}, // E5B9G9R9_FLOAT

     // Depth formats
-    {VK_FORMAT_D32_SFLOAT, Attachable}, // Z32F
+    {VK_FORMAT_D32_SFLOAT, Attachable}, // D32_FLOAT
-    {VK_FORMAT_D16_UNORM, Attachable}, // Z16
+    {VK_FORMAT_D16_UNORM, Attachable}, // D16_UNORM

     // DepthStencil formats
-    {VK_FORMAT_D24_UNORM_S8_UINT, Attachable}, // Z24S8
+    {VK_FORMAT_D24_UNORM_S8_UINT, Attachable}, // D24_UNORM_S8_UINT
-    {VK_FORMAT_D24_UNORM_S8_UINT, Attachable}, // S8Z24 (emulated)
+    {VK_FORMAT_D24_UNORM_S8_UINT, Attachable}, // S8_UINT_D24_UNORM (emulated)
-    {VK_FORMAT_D32_SFLOAT_S8_UINT, Attachable}, // Z32FS8
+    {VK_FORMAT_D32_SFLOAT_S8_UINT, Attachable}, // D32_FLOAT_S8_UINT
 };
 static_assert(std::size(tex_format_tuples) == VideoCore::Surface::MaxPixelFormat);

@@ -232,7 +232,7 @@ FormatInfo SurfaceFormat(const VKDevice& device, FormatType format_type, PixelFo
         return {VK_FORMAT_A8B8G8R8_UNORM_PACK32, true, true};
     }

-    // Use ABGR8 on hardware that doesn't support ASTC natively
+    // Use A8B8G8R8_UNORM on hardware that doesn't support ASTC natively
     if (!device.IsOptimalAstcSupported() && VideoCore::Surface::IsPixelFormatASTC(pixel_format)) {
         tuple.format = VideoCore::Surface::IsPixelFormatSRGB(pixel_format)
                            ? VK_FORMAT_A8B8G8R8_SRGB_PACK32
```
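The second hunk above only renames a comment, but it sits in the ASTC fallback path of SurfaceFormat(): when the device lacks optimal ASTC support, the returned format is rewritten to an 8-bit RGBA packed format. A minimal sketch of that rule, assuming the non-sRGB branch (cut off in the hunk) picks the UNORM packed format:

```cpp
#include <vulkan/vulkan.h>

// Minimal sketch of the fallback visible in the SurfaceFormat() hunk above.
// The non-sRGB fallback format is an assumption; the hunk only shows the
// sRGB branch before it is cut off.
VkFormat AstcFallbackFormat(bool is_srgb) {
    return is_srgb ? VK_FORMAT_A8B8G8R8_SRGB_PACK32 : VK_FORMAT_A8B8G8R8_UNORM_PACK32;
}
```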
```diff
diff --git a/src/video_core/renderer_vulkan/vk_blit_screen.cpp b/src/video_core/renderer_vulkan/vk_blit_screen.cpp
index fbd406f2b..be175a829 100644
--- a/src/video_core/renderer_vulkan/vk_blit_screen.cpp
+++ b/src/video_core/renderer_vulkan/vk_blit_screen.cpp
@@ -183,9 +183,9 @@ std::size_t GetSizeInBytes(const Tegra::FramebufferConfig& framebuffer) {

 VkFormat GetFormat(const Tegra::FramebufferConfig& framebuffer) {
     switch (framebuffer.pixel_format) {
-    case Tegra::FramebufferConfig::PixelFormat::ABGR8:
+    case Tegra::FramebufferConfig::PixelFormat::A8B8G8R8_UNORM:
         return VK_FORMAT_A8B8G8R8_UNORM_PACK32;
-    case Tegra::FramebufferConfig::PixelFormat::RGB565:
+    case Tegra::FramebufferConfig::PixelFormat::RGB565_UNORM:
         return VK_FORMAT_R5G6B5_UNORM_PACK16;
     default:
         UNIMPLEMENTED_MSG("Unknown framebuffer pixel format: {}",
```
```diff
diff --git a/src/video_core/renderer_vulkan/vk_texture_cache.cpp b/src/video_core/renderer_vulkan/vk_texture_cache.cpp
index 430031665..29c53a5fa 100644
--- a/src/video_core/renderer_vulkan/vk_texture_cache.cpp
+++ b/src/video_core/renderer_vulkan/vk_texture_cache.cpp
@@ -233,7 +233,7 @@ void CachedSurface::UploadTexture(const std::vector<u8>& staging_buffer) {
 void CachedSurface::DownloadTexture(std::vector<u8>& staging_buffer) {
     UNIMPLEMENTED_IF(params.IsBuffer());

-    if (params.pixel_format == VideoCore::Surface::PixelFormat::A1B5G5R5U) {
+    if (params.pixel_format == VideoCore::Surface::PixelFormat::A1B5G5R5_UNORM) {
         LOG_WARNING(Render_Vulkan, "A1B5G5R5 flushing is stubbed");
     }

@@ -382,7 +382,7 @@ VkImageView CachedSurfaceView::GetImageView(SwizzleSource x_source, SwizzleSourc

     std::array swizzle{MaxwellToVK::SwizzleSource(x_source), MaxwellToVK::SwizzleSource(y_source),
                        MaxwellToVK::SwizzleSource(z_source), MaxwellToVK::SwizzleSource(w_source)};
-    if (params.pixel_format == VideoCore::Surface::PixelFormat::A1B5G5R5U) {
+    if (params.pixel_format == VideoCore::Surface::PixelFormat::A1B5G5R5_UNORM) {
         // A1B5G5R5 is implemented as A1R5G5B5, we have to change the swizzle here.
         std::swap(swizzle[0], swizzle[2]);
     }
```
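The GetImageView() hunk above keeps the existing A1B5G5R5 workaround: the format is backed by VK_FORMAT_A1R5G5B5_UNORM_PACK16, so the image view swaps the red and blue swizzle sources. A standalone sketch of that idea (the function name is illustrative, not yuzu's):

```cpp
#include <array>
#include <utility>
#include <vulkan/vulkan.h>

// Sketch: emulating an A1B5G5R5 view on top of VK_FORMAT_A1R5G5B5_UNORM_PACK16
// by exchanging the first and third swizzle sources, as the hunk above does.
std::array<VkComponentSwizzle, 4> SwizzleForEmulatedA1B5G5R5(
    std::array<VkComponentSwizzle, 4> swizzle) {
    std::swap(swizzle[0], swizzle[2]); // reroute red and blue through the view
    return swizzle;
}
```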
```diff
@@ -394,11 +394,11 @@ VkImageView CachedSurfaceView::GetImageView(SwizzleSource x_source, SwizzleSourc
     UNIMPLEMENTED_IF(x_source != SwizzleSource::R && x_source != SwizzleSource::G);
     const bool is_first = x_source == SwizzleSource::R;
     switch (params.pixel_format) {
-    case VideoCore::Surface::PixelFormat::Z24S8:
+    case VideoCore::Surface::PixelFormat::D24_UNORM_S8_UINT:
-    case VideoCore::Surface::PixelFormat::Z32FS8:
+    case VideoCore::Surface::PixelFormat::D32_FLOAT_S8_UINT:
         aspect = is_first ? VK_IMAGE_ASPECT_DEPTH_BIT : VK_IMAGE_ASPECT_STENCIL_BIT;
         break;
-    case VideoCore::Surface::PixelFormat::S8Z24:
+    case VideoCore::Surface::PixelFormat::S8_UINT_D24_UNORM:
         aspect = is_first ? VK_IMAGE_ASPECT_STENCIL_BIT : VK_IMAGE_ASPECT_DEPTH_BIT;
         break;
     default:
```
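The final hunk only renames the depth-stencil cases, but the surrounding logic is worth spelling out: for combined depth-stencil formats the view exposes a single aspect, chosen from which component of the guest format the x swizzle source selects (SwizzleSource::R picks the first named component, SwizzleSource::G the second). A hedged sketch of that rule, with illustrative names that are not yuzu's:

```cpp
#include <vulkan/vulkan.h>

// Sketch of the aspect selection in GetImageView() above: the view samples
// depth when the requested component matches where depth sits in the guest
// format name (first for D24_UNORM_S8_UINT / D32_FLOAT_S8_UINT, second for
// S8_UINT_D24_UNORM), and stencil otherwise.
VkImageAspectFlags AspectForDepthStencilView(bool requests_first_component,
                                             bool depth_named_first) {
    const bool wants_depth = requests_first_component == depth_named_first;
    return wants_depth ? VK_IMAGE_ASPECT_DEPTH_BIT : VK_IMAGE_ASPECT_STENCIL_BIT;
}
```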