diff options
| author | 2020-07-21 00:13:04 -0400 | |
|---|---|---|
| committer | 2020-07-21 00:13:04 -0400 | |
| commit | 3d13d7f48f3bf7c1fb7b7673e963aa3e54db2838 (patch) | |
| tree | 3214c882ce18526a3e57223ae96f06e7083369f2 /src/video_core/renderer_vulkan | |
| parent | Merge pull request #4168 from ReinUsesLisp/global-memory (diff) | |
| parent | video_core: Rearrange pixel format names (diff) | |
| download | yuzu-3d13d7f48f3bf7c1fb7b7673e963aa3e54db2838.tar.gz yuzu-3d13d7f48f3bf7c1fb7b7673e963aa3e54db2838.tar.xz yuzu-3d13d7f48f3bf7c1fb7b7673e963aa3e54db2838.zip | |
Merge pull request #4324 from ReinUsesLisp/formats
video_core: Fix, add and rename pixel formats
Diffstat (limited to 'src/video_core/renderer_vulkan')
| -rw-r--r-- | src/video_core/renderer_vulkan/maxwell_to_vk.cpp | 173 | ||||
| -rw-r--r-- | src/video_core/renderer_vulkan/vk_blit_screen.cpp | 4 | ||||
| -rw-r--r-- | src/video_core/renderer_vulkan/vk_device.cpp | 9 | ||||
| -rw-r--r-- | src/video_core/renderer_vulkan/vk_texture_cache.cpp | 10 |
4 files changed, 108 insertions(+), 88 deletions(-)
diff --git a/src/video_core/renderer_vulkan/maxwell_to_vk.cpp b/src/video_core/renderer_vulkan/maxwell_to_vk.cpp index d7f1ae89f..f8c77f4fa 100644 --- a/src/video_core/renderer_vulkan/maxwell_to_vk.cpp +++ b/src/video_core/renderer_vulkan/maxwell_to_vk.cpp | |||
| @@ -117,90 +117,101 @@ struct FormatTuple { | |||
| 117 | VkFormat format; ///< Vulkan format | 117 | VkFormat format; ///< Vulkan format |
| 118 | int usage = 0; ///< Describes image format usage | 118 | int usage = 0; ///< Describes image format usage |
| 119 | } constexpr tex_format_tuples[] = { | 119 | } constexpr tex_format_tuples[] = { |
| 120 | {VK_FORMAT_A8B8G8R8_UNORM_PACK32, Attachable | Storage}, // ABGR8U | 120 | {VK_FORMAT_A8B8G8R8_UNORM_PACK32, Attachable | Storage}, // A8B8G8R8_UNORM |
| 121 | {VK_FORMAT_A8B8G8R8_SNORM_PACK32, Attachable | Storage}, // ABGR8S | 121 | {VK_FORMAT_A8B8G8R8_SNORM_PACK32, Attachable | Storage}, // A8B8G8R8_SNORM |
| 122 | {VK_FORMAT_A8B8G8R8_UINT_PACK32, Attachable | Storage}, // ABGR8UI | 122 | {VK_FORMAT_A8B8G8R8_SINT_PACK32, Attachable | Storage}, // A8B8G8R8_SINT |
| 123 | {VK_FORMAT_B5G6R5_UNORM_PACK16}, // B5G6R5U | 123 | {VK_FORMAT_A8B8G8R8_UINT_PACK32, Attachable | Storage}, // A8B8G8R8_UINT |
| 124 | {VK_FORMAT_A2B10G10R10_UNORM_PACK32, Attachable | Storage}, // A2B10G10R10U | 124 | {VK_FORMAT_R5G6B5_UNORM_PACK16, Attachable}, // R5G6B5_UNORM |
| 125 | {VK_FORMAT_A1R5G5B5_UNORM_PACK16, Attachable}, // A1B5G5R5U (flipped with swizzle) | 125 | {VK_FORMAT_B5G6R5_UNORM_PACK16, Attachable}, // B5G6R5_UNORM |
| 126 | {VK_FORMAT_R8_UNORM, Attachable | Storage}, // R8U | 126 | {VK_FORMAT_A1R5G5B5_UNORM_PACK16, Attachable}, // A1R5G5B5_UNORM |
| 127 | {VK_FORMAT_R8_UINT, Attachable | Storage}, // R8UI | 127 | {VK_FORMAT_A2B10G10R10_UNORM_PACK32, Attachable | Storage}, // A2B10G10R10_UNORM |
| 128 | {VK_FORMAT_R16G16B16A16_SFLOAT, Attachable | Storage}, // RGBA16F | 128 | {VK_FORMAT_A2B10G10R10_UINT_PACK32, Attachable | Storage}, // A2B10G10R10_UINT |
| 129 | {VK_FORMAT_R16G16B16A16_UNORM, Attachable | Storage}, // RGBA16U | 129 | {VK_FORMAT_A1R5G5B5_UNORM_PACK16, Attachable}, // A1B5G5R5_UNORM (flipped with swizzle) |
| 130 | {VK_FORMAT_R16G16B16A16_SNORM, Attachable | Storage}, // RGBA16S | 130 | {VK_FORMAT_R8_UNORM, Attachable | Storage}, // R8_UNORM |
| 131 | {VK_FORMAT_R16G16B16A16_UINT, Attachable | Storage}, // RGBA16UI | 131 | {VK_FORMAT_R8_SNORM, Attachable | Storage}, // R8_SNORM |
| 132 | {VK_FORMAT_B10G11R11_UFLOAT_PACK32, Attachable | Storage}, // R11FG11FB10F | 132 | {VK_FORMAT_R8_SINT, Attachable | Storage}, // R8_SINT |
| 133 | {VK_FORMAT_R32G32B32A32_UINT, Attachable | Storage}, // RGBA32UI | 133 | {VK_FORMAT_R8_UINT, Attachable | Storage}, // R8_UINT |
| 134 | {VK_FORMAT_BC1_RGBA_UNORM_BLOCK}, // DXT1 | 134 | {VK_FORMAT_R16G16B16A16_SFLOAT, Attachable | Storage}, // R16G16B16A16_FLOAT |
| 135 | {VK_FORMAT_BC2_UNORM_BLOCK}, // DXT23 | 135 | {VK_FORMAT_R16G16B16A16_UNORM, Attachable | Storage}, // R16G16B16A16_UNORM |
| 136 | {VK_FORMAT_BC3_UNORM_BLOCK}, // DXT45 | 136 | {VK_FORMAT_R16G16B16A16_SNORM, Attachable | Storage}, // R16G16B16A16_SNORM |
| 137 | {VK_FORMAT_BC4_UNORM_BLOCK}, // DXN1 | 137 | {VK_FORMAT_R16G16B16A16_SINT, Attachable | Storage}, // R16G16B16A16_SINT |
| 138 | {VK_FORMAT_BC5_UNORM_BLOCK}, // DXN2UNORM | 138 | {VK_FORMAT_R16G16B16A16_UINT, Attachable | Storage}, // R16G16B16A16_UINT |
| 139 | {VK_FORMAT_BC5_SNORM_BLOCK}, // DXN2SNORM | 139 | {VK_FORMAT_B10G11R11_UFLOAT_PACK32, Attachable | Storage}, // B10G11R11_FLOAT |
| 140 | {VK_FORMAT_BC7_UNORM_BLOCK}, // BC7U | 140 | {VK_FORMAT_R32G32B32A32_UINT, Attachable | Storage}, // R32G32B32A32_UINT |
| 141 | {VK_FORMAT_BC6H_UFLOAT_BLOCK}, // BC6H_UF16 | 141 | {VK_FORMAT_BC1_RGBA_UNORM_BLOCK}, // BC1_RGBA_UNORM |
| 142 | {VK_FORMAT_BC6H_SFLOAT_BLOCK}, // BC6H_SF16 | 142 | {VK_FORMAT_BC2_UNORM_BLOCK}, // BC2_UNORM |
| 143 | {VK_FORMAT_ASTC_4x4_UNORM_BLOCK}, // ASTC_2D_4X4 | 143 | {VK_FORMAT_BC3_UNORM_BLOCK}, // BC3_UNORM |
| 144 | {VK_FORMAT_B8G8R8A8_UNORM, Attachable}, // BGRA8 | 144 | {VK_FORMAT_BC4_UNORM_BLOCK}, // BC4_UNORM |
| 145 | {VK_FORMAT_R32G32B32A32_SFLOAT, Attachable | Storage}, // RGBA32F | 145 | {VK_FORMAT_BC4_SNORM_BLOCK}, // BC4_SNORM |
| 146 | {VK_FORMAT_R32G32_SFLOAT, Attachable | Storage}, // RG32F | 146 | {VK_FORMAT_BC5_UNORM_BLOCK}, // BC5_UNORM |
| 147 | {VK_FORMAT_R32_SFLOAT, Attachable | Storage}, // R32F | 147 | {VK_FORMAT_BC5_SNORM_BLOCK}, // BC5_SNORM |
| 148 | {VK_FORMAT_R16_SFLOAT, Attachable | Storage}, // R16F | 148 | {VK_FORMAT_BC7_UNORM_BLOCK}, // BC7_UNORM |
| 149 | {VK_FORMAT_R16_UNORM, Attachable | Storage}, // R16U | 149 | {VK_FORMAT_BC6H_UFLOAT_BLOCK}, // BC6H_UFLOAT |
| 150 | {VK_FORMAT_UNDEFINED}, // R16S | 150 | {VK_FORMAT_BC6H_SFLOAT_BLOCK}, // BC6H_SFLOAT |
| 151 | {VK_FORMAT_R16_UINT, Attachable | Storage}, // R16UI | 151 | {VK_FORMAT_ASTC_4x4_UNORM_BLOCK}, // ASTC_2D_4X4_UNORM |
| 152 | {VK_FORMAT_UNDEFINED}, // R16I | 152 | {VK_FORMAT_B8G8R8A8_UNORM, Attachable}, // B8G8R8A8_UNORM |
| 153 | {VK_FORMAT_R16G16_UNORM, Attachable | Storage}, // RG16 | 153 | {VK_FORMAT_R32G32B32A32_SFLOAT, Attachable | Storage}, // R32G32B32A32_FLOAT |
| 154 | {VK_FORMAT_R16G16_SFLOAT, Attachable | Storage}, // RG16F | 154 | {VK_FORMAT_R32G32B32A32_SINT, Attachable | Storage}, // R32G32B32A32_SINT |
| 155 | {VK_FORMAT_UNDEFINED}, // RG16UI | 155 | {VK_FORMAT_R32G32_SFLOAT, Attachable | Storage}, // R32G32_FLOAT |
| 156 | {VK_FORMAT_UNDEFINED}, // RG16I | 156 | {VK_FORMAT_R32G32_SINT, Attachable | Storage}, // R32G32_SINT |
| 157 | {VK_FORMAT_R16G16_SNORM, Attachable | Storage}, // RG16S | 157 | {VK_FORMAT_R32_SFLOAT, Attachable | Storage}, // R32_FLOAT |
| 158 | {VK_FORMAT_UNDEFINED}, // RGB32F | 158 | {VK_FORMAT_R16_SFLOAT, Attachable | Storage}, // R16_FLOAT |
| 159 | {VK_FORMAT_R8G8B8A8_SRGB, Attachable}, // RGBA8_SRGB | 159 | {VK_FORMAT_R16_UNORM, Attachable | Storage}, // R16_UNORM |
| 160 | {VK_FORMAT_R8G8_UNORM, Attachable | Storage}, // RG8U | 160 | {VK_FORMAT_UNDEFINED}, // R16_SNORM |
| 161 | {VK_FORMAT_R8G8_SNORM, Attachable | Storage}, // RG8S | 161 | {VK_FORMAT_R16_UINT, Attachable | Storage}, // R16_UINT |
| 162 | {VK_FORMAT_R8G8_UINT, Attachable | Storage}, // RG8UI | 162 | {VK_FORMAT_UNDEFINED}, // R16_SINT |
| 163 | {VK_FORMAT_R32G32_UINT, Attachable | Storage}, // RG32UI | 163 | {VK_FORMAT_R16G16_UNORM, Attachable | Storage}, // R16G16_UNORM |
| 164 | {VK_FORMAT_UNDEFINED}, // RGBX16F | 164 | {VK_FORMAT_R16G16_SFLOAT, Attachable | Storage}, // R16G16_FLOAT |
| 165 | {VK_FORMAT_R32_UINT, Attachable | Storage}, // R32UI | 165 | {VK_FORMAT_UNDEFINED}, // R16G16_UINT |
| 166 | {VK_FORMAT_R32_SINT, Attachable | Storage}, // R32I | 166 | {VK_FORMAT_UNDEFINED}, // R16G16_SINT |
| 167 | {VK_FORMAT_ASTC_8x8_UNORM_BLOCK}, // ASTC_2D_8X8 | 167 | {VK_FORMAT_R16G16_SNORM, Attachable | Storage}, // R16G16_SNORM |
| 168 | {VK_FORMAT_UNDEFINED}, // ASTC_2D_8X5 | 168 | {VK_FORMAT_UNDEFINED}, // R32G32B32_FLOAT |
| 169 | {VK_FORMAT_UNDEFINED}, // ASTC_2D_5X4 | 169 | {VK_FORMAT_R8G8B8A8_SRGB, Attachable}, // A8B8G8R8_SRGB |
| 170 | {VK_FORMAT_B8G8R8A8_SRGB, Attachable}, // BGRA8_SRGB | 170 | {VK_FORMAT_R8G8_UNORM, Attachable | Storage}, // R8G8_UNORM |
| 171 | {VK_FORMAT_BC1_RGBA_SRGB_BLOCK}, // DXT1_SRGB | 171 | {VK_FORMAT_R8G8_SNORM, Attachable | Storage}, // R8G8_SNORM |
| 172 | {VK_FORMAT_BC2_SRGB_BLOCK}, // DXT23_SRGB | 172 | {VK_FORMAT_R8G8_SINT, Attachable | Storage}, // R8G8_SINT |
| 173 | {VK_FORMAT_BC3_SRGB_BLOCK}, // DXT45_SRGB | 173 | {VK_FORMAT_R8G8_UINT, Attachable | Storage}, // R8G8_UINT |
| 174 | {VK_FORMAT_BC7_SRGB_BLOCK}, // BC7U_SRGB | 174 | {VK_FORMAT_R32G32_UINT, Attachable | Storage}, // R32G32_UINT |
| 175 | {VK_FORMAT_R4G4B4A4_UNORM_PACK16, Attachable}, // R4G4B4A4U | 175 | {VK_FORMAT_UNDEFINED}, // R16G16B16X16_FLOAT |
| 176 | {VK_FORMAT_ASTC_4x4_SRGB_BLOCK}, // ASTC_2D_4X4_SRGB | 176 | {VK_FORMAT_R32_UINT, Attachable | Storage}, // R32_UINT |
| 177 | {VK_FORMAT_ASTC_8x8_SRGB_BLOCK}, // ASTC_2D_8X8_SRGB | 177 | {VK_FORMAT_R32_SINT, Attachable | Storage}, // R32_SINT |
| 178 | {VK_FORMAT_ASTC_8x5_SRGB_BLOCK}, // ASTC_2D_8X5_SRGB | 178 | {VK_FORMAT_ASTC_8x8_UNORM_BLOCK}, // ASTC_2D_8X8_UNORM |
| 179 | {VK_FORMAT_ASTC_5x4_SRGB_BLOCK}, // ASTC_2D_5X4_SRGB | 179 | {VK_FORMAT_UNDEFINED}, // ASTC_2D_8X5_UNORM |
| 180 | {VK_FORMAT_ASTC_5x5_UNORM_BLOCK}, // ASTC_2D_5X5 | 180 | {VK_FORMAT_UNDEFINED}, // ASTC_2D_5X4_UNORM |
| 181 | {VK_FORMAT_ASTC_5x5_SRGB_BLOCK}, // ASTC_2D_5X5_SRGB | 181 | {VK_FORMAT_B8G8R8A8_SRGB, Attachable}, // B8G8R8A8_SRGB |
| 182 | {VK_FORMAT_ASTC_10x8_UNORM_BLOCK}, // ASTC_2D_10X8 | 182 | {VK_FORMAT_BC1_RGBA_SRGB_BLOCK}, // BC1_RGBA_SRGB |
| 183 | {VK_FORMAT_ASTC_10x8_SRGB_BLOCK}, // ASTC_2D_10X8_SRGB | 183 | {VK_FORMAT_BC2_SRGB_BLOCK}, // BC2_SRGB |
| 184 | {VK_FORMAT_ASTC_6x6_UNORM_BLOCK}, // ASTC_2D_6X6 | 184 | {VK_FORMAT_BC3_SRGB_BLOCK}, // BC3_SRGB |
| 185 | {VK_FORMAT_ASTC_6x6_SRGB_BLOCK}, // ASTC_2D_6X6_SRGB | 185 | {VK_FORMAT_BC7_SRGB_BLOCK}, // BC7_SRGB |
| 186 | {VK_FORMAT_ASTC_10x10_UNORM_BLOCK}, // ASTC_2D_10X10 | 186 | {VK_FORMAT_R4G4B4A4_UNORM_PACK16, Attachable}, // A4B4G4R4_UNORM |
| 187 | {VK_FORMAT_ASTC_10x10_SRGB_BLOCK}, // ASTC_2D_10X10_SRGB | 187 | {VK_FORMAT_ASTC_4x4_SRGB_BLOCK}, // ASTC_2D_4X4_SRGB |
| 188 | {VK_FORMAT_ASTC_12x12_UNORM_BLOCK}, // ASTC_2D_12X12 | 188 | {VK_FORMAT_ASTC_8x8_SRGB_BLOCK}, // ASTC_2D_8X8_SRGB |
| 189 | {VK_FORMAT_ASTC_12x12_SRGB_BLOCK}, // ASTC_2D_12X12_SRGB | 189 | {VK_FORMAT_ASTC_8x5_SRGB_BLOCK}, // ASTC_2D_8X5_SRGB |
| 190 | {VK_FORMAT_ASTC_8x6_UNORM_BLOCK}, // ASTC_2D_8X6 | 190 | {VK_FORMAT_ASTC_5x4_SRGB_BLOCK}, // ASTC_2D_5X4_SRGB |
| 191 | {VK_FORMAT_ASTC_8x6_SRGB_BLOCK}, // ASTC_2D_8X6_SRGB | 191 | {VK_FORMAT_ASTC_5x5_UNORM_BLOCK}, // ASTC_2D_5X5_UNORM |
| 192 | {VK_FORMAT_ASTC_6x5_UNORM_BLOCK}, // ASTC_2D_6X5 | 192 | {VK_FORMAT_ASTC_5x5_SRGB_BLOCK}, // ASTC_2D_5X5_SRGB |
| 193 | {VK_FORMAT_ASTC_6x5_SRGB_BLOCK}, // ASTC_2D_6X5_SRGB | 193 | {VK_FORMAT_ASTC_10x8_UNORM_BLOCK}, // ASTC_2D_10X8_UNORM |
| 194 | {VK_FORMAT_E5B9G9R9_UFLOAT_PACK32}, // E5B9G9R9F | 194 | {VK_FORMAT_ASTC_10x8_SRGB_BLOCK}, // ASTC_2D_10X8_SRGB |
| 195 | {VK_FORMAT_ASTC_6x6_UNORM_BLOCK}, // ASTC_2D_6X6_UNORM | ||
| 196 | {VK_FORMAT_ASTC_6x6_SRGB_BLOCK}, // ASTC_2D_6X6_SRGB | ||
| 197 | {VK_FORMAT_ASTC_10x10_UNORM_BLOCK}, // ASTC_2D_10X10_UNORM | ||
| 198 | {VK_FORMAT_ASTC_10x10_SRGB_BLOCK}, // ASTC_2D_10X10_SRGB | ||
| 199 | {VK_FORMAT_ASTC_12x12_UNORM_BLOCK}, // ASTC_2D_12X12_UNORM | ||
| 200 | {VK_FORMAT_ASTC_12x12_SRGB_BLOCK}, // ASTC_2D_12X12_SRGB | ||
| 201 | {VK_FORMAT_ASTC_8x6_UNORM_BLOCK}, // ASTC_2D_8X6_UNORM | ||
| 202 | {VK_FORMAT_ASTC_8x6_SRGB_BLOCK}, // ASTC_2D_8X6_SRGB | ||
| 203 | {VK_FORMAT_ASTC_6x5_UNORM_BLOCK}, // ASTC_2D_6X5_UNORM | ||
| 204 | {VK_FORMAT_ASTC_6x5_SRGB_BLOCK}, // ASTC_2D_6X5_SRGB | ||
| 205 | {VK_FORMAT_E5B9G9R9_UFLOAT_PACK32}, // E5B9G9R9_FLOAT | ||
| 195 | 206 | ||
| 196 | // Depth formats | 207 | // Depth formats |
| 197 | {VK_FORMAT_D32_SFLOAT, Attachable}, // Z32F | 208 | {VK_FORMAT_D32_SFLOAT, Attachable}, // D32_FLOAT |
| 198 | {VK_FORMAT_D16_UNORM, Attachable}, // Z16 | 209 | {VK_FORMAT_D16_UNORM, Attachable}, // D16_UNORM |
| 199 | 210 | ||
| 200 | // DepthStencil formats | 211 | // DepthStencil formats |
| 201 | {VK_FORMAT_D24_UNORM_S8_UINT, Attachable}, // Z24S8 | 212 | {VK_FORMAT_D24_UNORM_S8_UINT, Attachable}, // D24_UNORM_S8_UINT |
| 202 | {VK_FORMAT_D24_UNORM_S8_UINT, Attachable}, // S8Z24 (emulated) | 213 | {VK_FORMAT_D24_UNORM_S8_UINT, Attachable}, // S8_UINT_D24_UNORM (emulated) |
| 203 | {VK_FORMAT_D32_SFLOAT_S8_UINT, Attachable}, // Z32FS8 | 214 | {VK_FORMAT_D32_SFLOAT_S8_UINT, Attachable}, // D32_FLOAT_S8_UINT |
| 204 | }; | 215 | }; |
| 205 | static_assert(std::size(tex_format_tuples) == VideoCore::Surface::MaxPixelFormat); | 216 | static_assert(std::size(tex_format_tuples) == VideoCore::Surface::MaxPixelFormat); |
| 206 | 217 | ||
| @@ -221,7 +232,7 @@ FormatInfo SurfaceFormat(const VKDevice& device, FormatType format_type, PixelFo | |||
| 221 | return {VK_FORMAT_A8B8G8R8_UNORM_PACK32, true, true}; | 232 | return {VK_FORMAT_A8B8G8R8_UNORM_PACK32, true, true}; |
| 222 | } | 233 | } |
| 223 | 234 | ||
| 224 | // Use ABGR8 on hardware that doesn't support ASTC natively | 235 | // Use A8B8G8R8_UNORM on hardware that doesn't support ASTC natively |
| 225 | if (!device.IsOptimalAstcSupported() && VideoCore::Surface::IsPixelFormatASTC(pixel_format)) { | 236 | if (!device.IsOptimalAstcSupported() && VideoCore::Surface::IsPixelFormatASTC(pixel_format)) { |
| 226 | tuple.format = VideoCore::Surface::IsPixelFormatSRGB(pixel_format) | 237 | tuple.format = VideoCore::Surface::IsPixelFormatSRGB(pixel_format) |
| 227 | ? VK_FORMAT_A8B8G8R8_SRGB_PACK32 | 238 | ? VK_FORMAT_A8B8G8R8_SRGB_PACK32 |
diff --git a/src/video_core/renderer_vulkan/vk_blit_screen.cpp b/src/video_core/renderer_vulkan/vk_blit_screen.cpp index 866813465..ce53e5a6b 100644 --- a/src/video_core/renderer_vulkan/vk_blit_screen.cpp +++ b/src/video_core/renderer_vulkan/vk_blit_screen.cpp | |||
| @@ -187,9 +187,9 @@ std::size_t GetSizeInBytes(const Tegra::FramebufferConfig& framebuffer) { | |||
| 187 | 187 | ||
| 188 | VkFormat GetFormat(const Tegra::FramebufferConfig& framebuffer) { | 188 | VkFormat GetFormat(const Tegra::FramebufferConfig& framebuffer) { |
| 189 | switch (framebuffer.pixel_format) { | 189 | switch (framebuffer.pixel_format) { |
| 190 | case Tegra::FramebufferConfig::PixelFormat::ABGR8: | 190 | case Tegra::FramebufferConfig::PixelFormat::A8B8G8R8_UNORM: |
| 191 | return VK_FORMAT_A8B8G8R8_UNORM_PACK32; | 191 | return VK_FORMAT_A8B8G8R8_UNORM_PACK32; |
| 192 | case Tegra::FramebufferConfig::PixelFormat::RGB565: | 192 | case Tegra::FramebufferConfig::PixelFormat::RGB565_UNORM: |
| 193 | return VK_FORMAT_R5G6B5_UNORM_PACK16; | 193 | return VK_FORMAT_R5G6B5_UNORM_PACK16; |
| 194 | default: | 194 | default: |
| 195 | UNIMPLEMENTED_MSG("Unknown framebuffer pixel format: {}", | 195 | UNIMPLEMENTED_MSG("Unknown framebuffer pixel format: {}", |
diff --git a/src/video_core/renderer_vulkan/vk_device.cpp b/src/video_core/renderer_vulkan/vk_device.cpp index 26379ee01..6245e0d78 100644 --- a/src/video_core/renderer_vulkan/vk_device.cpp +++ b/src/video_core/renderer_vulkan/vk_device.cpp | |||
| @@ -84,14 +84,19 @@ std::unordered_map<VkFormat, VkFormatProperties> GetFormatProperties( | |||
| 84 | VK_FORMAT_A8B8G8R8_UNORM_PACK32, | 84 | VK_FORMAT_A8B8G8R8_UNORM_PACK32, |
| 85 | VK_FORMAT_A8B8G8R8_UINT_PACK32, | 85 | VK_FORMAT_A8B8G8R8_UINT_PACK32, |
| 86 | VK_FORMAT_A8B8G8R8_SNORM_PACK32, | 86 | VK_FORMAT_A8B8G8R8_SNORM_PACK32, |
| 87 | VK_FORMAT_A8B8G8R8_SINT_PACK32, | ||
| 87 | VK_FORMAT_A8B8G8R8_SRGB_PACK32, | 88 | VK_FORMAT_A8B8G8R8_SRGB_PACK32, |
| 88 | VK_FORMAT_B5G6R5_UNORM_PACK16, | 89 | VK_FORMAT_B5G6R5_UNORM_PACK16, |
| 89 | VK_FORMAT_A2B10G10R10_UNORM_PACK32, | 90 | VK_FORMAT_A2B10G10R10_UNORM_PACK32, |
| 91 | VK_FORMAT_A2B10G10R10_UINT_PACK32, | ||
| 90 | VK_FORMAT_A1R5G5B5_UNORM_PACK16, | 92 | VK_FORMAT_A1R5G5B5_UNORM_PACK16, |
| 91 | VK_FORMAT_R32G32B32A32_SFLOAT, | 93 | VK_FORMAT_R32G32B32A32_SFLOAT, |
| 94 | VK_FORMAT_R32G32B32A32_SINT, | ||
| 92 | VK_FORMAT_R32G32B32A32_UINT, | 95 | VK_FORMAT_R32G32B32A32_UINT, |
| 93 | VK_FORMAT_R32G32_SFLOAT, | 96 | VK_FORMAT_R32G32_SFLOAT, |
| 97 | VK_FORMAT_R32G32_SINT, | ||
| 94 | VK_FORMAT_R32G32_UINT, | 98 | VK_FORMAT_R32G32_UINT, |
| 99 | VK_FORMAT_R16G16B16A16_SINT, | ||
| 95 | VK_FORMAT_R16G16B16A16_UINT, | 100 | VK_FORMAT_R16G16B16A16_UINT, |
| 96 | VK_FORMAT_R16G16B16A16_SNORM, | 101 | VK_FORMAT_R16G16B16A16_SNORM, |
| 97 | VK_FORMAT_R16G16B16A16_UNORM, | 102 | VK_FORMAT_R16G16B16A16_UNORM, |
| @@ -103,8 +108,11 @@ std::unordered_map<VkFormat, VkFormatProperties> GetFormatProperties( | |||
| 103 | VK_FORMAT_R8G8B8A8_SRGB, | 108 | VK_FORMAT_R8G8B8A8_SRGB, |
| 104 | VK_FORMAT_R8G8_UNORM, | 109 | VK_FORMAT_R8G8_UNORM, |
| 105 | VK_FORMAT_R8G8_SNORM, | 110 | VK_FORMAT_R8G8_SNORM, |
| 111 | VK_FORMAT_R8G8_SINT, | ||
| 106 | VK_FORMAT_R8G8_UINT, | 112 | VK_FORMAT_R8G8_UINT, |
| 107 | VK_FORMAT_R8_UNORM, | 113 | VK_FORMAT_R8_UNORM, |
| 114 | VK_FORMAT_R8_SNORM, | ||
| 115 | VK_FORMAT_R8_SINT, | ||
| 108 | VK_FORMAT_R8_UINT, | 116 | VK_FORMAT_R8_UINT, |
| 109 | VK_FORMAT_B10G11R11_UFLOAT_PACK32, | 117 | VK_FORMAT_B10G11R11_UFLOAT_PACK32, |
| 110 | VK_FORMAT_R32_SFLOAT, | 118 | VK_FORMAT_R32_SFLOAT, |
| @@ -124,6 +132,7 @@ std::unordered_map<VkFormat, VkFormatProperties> GetFormatProperties( | |||
| 124 | VK_FORMAT_BC2_UNORM_BLOCK, | 132 | VK_FORMAT_BC2_UNORM_BLOCK, |
| 125 | VK_FORMAT_BC3_UNORM_BLOCK, | 133 | VK_FORMAT_BC3_UNORM_BLOCK, |
| 126 | VK_FORMAT_BC4_UNORM_BLOCK, | 134 | VK_FORMAT_BC4_UNORM_BLOCK, |
| 135 | VK_FORMAT_BC4_SNORM_BLOCK, | ||
| 127 | VK_FORMAT_BC5_UNORM_BLOCK, | 136 | VK_FORMAT_BC5_UNORM_BLOCK, |
| 128 | VK_FORMAT_BC5_SNORM_BLOCK, | 137 | VK_FORMAT_BC5_SNORM_BLOCK, |
| 129 | VK_FORMAT_BC7_UNORM_BLOCK, | 138 | VK_FORMAT_BC7_UNORM_BLOCK, |
diff --git a/src/video_core/renderer_vulkan/vk_texture_cache.cpp b/src/video_core/renderer_vulkan/vk_texture_cache.cpp index 9bc18c21a..d102e6d27 100644 --- a/src/video_core/renderer_vulkan/vk_texture_cache.cpp +++ b/src/video_core/renderer_vulkan/vk_texture_cache.cpp | |||
| @@ -235,7 +235,7 @@ void CachedSurface::UploadTexture(const std::vector<u8>& staging_buffer) { | |||
| 235 | void CachedSurface::DownloadTexture(std::vector<u8>& staging_buffer) { | 235 | void CachedSurface::DownloadTexture(std::vector<u8>& staging_buffer) { |
| 236 | UNIMPLEMENTED_IF(params.IsBuffer()); | 236 | UNIMPLEMENTED_IF(params.IsBuffer()); |
| 237 | 237 | ||
| 238 | if (params.pixel_format == VideoCore::Surface::PixelFormat::A1B5G5R5U) { | 238 | if (params.pixel_format == VideoCore::Surface::PixelFormat::A1B5G5R5_UNORM) { |
| 239 | LOG_WARNING(Render_Vulkan, "A1B5G5R5 flushing is stubbed"); | 239 | LOG_WARNING(Render_Vulkan, "A1B5G5R5 flushing is stubbed"); |
| 240 | } | 240 | } |
| 241 | 241 | ||
| @@ -385,7 +385,7 @@ VkImageView CachedSurfaceView::GetImageView(SwizzleSource x_source, SwizzleSourc | |||
| 385 | 385 | ||
| 386 | std::array swizzle{MaxwellToVK::SwizzleSource(x_source), MaxwellToVK::SwizzleSource(y_source), | 386 | std::array swizzle{MaxwellToVK::SwizzleSource(x_source), MaxwellToVK::SwizzleSource(y_source), |
| 387 | MaxwellToVK::SwizzleSource(z_source), MaxwellToVK::SwizzleSource(w_source)}; | 387 | MaxwellToVK::SwizzleSource(z_source), MaxwellToVK::SwizzleSource(w_source)}; |
| 388 | if (params.pixel_format == VideoCore::Surface::PixelFormat::A1B5G5R5U) { | 388 | if (params.pixel_format == VideoCore::Surface::PixelFormat::A1B5G5R5_UNORM) { |
| 389 | // A1B5G5R5 is implemented as A1R5G5B5, we have to change the swizzle here. | 389 | // A1B5G5R5 is implemented as A1R5G5B5, we have to change the swizzle here. |
| 390 | std::swap(swizzle[0], swizzle[2]); | 390 | std::swap(swizzle[0], swizzle[2]); |
| 391 | } | 391 | } |
| @@ -397,11 +397,11 @@ VkImageView CachedSurfaceView::GetImageView(SwizzleSource x_source, SwizzleSourc | |||
| 397 | UNIMPLEMENTED_IF(x_source != SwizzleSource::R && x_source != SwizzleSource::G); | 397 | UNIMPLEMENTED_IF(x_source != SwizzleSource::R && x_source != SwizzleSource::G); |
| 398 | const bool is_first = x_source == SwizzleSource::R; | 398 | const bool is_first = x_source == SwizzleSource::R; |
| 399 | switch (params.pixel_format) { | 399 | switch (params.pixel_format) { |
| 400 | case VideoCore::Surface::PixelFormat::Z24S8: | 400 | case VideoCore::Surface::PixelFormat::D24_UNORM_S8_UINT: |
| 401 | case VideoCore::Surface::PixelFormat::Z32FS8: | 401 | case VideoCore::Surface::PixelFormat::D32_FLOAT_S8_UINT: |
| 402 | aspect = is_first ? VK_IMAGE_ASPECT_DEPTH_BIT : VK_IMAGE_ASPECT_STENCIL_BIT; | 402 | aspect = is_first ? VK_IMAGE_ASPECT_DEPTH_BIT : VK_IMAGE_ASPECT_STENCIL_BIT; |
| 403 | break; | 403 | break; |
| 404 | case VideoCore::Surface::PixelFormat::S8Z24: | 404 | case VideoCore::Surface::PixelFormat::S8_UINT_D24_UNORM: |
| 405 | aspect = is_first ? VK_IMAGE_ASPECT_STENCIL_BIT : VK_IMAGE_ASPECT_DEPTH_BIT; | 405 | aspect = is_first ? VK_IMAGE_ASPECT_STENCIL_BIT : VK_IMAGE_ASPECT_DEPTH_BIT; |
| 406 | break; | 406 | break; |
| 407 | default: | 407 | default: |