diff options
Diffstat (limited to '')
| -rw-r--r-- | src/video_core/host_shaders/astc_decoder.comp | 21 |
1 file changed, 10 insertions, 11 deletions
diff --git a/src/video_core/host_shaders/astc_decoder.comp b/src/video_core/host_shaders/astc_decoder.comp index 70f147d80..a5d9c97b4 100644 --- a/src/video_core/host_shaders/astc_decoder.comp +++ b/src/video_core/host_shaders/astc_decoder.comp | |||
| @@ -54,13 +54,13 @@ const uint TRIT = 2u; | |||
| 54 | 54 | ||
| 55 | // ASTC Encodings data, sorted in ascending order based on their BitLength value | 55 | // ASTC Encodings data, sorted in ascending order based on their BitLength value |
| 56 | // (see GetBitLength() function) | 56 | // (see GetBitLength() function) |
| 57 | const uvec4 encoding_values[6] = uvec4[]( | 57 | const uint encoding_values[22] = uint[]( |
| 58 | uvec4((JUST_BITS), (JUST_BITS | (1u << 8u)), (TRIT), (JUST_BITS | (2u << 8u))), | 58 | (JUST_BITS), (JUST_BITS | (1u << 8u)), (TRIT), (JUST_BITS | (2u << 8u)), |
| 59 | uvec4((QUINT), (TRIT | (1u << 8u)), (JUST_BITS | (3u << 8u)), (QUINT | (1u << 8u))), | 59 | (QUINT), (TRIT | (1u << 8u)), (JUST_BITS | (3u << 8u)), (QUINT | (1u << 8u)), |
| 60 | uvec4((TRIT | (2u << 8u)), (JUST_BITS | (4u << 8u)), (QUINT | (2u << 8u)), (TRIT | (3u << 8u))), | 60 | (TRIT | (2u << 8u)), (JUST_BITS | (4u << 8u)), (QUINT | (2u << 8u)), (TRIT | (3u << 8u)), |
| 61 | uvec4((JUST_BITS | (5u << 8u)), (QUINT | (3u << 8u)), (TRIT | (4u << 8u)), (JUST_BITS | (6u << 8u))), | 61 | (JUST_BITS | (5u << 8u)), (QUINT | (3u << 8u)), (TRIT | (4u << 8u)), (JUST_BITS | (6u << 8u)), |
| 62 | uvec4((QUINT | (4u << 8u)), (TRIT | (5u << 8u)), (JUST_BITS | (7u << 8u)), (QUINT | (5u << 8u))), | 62 | (QUINT | (4u << 8u)), (TRIT | (5u << 8u)), (JUST_BITS | (7u << 8u)), (QUINT | (5u << 8u)), |
| 63 | uvec4((TRIT | (6u << 8u)), (JUST_BITS | (8u << 8u)), 0u, 0u)); | 63 | (TRIT | (6u << 8u)), (JUST_BITS | (8u << 8u))); |
| 64 | 64 | ||
| 65 | // Input ASTC texture globals | 65 | // Input ASTC texture globals |
| 66 | int total_bitsread = 0; | 66 | int total_bitsread = 0; |
| @@ -313,8 +313,7 @@ EncodingData GetEncodingFromVector(uint index) { | |||
| 313 | 313 | ||
| 314 | // Returns the number of bits required to encode n_vals values. | 314 | // Returns the number of bits required to encode n_vals values. |
| 315 | uint GetBitLength(uint n_vals, uint encoding_index) { | 315 | uint GetBitLength(uint n_vals, uint encoding_index) { |
| 316 | const EncodingData encoding_value = | 316 | const EncodingData encoding_value = EncodingData(encoding_values[encoding_index]); |
| 317 | EncodingData(encoding_values[encoding_index / 4][encoding_index % 4]); | ||
| 318 | const uint encoding = Encoding(encoding_value); | 317 | const uint encoding = Encoding(encoding_value); |
| 319 | uint total_bits = NumBits(encoding_value) * n_vals; | 318 | uint total_bits = NumBits(encoding_value) * n_vals; |
| 320 | if (encoding == TRIT) { | 319 | if (encoding == TRIT) { |
| @@ -435,7 +434,7 @@ void DecodeTritBlock(uint num_bits) { | |||
| 435 | } | 434 | } |
| 436 | 435 | ||
| 437 | void DecodeIntegerSequence(uint max_range, uint num_values) { | 436 | void DecodeIntegerSequence(uint max_range, uint num_values) { |
| 438 | EncodingData val = EncodingData(encoding_values[max_range / 4][max_range % 4]); | 437 | EncodingData val = EncodingData(encoding_values[max_range]); |
| 439 | const uint encoding = Encoding(val); | 438 | const uint encoding = Encoding(val); |
| 440 | const uint num_bits = NumBits(val); | 439 | const uint num_bits = NumBits(val); |
| 441 | uint vals_decoded = 0; | 440 | uint vals_decoded = 0; |
| @@ -467,7 +466,7 @@ void DecodeColorValues(uvec4 modes, uint num_partitions, uint color_data_bits) { | |||
| 467 | // Find the largest encoding that's within color_data_bits | 466 | // Find the largest encoding that's within color_data_bits |
| 468 | // TODO(ameerj): profile with binary search | 467 | // TODO(ameerj): profile with binary search |
| 469 | int range = 0; | 468 | int range = 0; |
| 470 | while (++range < ((encoding_values.length() * 4) - 2)) { | 469 | while (++range < encoding_values.length()) { |
| 471 | const uint bit_length = GetBitLength(num_values, range); | 470 | const uint bit_length = GetBitLength(num_values, range); |
| 472 | if (bit_length > color_data_bits) { | 471 | if (bit_length > color_data_bits) { |
| 473 | break; | 472 | break; |