astc_decoder: Fix LDR CEM1 endpoint calculation
Per the spec, L1 is clamped to the value 0xff if it is greater than 0xff. An oversight caused us to take the maximum of the computed endpoint and 0xff, rather than the minimum — so instead of being capped at 0xff, L1 could never fall below it. Huge thanks to wwylele for finding this. Co-Authored-By: Weiyi Wang <wwylele@gmail.com>
This commit is contained in:
parent
f9bfeaa2bc
commit
5fc8393125
@ -763,7 +763,7 @@ void ComputeEndpoints(out uvec4 ep1, out uvec4 ep2, uint color_endpoint_mode) {
|
|||||||
case 1: {
|
case 1: {
|
||||||
READ_UINT_VALUES(2)
|
READ_UINT_VALUES(2)
|
||||||
uint L0 = (v[0] >> 2) | (v[1] & 0xC0);
|
uint L0 = (v[0] >> 2) | (v[1] & 0xC0);
|
||||||
uint L1 = max(L0 + (v[1] & 0x3F), 0xFFU);
|
uint L1 = min(L0 + (v[1] & 0x3F), 0xFFU);
|
||||||
ep1 = uvec4(0xFF, L0, L0, L0);
|
ep1 = uvec4(0xFF, L0, L0, L0);
|
||||||
ep2 = uvec4(0xFF, L1, L1, L1);
|
ep2 = uvec4(0xFF, L1, L1, L1);
|
||||||
break;
|
break;
|
||||||
|
@ -1217,7 +1217,7 @@ static void ComputeEndpoints(Pixel& ep1, Pixel& ep2, const u32*& colorValues,
|
|||||||
case 1: {
|
case 1: {
|
||||||
READ_UINT_VALUES(2)
|
READ_UINT_VALUES(2)
|
||||||
u32 L0 = (v[0] >> 2) | (v[1] & 0xC0);
|
u32 L0 = (v[0] >> 2) | (v[1] & 0xC0);
|
||||||
u32 L1 = std::max(L0 + (v[1] & 0x3F), 0xFFU);
|
u32 L1 = std::min(L0 + (v[1] & 0x3F), 0xFFU);
|
||||||
ep1 = Pixel(0xFF, L0, L0, L0);
|
ep1 = Pixel(0xFF, L0, L0, L0);
|
||||||
ep2 = Pixel(0xFF, L1, L1, L1);
|
ep2 = Pixel(0xFF, L1, L1, L1);
|
||||||
} break;
|
} break;
|
||||||
|
Loading…
Reference in New Issue
Block a user