From 5fc8393125ef0084491b7acaec13e62fe593adf1 Mon Sep 17 00:00:00 2001
From: ameerj <52414509+ameerj@users.noreply.github.com>
Date: Tue, 15 Jun 2021 20:16:16 -0400
Subject: astc_decoder: Fix LDR CEM1 endpoint calculation

Per the spec, L1 is clamped to the value 0xff if it is greater than
0xff. An oversight caused us to take the maximum of L1 and 0xff, rather
than the minimum.

Huge thanks to wwylele for finding this.

Co-Authored-By: Weiyi Wang
---
 src/video_core/textures/astc.cpp | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/video_core/textures/astc.cpp b/src/video_core/textures/astc.cpp
index 6079aa709..9b2177ebd 100644
--- a/src/video_core/textures/astc.cpp
+++ b/src/video_core/textures/astc.cpp
@@ -1217,7 +1217,7 @@ static void ComputeEndpoints(Pixel& ep1, Pixel& ep2, const u32*& colorValues,
     case 1: {
         READ_UINT_VALUES(2)
         u32 L0 = (v[0] >> 2) | (v[1] & 0xC0);
-        u32 L1 = std::max(L0 + (v[1] & 0x3F), 0xFFU);
+        u32 L1 = std::min(L0 + (v[1] & 0x3F), 0xFFU);
         ep1 = Pixel(0xFF, L0, L0, L0);
         ep2 = Pixel(0xFF, L1, L1, L1);
     } break;
--
cgit v1.2.3
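
For illustration, a minimal standalone sketch of the endpoint math in the
hunk above; the ComputeL1 helper and the sample inputs are hypothetical,
not taken from the yuzu sources:

#include <algorithm>
#include <cstdint>
#include <cstdio>

// Reproduces the LDR CEM1 (luminance, base+offset) endpoint calculation:
// v0 and v1 stand in for the two decoded color values read above.
static std::uint32_t ComputeL1(std::uint32_t v0, std::uint32_t v1) {
    const std::uint32_t L0 = (v0 >> 2) | (v1 & 0xC0);
    // L0 plus the 6-bit offset can reach 0xFF + 0x3F = 0x13E, past the
    // 8-bit channel range, so the spec clamps the sum to 0xFF. std::min
    // does that; the old std::max instead raised every sum below 0xFF
    // up to 0xFF.
    return std::min(L0 + (v1 & 0x3F), 0xFFU);
}

int main() {
    const std::uint32_t v0 = 0x10; // L0 = 0x04
    const std::uint32_t v1 = 0x02; // offset = 2, so L1 should be 0x06
    std::printf("L1 = 0x%02X\n", ComputeL1(v0, v1)); // prints "L1 = 0x06"
    // With std::max, these inputs yielded 0xFF, blowing the second
    // endpoint out to full white whenever the sum was below 0xFF.
    return 0;
}

Because L1 is stored as a 6-bit offset on top of L0, the sum can exceed
the 8-bit range; that overflow is the only case the clamp exists to
handle, so the max/min mix-up inverted the behavior everywhere else.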