Fix the bug in ApplyPalette.
The auto-infer logic that detects the 'Alpha' use case (via the check '(palette[i] & 0x00ff00ffu) != 0') fails for the corner case of an image whose pixels are all black (rgb = 0) but carry different alpha values. Switch to generic use-LUT detection instead.

Change-Id: I982a8b28c8bcc43e3dc68ac358f978a4bcc14c36
parent 2d6ac422cf
commit afa3450c11
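For illustration only (not part of the commit), below is a minimal standalone C sketch of the old and new detection checks, assuming the VP8L ARGB pixel layout (alpha in bits 24..31, red in 16..23, green in 8..15, blue in 0..7); the names old_is_alpha and new_use_LUT are hypothetical, not library identifiers:

/* Sketch of the two palette checks from the diff below, as
 * standalone functions. */
#include <stdint.h>
#include <stdio.h>

/* Old check: treat the palette as "alpha" data if red and blue are
 * zero everywhere. All-black pixels (r = g = b = 0) with varying
 * alpha still pass this test, which is the bug. */
static int old_is_alpha(const uint32_t* palette, int palette_size) {
  int i;
  for (i = 0; i < palette_size; ++i) {
    if ((palette[i] & 0x00ff00ffu) != 0) return 0;
  }
  return 1;
}

/* New check: a green-indexed LUT is safe only if alpha, red and blue
 * are zero everywhere, i.e. entries differ in green alone. */
static int new_use_LUT(const uint32_t* palette, int palette_size) {
  int i;
  for (i = 0; i < palette_size; ++i) {
    if ((palette[i] & 0xffff00ffu) != 0) return 0;
  }
  return 1;
}

int main(void) {
  /* Corner case from the commit message: all-black pixels (rgb = 0)
   * with three different alpha values. */
  const uint32_t palette[] = { 0x20000000u, 0x80000000u, 0xff000000u };
  const int size = 3;
  int i;

  printf("old is_alpha: %d\n", old_is_alpha(palette, size));  /* prints 1: bug */
  printf("new use_LUT:  %d\n", new_use_LUT(palette, size));   /* prints 0: fixed */

  /* The LUT index is the green channel; here every entry maps to 0,
   * so the old path would collapse three distinct entries into one slot. */
  for (i = 0; i < size; ++i) {
    printf("entry %d -> LUT index %d\n", i, (int)((palette[i] >> 8) & 0xff));
  }
  return 0;
}

On this palette the old check reports the 'Alpha' use case even though every entry maps to the same green-channel LUT index; extending the mask to include the alpha byte rejects exactly these colliding palettes.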
@@ -816,15 +816,15 @@ static void ApplyPalette(uint32_t* src, uint32_t* dst,
                          const uint32_t* palette, int palette_size,
                          int width, int height, int xbits, uint8_t* row) {
   int i, x, y;
-  int is_alpha = 1;
+  int use_LUT = 1;
   for (i = 0; i < palette_size; ++i) {
-    if ((palette[i] & 0x00ff00ffu) != 0) {
-      is_alpha = 0;
+    if ((palette[i] & 0xffff00ffu) != 0) {
+      use_LUT = 0;
       break;
     }
   }
 
-  if (is_alpha) {
+  if (use_LUT) {
     int inv_palette[MAX_PALETTE_SIZE] = { 0 };
     for (i = 0; i < palette_size; ++i) {
       const int color = (palette[i] >> 8) & 0xff;