Insert fewer hash chain entries from the beginnings of long copies.

This makes the chains more efficient, and a larger variety of data gets tested during matching.

0.02 % compression gain at q 100, 0.05 % at default quality. 0.8 % speedup as
measured by callgrind.

0.16 % compression gain for lossy alpha ?!

Change-Id: I888120133352799eb14f5f602c7f40ab404bd665
Jyrki Alakuijala 2015-08-17 16:54:27 +00:00 committed by James Zern
parent bd55604d1b
commit 90fcfcd905
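
To make the "fewer insertions" claim concrete, here is a rough standalone sketch (not libwebp code; InsertsOld() and InsertsNew() are made-up helpers) that counts how many hash-chain insertions a single copy of a given length triggers with the old loop (k = 1 .. last - 1) versus the new pattern from the second hunk below (stride 2 until 8 positions before the end of the copy, then every remaining position):

#include <stdio.h>

/* Sketch only: mirrors the loop bounds of the diff below, nothing else. */
static int InsertsOld(int last) {
  return (last > 1) ? last - 1 : 0;         /* old loop: k = 1 .. last - 1 */
}

static int InsertsNew(int last) {
  int k, n = 0;
  for (k = 2; k < last - 8; k += 2) ++n;    /* sparse over the start of the copy */
  for (; k < last; ++k) ++n;                /* dense over the final positions */
  return n;
}

int main(void) {
  int last;
  for (last = 16; last <= 1024; last *= 4) {
    printf("copy length %4d: old %4d inserts, new %4d inserts\n",
           last, InsertsOld(last), InsertsNew(last));
  }
  return 0;
}

For a 1024-pixel copy this drops from 1023 insertions to 515, roughly half, which is where the reported callgrind speedup plausibly comes from.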


@@ -314,12 +314,15 @@ static WEBP_INLINE void AddSingleLiteral(uint32_t pixel, int use_color_cache,
                                          VP8LColorCache* const hashers,
                                          VP8LBackwardRefs* const refs) {
   PixOrCopy v;
-  if (use_color_cache && VP8LColorCacheContains(hashers, pixel)) {
-    // push pixel as a PixOrCopyCreateCacheIdx pixel
-    const int ix = VP8LColorCacheGetIndex(hashers, pixel);
-    v = PixOrCopyCreateCacheIdx(ix);
+  if (use_color_cache) {
+    const uint32_t key = VP8LColorCacheGetIndex(hashers, pixel);
+    if (VP8LColorCacheLookup(hashers, key) == pixel) {
+      v = PixOrCopyCreateCacheIdx(key);
+    } else {
+      v = PixOrCopyCreateLiteral(pixel);
+      VP8LColorCacheSet(hashers, key, pixel);
+    }
   } else {
-    if (use_color_cache) VP8LColorCacheInsert(hashers, pixel);
     v = PixOrCopyCreateLiteral(pixel);
   }
   BackwardRefsCursorAdd(refs, v);
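
The hunk above replaces the VP8LColorCacheContains() check plus a separate VP8LColorCacheGetIndex() call with a single key computation, a direct slot comparison via VP8LColorCacheLookup(), and an explicit VP8LColorCacheSet() on a miss. A minimal toy version of that lookup-or-insert pattern, with a made-up cache type and an illustrative multiplicative hash rather than libwebp's real VP8LColorCache, might look like this:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Toy 4-bit color cache, for illustration only; the type, field and hash
 * constant here are made up and do not match libwebp's internals. */
#define CACHE_BITS 4
typedef struct { uint32_t colors[1 << CACHE_BITS]; } ToyColorCache;

static uint32_t ToyCacheKey(uint32_t argb) {
  return (0x1e35a7bdu * argb) >> (32 - CACHE_BITS);
}

/* One key computation serves the hit test, the emitted cache index and the
 * insertion, which is the shape of the rewritten AddSingleLiteral() above. */
static int LookupOrInsert(ToyColorCache* const cache, uint32_t argb) {
  const uint32_t key = ToyCacheKey(argb);
  if (cache->colors[key] == argb) {
    return (int)key;              /* hit: caller emits a cache index */
  }
  cache->colors[key] = argb;      /* miss: remember the pixel ... */
  return -1;                      /* ... and the caller emits a literal */
}

int main(void) {
  ToyColorCache cache;
  memset(&cache, 0, sizeof(cache));
  printf("%d\n", LookupOrInsert(&cache, 0xff00ff00u));  /* miss -> -1 */
  printf("%d\n", LookupOrInsert(&cache, 0xff00ff00u));  /* hit  -> cache index */
  return 0;
}

The key is hashed once and reused for all three steps, instead of hashing the pixel separately for the containment test and the index lookup.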
@@ -423,9 +426,12 @@ static int BackwardReferencesLz77(int xsize, int ysize,
         }
       }
       // Add to the hash_chain (but cannot add the last pixel).
-      {
+      if (offset >= 3 && offset != xsize) {
         const int last = (len < pix_count - 1 - i) ? len : pix_count - 1 - i;
-        for (k = 1; k < last; ++k) {
+        for (k = 2; k < last - 8; k += 2) {
+          HashChainInsert(hash_chain, &argb[i + k], i + k);
+        }
+        for (; k < last; ++k) {
           HashChainInsert(hash_chain, &argb[i + k], i + k);
         }
       }
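
Two things are worth noting about the new insertion loop. First, it now runs only when offset >= 3 and offset != xsize, so copies at distance 1, 2, or exactly one row above no longer add any of their interior positions to the hash chain. Second, within a qualifying copy the last 8 positions are still inserted densely, presumably so that good match candidates remain available for the pixels immediately following the copy, while the sparser stride-2 inserts over the start of the copy are what keep the chains shorter and more varied.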