a57dbd957b
Experiments with a larger set of contexts, plus some cleanup to replace magic numbers for the number of contexts. The starting values and the rate of backward adaptation are still suspect and based on a small set of tests. Also added forward adjustment of probabilities.

The net result of adding the new context and the forward update is small compared to the old context from the legacy find_near function (down a little on derf but up by a similar amount for HD). However, with the new context and forward update, the impact of disabling the reverse update (which may be necessary in some use cases to facilitate parallel decoding) is hugely reduced.

For the old context without the forward update, the impact of turning off the reverse update (experiment was with SB off) was: Derf -0.9, Yt -1.89, ythd -2.75 and sthd -8.35, mainly at low data rates. With the new context and the forward update enabled, the impact for all the test sets was no more than 0.5-1% (again mostly at the low end).

Change-Id: Ic751b414c8ce7f7f3ebc6f19a741d774d2b4b556
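The reverse (backward) update discussed above corresponds to vp9_adapt_coef_probs(), declared in the header below: after a frame is coded, observed token counts are folded back into the coefficient probabilities. As a rough illustration of that idea only, a count-based adaptation of a single binary probability might look like the sketch below. The blend weight, the rounding, and the prob_from_counts/adapt_prob helpers are assumptions made for the example, not the codec's actual arithmetic.

/* Illustrative sketch: count-based backward adaptation of one binary
 * probability, in the spirit of (but not identical to) vp9_adapt_coef_probs().
 */
typedef unsigned char prob8_t;        /* probability of the 0-branch, in 1..255 */

/* Weight (out of 256) kept from the previous frame's probability; assumed. */
#define EXAMPLE_PRE_PROB_WEIGHT 192

/* Probability implied by the branch counts observed while coding a frame. */
static prob8_t prob_from_counts(unsigned int ct0, unsigned int ct1) {
  const unsigned int total = ct0 + ct1;
  unsigned int p;
  if (total == 0)
    return 128;                       /* no evidence: stay neutral */
  p = (255u * ct0 + total / 2) / total;
  if (p < 1) p = 1;                   /* keep the probability in 1..255 */
  if (p > 255) p = 255;
  return (prob8_t)p;
}

/* Backward ("reverse") update: blend last frame's probability with the one
 * implied by the counts, so later frames track the observed statistics. */
static prob8_t adapt_prob(prob8_t pre_prob, unsigned int ct0, unsigned int ct1) {
  const prob8_t observed = prob_from_counts(ct0, ct1);
  return (prob8_t)((pre_prob * EXAMPLE_PRE_PROB_WEIGHT +
                    observed * (256 - EXAMPLE_PRE_PROB_WEIGHT) + 128) >> 8);
}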
/*
 *  Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */


#ifndef __INC_ENTROPY_H
#define __INC_ENTROPY_H

#include "treecoder.h"
#include "blockd.h"
#include "common.h"
#include "coefupdateprobs.h"

extern const int vp9_i8x8_block[4];

/* Coefficient token alphabet */

#define ZERO_TOKEN              0       /* 0         Extra Bits 0+0 */
#define ONE_TOKEN               1       /* 1         Extra Bits 0+1 */
#define TWO_TOKEN               2       /* 2         Extra Bits 0+1 */
#define THREE_TOKEN             3       /* 3         Extra Bits 0+1 */
#define FOUR_TOKEN              4       /* 4         Extra Bits 0+1 */
#define DCT_VAL_CATEGORY1       5       /* 5-6       Extra Bits 1+1 */
#define DCT_VAL_CATEGORY2       6       /* 7-10      Extra Bits 2+1 */
#define DCT_VAL_CATEGORY3       7       /* 11-18     Extra Bits 3+1 */
#define DCT_VAL_CATEGORY4       8       /* 19-34     Extra Bits 4+1 */
#define DCT_VAL_CATEGORY5       9       /* 35-66     Extra Bits 5+1 */
#define DCT_VAL_CATEGORY6       10      /* 67+       Extra Bits 13+1 */
#define DCT_EOB_TOKEN           11      /* EOB       Extra Bits 0+0 */
#define MAX_ENTROPY_TOKENS      12
#define ENTROPY_NODES           11
#define EOSB_TOKEN              127     /* Not signalled, encoder only */

#define INTER_MODE_CONTEXTS     7

extern const vp9_tree_index vp9_coef_tree[];

extern struct vp9_token_struct vp9_coef_encodings[MAX_ENTROPY_TOKENS];

typedef struct {
  vp9_tree_p tree;
  const vp9_prob *prob;
  int Len;
  int base_val;
} vp9_extra_bit_struct;

extern vp9_extra_bit_struct vp9_extra_bits[12];   /* indexed by token value */

#define PROB_UPDATE_BASELINE_COST   7

#define MAX_PROB                255
#define DCT_MAX_VALUE           8192

/* Coefficients are predicted via a 3-dimensional probability table. */

/* Outside dimension.  0 = Y no DC, 1 = Y2, 2 = UV, 3 = Y with DC */
#define BLOCK_TYPES 4

#define BLOCK_TYPES_8X8 4

#define BLOCK_TYPES_16X16 4

/* Middle dimension is a coarsening of the coefficient's
   position within the 4x4 DCT. */

#define COEF_BANDS 8
extern DECLARE_ALIGNED(16, const int, vp9_coef_bands[16]);
extern DECLARE_ALIGNED(64, const int, vp9_coef_bands_8x8[64]);
extern DECLARE_ALIGNED(16, const int, vp9_coef_bands_16x16[256]);

/* Inside dimension is a 3-valued measure of nearby complexity, that is,
   the extent to which nearby coefficients are nonzero.  For the first
   coefficient (DC, unless block type is 0), we look at the (already encoded)
   blocks above and to the left of the current block.  The context index is
   then the number (0, 1, or 2) of these blocks having nonzero coefficients.
   After decoding a coefficient, the measure is roughly the size of the
   most recently decoded coefficient (0 for 0, 1 for 1, 2 for >1).
   Note that the intuitive meaning of this measure changes as coefficients
   are decoded, e.g., prior to the first token, a zero means that my neighbors
   are empty while, after the first token, because of the use of end-of-block,
   a zero means we just decoded a zero and hence guarantees that a non-zero
   coefficient will appear later in this block.  However, this shift
   in meaning is perfectly OK because our context depends also on the
   coefficient band (and since zigzag positions 0, 1, and 2 are in
   distinct bands). */

/*# define DC_TOKEN_CONTEXTS        3 */ /* 00, 0!0, !0!0 */
#define PREV_COEF_CONTEXTS          4

#define SUBEXP_PARAM                4   /* Subexponential code parameter */
#define MODULUS_PARAM               13  /* Modulus parameter */

extern DECLARE_ALIGNED(16, const unsigned char, vp9_prev_token_class[MAX_ENTROPY_TOKENS]);

struct VP9Common;
void vp9_default_coef_probs(struct VP9Common *);
extern DECLARE_ALIGNED(16, const int, vp9_default_zig_zag1d[16]);

extern DECLARE_ALIGNED(16, const int, vp9_col_scan[16]);
extern DECLARE_ALIGNED(16, const int, vp9_row_scan[16]);

extern DECLARE_ALIGNED(64, const int, vp9_default_zig_zag1d_8x8[64]);
void vp9_coef_tree_initialize(void);

extern DECLARE_ALIGNED(16, const int, vp9_default_zig_zag1d_16x16[256]);
void vp9_adapt_coef_probs(struct VP9Common *);

#endif
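
The token alphabet at the top of the header pairs each token with a base value and a number of extra bits (plus one sign bit). As a rough illustration of how an absolute coefficient value maps onto those tokens, the sketch below uses the thresholds from the "Extra Bits" comments; the classify_coef helper and the token_info struct are invented for the example and are not part of the codec.

/* Illustrative sketch: map an absolute coefficient value to the token
 * alphabet defined in entropy.h (category bases 5, 7, 11, 19, 35, 67). */
typedef struct {
  int token;       /* one of the *_TOKEN / DCT_VAL_CATEGORY* values */
  int extra_bits;  /* category extra bits, excluding the sign bit */
} token_info;

static token_info classify_coef(int abs_value) {
  token_info t;
  if (abs_value <= 4) {             /* ZERO_TOKEN..FOUR_TOKEN carry the value */
    t.token = abs_value;            /* token indices 0..4 match the value */
    t.extra_bits = 0;
  } else if (abs_value <= 6)  { t.token = 5;  t.extra_bits = 1;  }  /* CAT1: 5-6 */
  else if (abs_value <= 10)   { t.token = 6;  t.extra_bits = 2;  }  /* CAT2: 7-10 */
  else if (abs_value <= 18)   { t.token = 7;  t.extra_bits = 3;  }  /* CAT3: 11-18 */
  else if (abs_value <= 34)   { t.token = 8;  t.extra_bits = 4;  }  /* CAT4: 19-34 */
  else if (abs_value <= 66)   { t.token = 9;  t.extra_bits = 5;  }  /* CAT5: 35-66 */
  else                        { t.token = 10; t.extra_bits = 13; }  /* CAT6: 67+ */
  return t;
}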
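
The three dimensions documented in the comments (block type, coefficient band, previous-coefficient context) combine into a single probability lookup per coefficient. The sketch below shows one plausible way that lookup fits together; the ex_-prefixed names and the table layout are assumptions for illustration, mirroring the BLOCK_TYPES/COEF_BANDS/PREV_COEF_CONTEXTS/ENTROPY_NODES values and the vp9_coef_bands and vp9_prev_token_class tables declared in the header, which hold the real data.

/* Illustrative sketch: selecting the token probabilities for one 4x4
 * coefficient from the three context dimensions described in entropy.h.
 * The array layout and the helper are assumptions for the example. */
#define EX_BLOCK_TYPES        4    /* mirrors BLOCK_TYPES */
#define EX_COEF_BANDS         8    /* mirrors COEF_BANDS */
#define EX_PREV_COEF_CONTEXTS 4    /* mirrors PREV_COEF_CONTEXTS */
#define EX_ENTROPY_NODES      11   /* mirrors ENTROPY_NODES */

/* Hypothetical probability table: one probability per internal node of the
 * coefficient token tree, per (block type, band, previous-coef context). */
extern unsigned char ex_coef_probs[EX_BLOCK_TYPES][EX_COEF_BANDS]
                                  [EX_PREV_COEF_CONTEXTS][EX_ENTROPY_NODES];

/* Band of each zigzag position in a 4x4 block (stand-in for vp9_coef_bands). */
extern const int ex_coef_bands_4x4[16];

/* Context class (0, 1 or 2) of the previously coded token
 * (stand-in for vp9_prev_token_class). */
extern const unsigned char ex_prev_token_class[12];

/* Probabilities for the token at zigzag position `pos`, given the block type
 * and the token just decoded in this block.  For the first coefficient the
 * codec instead derives the context from the above/left blocks, as the
 * comment in entropy.h describes. */
static const unsigned char *ex_coef_probs_for(int block_type, int pos,
                                              int prev_token) {
  const int band = ex_coef_bands_4x4[pos];
  const int ctx  = ex_prev_token_class[prev_token];
  return ex_coef_probs[block_type][band][ctx];
}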