Framework to incorporate switchable dequant levels

Changed the experiment to allow switchable QUANT_PROFILES

Change-Id: I8e5e76239a4103273e9ef759d46400104ef55599
Debargha Mukherjee 2016-01-21 16:29:18 -08:00
parent b765dc8e39
commit b8a7d1fe02
15 changed files with 552 additions and 88 deletions
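For context, a minimal, self-contained sketch (not the libvpx implementation) of the per-block signaling this change introduces: with QUANT_PROFILES == 3, the chosen dq profile is coded with the tree { -0, 2, -1, -2 } added in vp9_entropymode.c below, so profile 0 takes one coded symbol and profiles 1 and 2 take two. The helper names here (next_bit, decode_dq_profile) and the raw-bit source are illustrative only; the codec itself reads the symbol through its arithmetic coder via vp9_read_tree with the dq_profile_prob probabilities.

#include <stdio.h>

/* Tree shape for QUANT_PROFILES == 3; leaf symbols are stored negated. */
static const signed char dq_profile_tree[4] = { -0, 2, -1, -2 };

/* Trivial bit source for the demo: steps through a fixed array of raw bits. */
static int next_bit(const int **bits) { return *(*bits)++; }

/* Walk the tree until a leaf (value <= 0) is reached; return the symbol. */
static int decode_dq_profile(const int **bits) {
  signed char i = 0;
  do {
    i = dq_profile_tree[i + next_bit(bits)];
  } while (i > 0);
  return -i;
}

int main(void) {
  const int stream[] = { 0,        /* profile 0: one bit  */
                         1, 0,     /* profile 1: two bits */
                         1, 1 };   /* profile 2: two bits */
  const int *p = stream;
  int n;
  for (n = 0; n < 3; ++n)
    printf("decoded dq profile %d\n", decode_dq_profile(&p));
  return 0;
}

As the diffs below show, the same symbol is only read or written when the block size, base q-index range (Q_THRESHOLD_MIN..Q_THRESHOLD_MAX), skip flag, and segment skip feature allow a switchable profile; otherwise dq_off_index defaults to 0.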

View File

@ -77,8 +77,15 @@ extern "C" {
#endif // CONFIG_MULTI_REF
#if CONFIG_NEW_QUANT
#define QUANT_PROFILES 3
#define DEFAULT_DQ 0
#define QUANT_PROFILES 2
#if QUANT_PROFILES > 1
static INLINE int switchable_dq_profile_used(BLOCK_SIZE bsize) {
return bsize >= BLOCK_16X16;
}
#define Q_THRESHOLD_MIN 0
#define Q_THRESHOLD_MAX 1000
#endif // QUANT_PROFILES > 1
#endif // CONFIG_NEW_QUANT
typedef enum {

View File

@ -944,6 +944,26 @@ const vp9_tree_index vp9_copy_mode_tree[TREE_SIZE(COPY_MODE_COUNT - 1)] = {
};
#endif // CONFIG_COPY_MODE
#if CONFIG_NEW_QUANT
#if QUANT_PROFILES == 2
const vp9_tree_index vp9_dq_profile_tree[TREE_SIZE(QUANT_PROFILES)] = {
-0, -1
};
static const vp9_prob default_dq_profile_prob[QUANT_PROFILES - 1] = {
240
};
#elif QUANT_PROFILES == 3
const vp9_tree_index vp9_dq_profile_tree[TREE_SIZE(QUANT_PROFILES)] = {
-0, 2,
-1, -2
};
static const vp9_prob default_dq_profile_prob[QUANT_PROFILES - 1] = {
240, 128
};
#endif // QUANT_PROFILES != 2 and QUANT_PROFILES != 3
#endif // CONFIG_NEW_QUANT
#if CONFIG_TX64X64
void tx_counts_to_branch_counts_64x64(const unsigned int *tx_count_64x64p,
unsigned int (*ct_64x64p)[2]) {
@ -1096,6 +1116,9 @@ void vp9_init_mode_probs(FRAME_CONTEXT *fc) {
#if CONFIG_WEDGE_PARTITION
vp9_copy(fc->wedge_interinter_prob, default_wedge_interinter_prob);
#endif // CONFIG_WEDGE_PARTITION
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
vp9_copy(fc->dq_profile_prob, default_dq_profile_prob);
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
}
const vp9_tree_index vp9_switchable_interp_tree
@ -1323,6 +1346,11 @@ void vp9_adapt_mode_probs(VP9_COMMON *cm) {
adapt_prob(pre_fc->palette_uv_enabled_prob[i],
counts->uv_palette_enabled[i]);
#endif // CONFIG_PALETTE
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
adapt_probs(vp9_dq_profile_tree, pre_fc->dq_profile_prob,
counts->dq_profile, fc->dq_profile_prob);
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
}
static void set_default_lf_deltas(struct loopfilter *lf) {

View File

@ -133,6 +133,9 @@ typedef struct frame_contexts {
#if CONFIG_GLOBAL_MOTION
vp9_prob global_motion_types_prob[GLOBAL_MOTION_TYPES - 1];
#endif // CONFIG_GLOBAL_MOTION
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
vp9_prob dq_profile_prob[QUANT_PROFILES - 1];
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
} FRAME_CONTEXT;
typedef struct {
@ -211,6 +214,9 @@ typedef struct {
#if CONFIG_GLOBAL_MOTION
unsigned int global_motion_types[GLOBAL_MOTION_TYPES];
#endif // CONFIG_GLOBAL_MOTION
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
unsigned int dq_profile[QUANT_PROFILES];
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
} FRAME_COUNTS;
extern const vp9_prob vp9_kf_uv_mode_prob[INTRA_MODES][INTRA_MODES - 1];
@ -256,6 +262,10 @@ extern const vp9_tree_index vp9_inter_compound_mode_tree
[TREE_SIZE(INTER_COMPOUND_MODES)];
#endif // CONFIG_NEW_INTER
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
extern const vp9_tree_index vp9_dq_profile_tree[TREE_SIZE(QUANT_PROFILES)];
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
void vp9_setup_past_independence(struct VP9Common *cm);
#if CONFIG_ROW_TILE
void vp9_dec_setup_past_independence(struct VP9Common *cm,

View File

@ -47,10 +47,6 @@ extern "C" {
#define FRAME_CONTEXTS_LOG2 2
#define FRAME_CONTEXTS (1 << FRAME_CONTEXTS_LOG2)
#if CONFIG_NEW_QUANT
#define QUANT_PROFILES 3
#endif // CONFIG_NEW_QUANT
extern const struct {
PARTITION_CONTEXT above;
PARTITION_CONTEXT left;

View File

@ -41,64 +41,94 @@ static const uint8_t vp9_nuq_knots_lossless[COEF_BANDS][NUQ_KNOTS] = {
#endif // CONFIG_TX_SKIP
};
static const uint8_t vp9_nuq_knots_mid[COEF_BANDS][NUQ_KNOTS] = {
{84, 124, 128}, // dc, band 0
{84, 124, 128}, // band 1
{84, 124, 128}, // band 2
{86, 124, 128}, // band 3
{86, 124, 128}, // band 4
{86, 124, 128}, // band 5
static const uint8_t vp9_nuq_knots[QUANT_PROFILES][COEF_BANDS][NUQ_KNOTS] = {
{
{86, 122, 128}, // dc, band 0
{86, 122, 128}, // band 1
{86, 122, 128}, // band 2
{88, 122, 128}, // band 3
{88, 122, 128}, // band 4
{88, 122, 128}, // band 5
#if CONFIG_TX_SKIP
{84, 124, 128}, // band 6
{86, 122, 128}, // band 6
#endif // CONFIG_TX_SKIP
},
#if QUANT_PROFILES > 1
{
{86, 122, 128}, // dc, band 0
{86, 122, 128}, // band 1
{86, 122, 128}, // band 2
{88, 122, 128}, // band 3
{88, 122, 128}, // band 4
{88, 122, 128}, // band 5
#if CONFIG_TX_SKIP
{86, 122, 128}, // band 6
#endif // CONFIG_TX_SKIP
},
#if QUANT_PROFILES > 2
{
{86, 122, 128}, // dc, band 0
{86, 122, 128}, // band 1
{86, 122, 128}, // band 2
{88, 122, 128}, // band 3
{88, 122, 128}, // band 4
{88, 122, 128}, // band 5
#if CONFIG_TX_SKIP
{86, 122, 128}, // band 6
#endif // CONFIG_TX_SKIP
}
#endif // QUANT_PROFILES > 2
#endif // QUANT_PROFILES > 1
};
static const uint8_t vp9_nuq_doff_lossless[COEF_BANDS] = { 0, 0, 0, 0, 0, 0
static const uint8_t vp9_nuq_doff_lossless[COEF_BANDS] = { 0, 0, 0, 0, 0, 0,
#if CONFIG_TX_SKIP
, 0
0
#endif // CONFIG_TX_SKIP
};
static const uint8_t vp9_nuq_doff_low[COEF_BANDS] = { 5, 13, 14, 19, 20, 21
static const uint8_t vp9_nuq_doff[QUANT_PROFILES][COEF_BANDS] = {
{ 8, 15, 16, 22, 23, 24, // dq_off_index = 0
#if CONFIG_TX_SKIP
, 8
8
#endif // CONFIG_TX_SKIP
};
static const uint8_t vp9_nuq_doff_mid[COEF_BANDS] = { 8, 16, 17, 22, 23, 24
},
#if QUANT_PROFILES > 1
{ 6, 12, 13, 16, 17, 18, // dq_off_index = 1
#if CONFIG_TX_SKIP
, 8
8
#endif // CONFIG_TX_SKIP
};
static const uint8_t vp9_nuq_doff_high[COEF_BANDS] = { 41, 49, 50, 55, 56, 57
},
#if QUANT_PROFILES > 2
{ 10, 18, 19, 23, 25, 26, // dq_off_index = 2
#if CONFIG_TX_SKIP
, 8
8
#endif // CONFIG_TX_SKIP
}
#endif // QUANT_PROFILES > 2
#endif // QUANT_PROFILES > 1
};
// Allow different quantization profiles in different q ranges,
// to enable entropy-constraints in scalar quantization.
static const uint8_t *get_nuq_knots(int lossless, int band) {
static const uint8_t *get_nuq_knots(int lossless, int band, int dq_off_index) {
if (lossless)
return vp9_nuq_knots_lossless[band];
else
return vp9_nuq_knots_mid[band];
return vp9_nuq_knots[dq_off_index][band];
}
static INLINE int16_t quant_to_doff_fixed(int lossless, int band,
int dq_off_index) {
if (lossless)
return vp9_nuq_doff_lossless[band];
else if (!dq_off_index) // dq_off_index == 0
return vp9_nuq_doff_mid[band];
else if (dq_off_index == 1)
return vp9_nuq_doff_low[band];
else // dq_off_index == 2
return vp9_nuq_doff_high[band];
else
return vp9_nuq_doff[dq_off_index][band];
}
static INLINE void get_cumbins_nuq(int q, int lossless, int band,
tran_low_t *cumbins) {
const uint8_t *knots = get_nuq_knots(lossless, band);
tran_low_t *cumbins, int dq_off_index) {
const uint8_t *knots = get_nuq_knots(lossless, band, dq_off_index);
int16_t cumknots[NUQ_KNOTS];
int i;
cumknots[0] = knots[0];
@ -111,12 +141,12 @@ static INLINE void get_cumbins_nuq(int q, int lossless, int band,
void vp9_get_dequant_val_nuq(int q, int lossless, int band,
tran_low_t *dq, tran_low_t *cumbins,
int dq_off_index) {
const uint8_t *knots = get_nuq_knots(lossless, band);
const uint8_t *knots = get_nuq_knots(lossless, band, dq_off_index);
tran_low_t cumbins_[NUQ_KNOTS], *cumbins_ptr;
tran_low_t doff;
int i;
cumbins_ptr = (cumbins ? cumbins : cumbins_);
get_cumbins_nuq(q, lossless, band, cumbins_ptr);
get_cumbins_nuq(q, lossless, band, cumbins_ptr, dq_off_index);
dq[0] = 0;
for (i = 1; i < NUQ_KNOTS; ++i) {
const int16_t qstep = (knots[i] * q + 64) >> 7;

View File

@ -136,6 +136,14 @@ static void read_switchable_interp_probs(FRAME_CONTEXT *fc, vp9_reader *r) {
vp9_diff_update_prob(r, &fc->switchable_interp_prob[j][i]);
}
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
static void read_dq_profile_probs(FRAME_CONTEXT *fc, vp9_reader *r) {
int i;
for (i = 0; i < QUANT_PROFILES - 1; ++i)
vp9_diff_update_prob(r, &fc->dq_profile_prob[i]);
}
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
static void read_inter_mode_probs(FRAME_CONTEXT *fc, vp9_reader *r) {
int i, j;
for (i = 0; i < INTER_MODE_CONTEXTS; ++i)
@ -910,6 +918,9 @@ static void set_param_topblock(VP9_COMMON *const cm, MACROBLOCKD *const xd,
BLOCK_SIZE bsize, int mi_row, int mi_col,
#if CONFIG_EXT_TX
int txfm,
#endif
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
int dq_off_index,
#endif
int skip) {
const int bw = num_8x8_blocks_wide_lookup[bsize];
@ -927,6 +938,9 @@ static void set_param_topblock(VP9_COMMON *const cm, MACROBLOCKD *const xd,
xd->mi[y * cm->mi_stride + x].mbmi.skip = skip;
#if CONFIG_EXT_TX
xd->mi[y * cm->mi_stride + x].mbmi.ext_txfrm = txfm;
#endif
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
xd->mi[y * cm->mi_stride + x].mbmi.dq_off_index = dq_off_index;
#endif
}
}
@ -1685,43 +1699,43 @@ static void decode_block(VP9_COMMON *const cm, MACROBLOCKD *const xd,
#if CONFIG_SUPERTX
if (!supertx_enabled) {
#endif
if (less8x8)
bsize = BLOCK_8X8;
if (less8x8)
bsize = BLOCK_8X8;
if (mbmi->skip) {
reset_skip_context(xd, bsize);
} else {
if (cm->seg.enabled) {
setup_plane_dequants(cm, xd, vp9_get_qindex(&cm->seg, mbmi->segment_id,
cm->base_qindex));
if (mbmi->skip) {
reset_skip_context(xd, bsize);
} else {
if (cm->seg.enabled) {
setup_plane_dequants(cm, xd, vp9_get_qindex(&cm->seg, mbmi->segment_id,
cm->base_qindex));
}
}
}
if (!is_inter_block(mbmi)
if (!is_inter_block(mbmi)
#if CONFIG_INTRABC
&& !is_intrabc_mode(mbmi->mode)
&& !is_intrabc_mode(mbmi->mode)
#endif // CONFIG_INTRABC
) {
struct intra_args arg = { cm, xd, r };
vp9_foreach_transformed_block(xd, bsize,
predict_and_reconstruct_intra_block, &arg);
} else {
// Prediction
vp9_dec_build_inter_predictors_sb(xd, mi_row, mi_col, bsize);
) {
struct intra_args arg = { cm, xd, r };
vp9_foreach_transformed_block(xd, bsize,
predict_and_reconstruct_intra_block, &arg);
} else {
// Prediction
vp9_dec_build_inter_predictors_sb(xd, mi_row, mi_col, bsize);
// Reconstruction
if (!mbmi->skip) {
int eobtotal = 0;
struct inter_args arg = { cm, xd, r, &eobtotal };
// Reconstruction
if (!mbmi->skip) {
int eobtotal = 0;
struct inter_args arg = { cm, xd, r, &eobtotal };
vp9_foreach_transformed_block(xd, bsize, reconstruct_inter_block, &arg);
vp9_foreach_transformed_block(xd, bsize, reconstruct_inter_block, &arg);
#if CONFIG_BITSTREAM_FIXES
#else
if (!less8x8 && eobtotal == 0)
mbmi->skip = 1; // skip loopfilter
if (!less8x8 && eobtotal == 0)
mbmi->skip = 1; // skip loopfilter
#endif
}
}
}
#if CONFIG_SUPERTX
}
#endif
@ -1790,6 +1804,9 @@ static void decode_partition(VP9_COMMON *const cm, MACROBLOCKD *const xd,
#if CONFIG_EXT_TX
int txfm = NORM;
#endif
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
int dq_off_index = 0;
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
#endif // CONFIG_SUPERTX
if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols)
@ -1841,6 +1858,21 @@ static void decode_partition(VP9_COMMON *const cm, MACROBLOCKD *const xd,
}
}
#endif // CONFIG_EXT_TX
/*
printf("D[%d/%d, %d %d] sb_type %d skip %d}\n", cm->current_video_frame, cm->show_frame,
mi_row, mi_col, bsize, skip);
*/
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
if (cm->base_qindex > Q_THRESHOLD_MIN &&
cm->base_qindex < Q_THRESHOLD_MAX &&
switchable_dq_profile_used(bsize) && !skip &&
!vp9_segfeature_active(
&cm->seg, xd->mi[0].mbmi.segment_id, SEG_LVL_SKIP)) {
dq_off_index = vp9_read_dq_profile(cm, r);
} else {
dq_off_index = 0;
}
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
}
#endif // CONFIG_SUPERTX
if (subsize < BLOCK_8X8) {
@ -2092,6 +2124,9 @@ static void decode_partition(VP9_COMMON *const cm, MACROBLOCKD *const xd,
#if CONFIG_EXT_TX
xd->mi[0].mbmi.ext_txfrm = txfm;
#endif
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
xd->mi[0].mbmi.dq_off_index = dq_off_index;
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
vp9_foreach_transformed_block(xd, bsize, reconstruct_inter_block, &arg);
if (!(subsize < BLOCK_8X8) && eobtotal == 0)
skip = 1;
@ -2100,6 +2135,9 @@ static void decode_partition(VP9_COMMON *const cm, MACROBLOCKD *const xd,
#if CONFIG_EXT_TX
txfm,
#endif
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
dq_off_index,
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
skip);
}
#endif // CONFIG_SUPERTX
@ -3482,6 +3520,10 @@ static int read_compressed_header(VP9Decoder *pbi, const uint8_t *data,
read_inter_compound_mode_probs(fc, &r);
#endif // CONFIG_NEW_INTER
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
read_dq_profile_probs(fc, &r);
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
if (cm->interp_filter == SWITCHABLE)
read_switchable_interp_probs(fc, &r);

View File

@ -66,6 +66,17 @@ static PREDICTION_MODE read_inter_compound_mode(VP9_COMMON *cm, vp9_reader *r,
}
#endif // CONFIG_NEW_INTER
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
int vp9_read_dq_profile(VP9_COMMON *cm, vp9_reader *r) {
const int dq_profile = vp9_read_tree(r, vp9_dq_profile_tree,
cm->fc.dq_profile_prob);
if (!cm->frame_parallel_decoding_mode) {
++cm->counts.dq_profile[dq_profile];
}
return dq_profile;
}
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
static PREDICTION_MODE read_inter_mode(VP9_COMMON *cm, vp9_reader *r,
int ctx) {
const int mode = vp9_read_tree(r, vp9_inter_mode_tree,
@ -78,7 +89,7 @@ static PREDICTION_MODE read_inter_mode(VP9_COMMON *cm, vp9_reader *r,
#if CONFIG_COPY_MODE
static COPY_MODE read_copy_mode(VP9_COMMON *cm, vp9_reader *r,
int num_candidate, int ctx) {
COPY_MODE mode;
COPY_MODE mode = 0;
switch (num_candidate) {
case 0:
@ -310,10 +321,6 @@ static void read_intra_frame_mode_info(VP9_COMMON *const cm,
int_mv dv_ref;
#endif // CONFIG_INTRABC
#if CONFIG_NEW_QUANT
mbmi->dq_off_index = DEFAULT_DQ;
#endif // CONFIG_NEW_QUANT
mbmi->segment_id = read_intra_segment_id(cm, xd, mi_row, mi_col, r);
#if CONFIG_MISC_ENTROPY
mbmi->skip = 0;
@ -509,7 +516,19 @@ static void read_intra_frame_mode_info(VP9_COMMON *const cm,
#else // CONFIG_SR_MODE
mbmi->tx_size = read_tx_size(cm, xd, cm->tx_mode, bsize, 1, r);
#endif // CONFIG_SR_MODE
#endif
#endif // CONFIG_PALETTE
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
if (cm->base_qindex > Q_THRESHOLD_MIN && cm->base_qindex < Q_THRESHOLD_MAX &&
switchable_dq_profile_used(mbmi->sb_type) &&
!mbmi->skip &&
!vp9_segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
mbmi->dq_off_index = vp9_read_dq_profile(cm, r);
} else {
mbmi->dq_off_index = 0;
}
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
mbmi->ref_frame[0] = INTRA_FRAME;
mbmi->ref_frame[1] = NONE;
@ -1464,10 +1483,6 @@ static void read_inter_frame_mode_info(VP9_COMMON *const cm,
(void) supertx_enabled;
#endif
#if CONFIG_NEW_QUANT
mbmi->dq_off_index = DEFAULT_DQ;
#endif // CONFIG_NEW_QUANT
mbmi->mv[0].as_int = 0;
mbmi->mv[1].as_int = 0;
@ -1522,6 +1537,13 @@ static void read_inter_frame_mode_info(VP9_COMMON *const cm,
mbmi->mode = NEARESTMV;
mbmi->skip = skip_backup;
mbmi->copy_mode = copy_mode_backup;
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
if (!(cm->base_qindex > Q_THRESHOLD_MIN &&
cm->base_qindex < Q_THRESHOLD_MAX &&
switchable_dq_profile_used(mbmi->sb_type) &&
!vp9_segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)))
mbmi->dq_off_index = 0;
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
}
#endif // CONFIG_COPY_MODE
@ -1706,6 +1728,24 @@ static void read_inter_frame_mode_info(VP9_COMMON *const cm,
#endif // CONFIG_SR_MODE
#endif // CONFIG_PALETTE
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
if (cm->base_qindex > Q_THRESHOLD_MIN &&
cm->base_qindex < Q_THRESHOLD_MAX &&
switchable_dq_profile_used(mbmi->sb_type) &&
#if CONFIG_SUPERTX
!supertx_enabled &&
#endif
!mbmi->skip &&
!vp9_segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
#if CONFIG_COPY_MODE
if (mbmi->copy_mode == NOREF)
#endif // CONFIG_COPY_MODE
mbmi->dq_off_index = vp9_read_dq_profile(cm, r);
} else {
mbmi->dq_off_index = 0;
}
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
#if CONFIG_EXT_TX
if (inter_block &&
#if !CONFIG_WAVELETS

View File

@ -30,6 +30,9 @@ void vp9_read_mode_info(VP9_COMMON *cm, MACROBLOCKD *xd,
#endif
#endif
int mi_row, int mi_col, vp9_reader *r);
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
int vp9_read_dq_profile(VP9_COMMON *cm, vp9_reader *r);
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
#ifdef __cplusplus
} // extern "C"

View File

@ -69,6 +69,9 @@ static struct vp9_token inter_compound_mode_encodings[INTER_COMPOUND_MODES];
#if CONFIG_GLOBAL_MOTION
static struct vp9_token global_motion_types_encodings[GLOBAL_MOTION_TYPES];
#endif // CONFIG_GLOBAL_MOTION
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
static struct vp9_token dq_profile_encodings[QUANT_PROFILES];
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
#if CONFIG_SUPERTX
static int vp9_check_supertx(VP9_COMMON *cm, int mi_row, int mi_col,
@ -115,6 +118,9 @@ void vp9_entropy_mode_init() {
vp9_tokens_from_tree(global_motion_types_encodings,
vp9_global_motion_types_tree);
#endif // CONFIG_GLOBAL_MOTION
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
vp9_tokens_from_tree(dq_profile_encodings, vp9_dq_profile_tree);
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
}
static void write_intra_mode(vp9_writer *w, PREDICTION_MODE mode,
@ -240,6 +246,20 @@ static void update_skip_probs(VP9_COMMON *cm, vp9_writer *w) {
vp9_cond_prob_diff_update(w, &cm->fc.skip_probs[k], cm->counts.skip[k]);
}
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
static void write_dq_profile(const VP9_COMMON *cm, int dq_profile,
vp9_writer *w) {
vp9_write_token(w, vp9_dq_profile_tree, cm->fc.dq_profile_prob,
&dq_profile_encodings[dq_profile]);
}
static void update_dq_profile_probs(VP9_COMMON *cm, vp9_writer *w) {
prob_diff_update(vp9_dq_profile_tree,
cm->fc.dq_profile_prob,
cm->counts.dq_profile, QUANT_PROFILES, w);
}
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
#if CONFIG_SR_MODE
#if SR_USE_MULTI_F
static int write_sr_usfilter(const VP9_COMMON *cm, const MACROBLOCKD *xd,
@ -607,12 +627,12 @@ static void pack_inter_mode_mvs(VP9_COMP *cpi, const MODE_INFO *mi,
if (!supertx_enabled) {
#endif
#if CONFIG_COPY_MODE
if (mbmi->copy_mode == NOREF)
if (mbmi->copy_mode == NOREF)
#endif
if (!vp9_segfeature_active(seg, segment_id, SEG_LVL_REF_FRAME))
vp9_write(w, is_inter, vp9_get_intra_inter_prob(cm, xd));
if (!vp9_segfeature_active(seg, segment_id, SEG_LVL_REF_FRAME))
vp9_write(w, is_inter, vp9_get_intra_inter_prob(cm, xd));
#if CONFIG_MISC_ENTROPY
skip = write_skip(cm, xd, segment_id, mi, is_inter, w);
skip = write_skip(cm, xd, segment_id, mi, is_inter, w);
#endif
#if CONFIG_SUPERTX
@ -735,6 +755,24 @@ static void pack_inter_mode_mvs(VP9_COMP *cpi, const MODE_INFO *mi,
(skip || vp9_segfeature_active(seg, segment_id, SEG_LVL_SKIP)))) {
write_selected_tx_size(cm, xd, mbmi->tx_size, bsize, w);
}
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
if (cm->base_qindex > Q_THRESHOLD_MIN && cm->base_qindex < Q_THRESHOLD_MAX &&
switchable_dq_profile_used(mbmi->sb_type) &&
#if CONFIG_SUPERTX
!supertx_enabled &&
#endif // CONFIG_SUPERTX
!mbmi->skip &&
!vp9_segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
#if CONFIG_COPY_MODE
if (mbmi->copy_mode == NOREF)
#endif // CONFIG_COPY_MODE
write_dq_profile(cm, mbmi->dq_off_index, w);
} else {
assert(mbmi->dq_off_index == 0);
}
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
#if CONFIG_EXT_TX
if (is_inter &&
#if !CONFIG_WAVELETS
@ -1163,6 +1201,17 @@ static void write_mb_modes_kf(const VP9_COMMON *cm,
write_selected_tx_size(cm, xd, mbmi->tx_size, bsize, w);
}
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
if (cm->base_qindex > Q_THRESHOLD_MIN && cm->base_qindex < Q_THRESHOLD_MAX &&
switchable_dq_profile_used(mbmi->sb_type) &&
!mbmi->skip &&
!vp9_segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
write_dq_profile(cm, mbmi->dq_off_index, w);
} else {
assert(mbmi->dq_off_index == 0);
}
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
#if CONFIG_TX_SKIP
if (bsize >= BLOCK_8X8) {
int q_idx = vp9_get_qindex(seg, mbmi->segment_id, cm->base_qindex);
@ -1373,6 +1422,16 @@ static void write_modes_sb(VP9_COMP *cpi,
&ext_tx_encodings[xd->mi[0].mbmi.ext_txfrm]);
#endif // CONFIG_WAVELETS
#endif // CONFIG_EXT_TX
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
if (!xd->mi[0].mbmi.skip &&
cm->base_qindex > Q_THRESHOLD_MIN &&
cm->base_qindex < Q_THRESHOLD_MAX &&
switchable_dq_profile_used(bsize) &&
!vp9_segfeature_active(
&cm->seg, xd->mi[0].mbmi.segment_id, SEG_LVL_SKIP)) {
write_dq_profile(cm, xd->mi[0].mbmi.dq_off_index, w);
}
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
}
}
#endif // CONFIG_SUPERTX
@ -2683,6 +2742,10 @@ static size_t write_compressed_header(VP9_COMP *cpi, uint8_t *data) {
update_inter_compound_mode_probs(cm, &header_bc);
#endif // CONFIG_NEW_INTER
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
update_dq_profile_probs(cm, &header_bc);
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
if (cm->interp_filter == SWITCHABLE)
update_switchable_interp_probs(cm, &header_bc);

View File

@ -95,6 +95,9 @@ static void rd_supertx_sb(VP9_COMP *cpi, const TileInfo *const tile,
#if CONFIG_EXT_TX
EXT_TX_TYPE *best_tx,
#endif
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
int *dq_index,
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
PC_TREE *pc_tree);
#endif // CONFIG_SUPERTX
@ -1196,6 +1199,9 @@ static void update_supertx_param(VP9_COMP *cpi, PICK_MODE_CONTEXT *ctx,
#if CONFIG_EXT_TX
int best_tx,
#endif
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
int dq_index,
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
TX_SIZE supertx_size) {
MACROBLOCK *const x = &cpi->mb;
@ -1206,6 +1212,9 @@ static void update_supertx_param(VP9_COMP *cpi, PICK_MODE_CONTEXT *ctx,
#if CONFIG_EXT_TX
ctx->mic.mbmi.ext_txfrm = best_tx;
#endif // CONFIG_EXT_TX
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
ctx->mic.mbmi.dq_off_index = dq_index;
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
#if CONFIG_TX_SKIP
ctx->mic.mbmi.tx_skip[0] = 0;
ctx->mic.mbmi.tx_skip[1] = 0;
@ -1217,6 +1226,9 @@ static void update_supertx_param_sb(VP9_COMP *cpi, int mi_row, int mi_col,
#if CONFIG_EXT_TX
int best_tx,
#endif
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
int dq_index,
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
TX_SIZE supertx_size, PC_TREE *pc_tree) {
VP9_COMMON *const cm = &cpi->common;
int bsl = b_width_log2_lookup[bsize], hbs = (1 << bsl) / 4;
@ -1234,6 +1246,9 @@ static void update_supertx_param_sb(VP9_COMP *cpi, int mi_row, int mi_col,
update_supertx_param(cpi, &pc_tree->none,
#if CONFIG_EXT_TX
best_tx,
#endif
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
dq_index,
#endif
supertx_size);
break;
@ -1241,12 +1256,18 @@ static void update_supertx_param_sb(VP9_COMP *cpi, int mi_row, int mi_col,
update_supertx_param(cpi, &pc_tree->vertical[0],
#if CONFIG_EXT_TX
best_tx,
#endif
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
dq_index,
#endif
supertx_size);
if (mi_col + hbs < cm->mi_cols && bsize > BLOCK_8X8)
update_supertx_param(cpi, &pc_tree->vertical[1],
#if CONFIG_EXT_TX
best_tx,
#endif
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
dq_index,
#endif
supertx_size);
break;
@ -1254,12 +1275,18 @@ static void update_supertx_param_sb(VP9_COMP *cpi, int mi_row, int mi_col,
update_supertx_param(cpi, &pc_tree->horizontal[0],
#if CONFIG_EXT_TX
best_tx,
#endif
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
dq_index,
#endif
supertx_size);
if (mi_row + hbs < cm->mi_rows && bsize > BLOCK_8X8)
update_supertx_param(cpi, &pc_tree->horizontal[1],
#if CONFIG_EXT_TX
best_tx,
#endif
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
dq_index,
#endif
supertx_size);
break;
@ -1268,27 +1295,42 @@ static void update_supertx_param_sb(VP9_COMP *cpi, int mi_row, int mi_col,
update_supertx_param(cpi, pc_tree->leaf_split[0],
#if CONFIG_EXT_TX
best_tx,
#endif
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
dq_index,
#endif
supertx_size);
} else {
update_supertx_param_sb(cpi, mi_row, mi_col, subsize,
#if CONFIG_EXT_TX
best_tx,
#endif
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
dq_index,
#endif
supertx_size, pc_tree->split[0]);
update_supertx_param_sb(cpi, mi_row, mi_col + hbs, subsize,
#if CONFIG_EXT_TX
best_tx,
#endif
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
dq_index,
#endif
supertx_size, pc_tree->split[1]);
update_supertx_param_sb(cpi, mi_row + hbs, mi_col, subsize,
#if CONFIG_EXT_TX
best_tx,
#endif
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
dq_index,
#endif
supertx_size, pc_tree->split[2]);
update_supertx_param_sb(cpi, mi_row + hbs, mi_col + hbs, subsize,
#if CONFIG_EXT_TX
best_tx,
#endif
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
dq_index,
#endif
supertx_size, pc_tree->split[3]);
}
@ -1299,6 +1341,9 @@ static void update_supertx_param_sb(VP9_COMP *cpi, int mi_row, int mi_col,
update_supertx_param(cpi, &pc_tree->horizontala[i],
#if CONFIG_EXT_TX
best_tx,
#endif
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
dq_index,
#endif
supertx_size);
break;
@ -1307,6 +1352,9 @@ static void update_supertx_param_sb(VP9_COMP *cpi, int mi_row, int mi_col,
update_supertx_param(cpi, &pc_tree->horizontalb[i],
#if CONFIG_EXT_TX
best_tx,
#endif
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
dq_index,
#endif
supertx_size);
break;
@ -1315,6 +1363,9 @@ static void update_supertx_param_sb(VP9_COMP *cpi, int mi_row, int mi_col,
update_supertx_param(cpi, &pc_tree->verticala[i],
#if CONFIG_EXT_TX
best_tx,
#endif
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
dq_index,
#endif
supertx_size);
break;
@ -1323,6 +1374,9 @@ static void update_supertx_param_sb(VP9_COMP *cpi, int mi_row, int mi_col,
update_supertx_param(cpi, &pc_tree->verticalb[i],
#if CONFIG_EXT_TX
best_tx,
#endif
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
dq_index,
#endif
supertx_size);
break;
@ -1874,6 +1928,16 @@ static void encode_sb(VP9_COMP *cpi, const TileInfo *const tile,
cm->counts.supertx
[partition_supertx_context_lookup[partition]][supertx_size][1]++;
cm->counts.supertx_size[supertx_size]++;
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
if (cm->base_qindex > Q_THRESHOLD_MIN &&
cm->base_qindex < Q_THRESHOLD_MAX &&
switchable_dq_profile_used(bsize) &&
!xd->mi[0].mbmi.skip &&
!vp9_segfeature_active(&cm->seg, xd->mi[0].mbmi.segment_id,
SEG_LVL_SKIP)) {
++cm->counts.dq_profile[xd->mi[0].mbmi.dq_off_index];
}
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
#if CONFIG_EXT_TX
#if CONFIG_WAVELETS
if (!xd->mi[0].mbmi.skip)
@ -2862,6 +2926,9 @@ static void rd_test_partition3(VP9_COMP *cpi, const TileInfo *const tile,
#if CONFIG_EXT_TX
EXT_TX_TYPE best_tx = NORM;
#endif
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
int dq_index = 0;
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
tmp_rate = sum_rate_nocoef;
tmp_dist = 0;
@ -2870,6 +2937,9 @@ static void rd_test_partition3(VP9_COMP *cpi, const TileInfo *const tile,
#if CONFIG_EXT_TX
&best_tx,
#endif
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
&dq_index,
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
pc_tree);
tmp_rate += vp9_cost_bit(
@ -2884,6 +2954,9 @@ static void rd_test_partition3(VP9_COMP *cpi, const TileInfo *const tile,
update_supertx_param_sb(cpi, mi_row, mi_col, bsize,
#if CONFIG_EXT_TX
best_tx,
#endif
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
dq_index,
#endif
supertx_size, pc_tree);
}
@ -3258,6 +3331,9 @@ static void rd_pick_partition(VP9_COMP *cpi, const TileInfo *const tile,
#if CONFIG_EXT_TX
EXT_TX_TYPE best_tx = NORM;
#endif
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
int dq_index = 0;
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
tmp_rate = sum_rate_nocoef;
tmp_dist = 0;
@ -3266,6 +3342,9 @@ static void rd_pick_partition(VP9_COMP *cpi, const TileInfo *const tile,
#if CONFIG_EXT_TX
&best_tx,
#endif
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
&dq_index,
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
pc_tree);
tmp_rate += vp9_cost_bit(
@ -3280,6 +3359,9 @@ static void rd_pick_partition(VP9_COMP *cpi, const TileInfo *const tile,
update_supertx_param_sb(cpi, mi_row, mi_col, bsize,
#if CONFIG_EXT_TX
best_tx,
#endif
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
dq_index,
#endif
supertx_size, pc_tree);
}
@ -3362,6 +3444,9 @@ static void rd_pick_partition(VP9_COMP *cpi, const TileInfo *const tile,
#if CONFIG_EXT_TX
EXT_TX_TYPE best_tx = NORM;
#endif
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
int dq_index = 0;
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
tmp_rate = sum_rate_nocoef;
tmp_dist = 0;
@ -3370,6 +3455,9 @@ static void rd_pick_partition(VP9_COMP *cpi, const TileInfo *const tile,
#if CONFIG_EXT_TX
&best_tx,
#endif
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
&dq_index,
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
pc_tree);
tmp_rate += vp9_cost_bit(
@ -3385,6 +3473,9 @@ static void rd_pick_partition(VP9_COMP *cpi, const TileInfo *const tile,
#if CONFIG_EXT_TX
best_tx,
#endif
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
dq_index,
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
supertx_size, pc_tree);
}
}
@ -3527,6 +3618,9 @@ static void rd_pick_partition(VP9_COMP *cpi, const TileInfo *const tile,
#if CONFIG_EXT_TX
EXT_TX_TYPE best_tx = NORM;
#endif
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
int dq_index = 0;
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
tmp_rate = sum_rate_nocoef;
tmp_dist = 0;
@ -3535,6 +3629,9 @@ static void rd_pick_partition(VP9_COMP *cpi, const TileInfo *const tile,
#if CONFIG_EXT_TX
&best_tx,
#endif
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
&dq_index,
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
pc_tree);
tmp_rate += vp9_cost_bit(
@ -3550,6 +3647,9 @@ static void rd_pick_partition(VP9_COMP *cpi, const TileInfo *const tile,
#if CONFIG_EXT_TX
best_tx,
#endif
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
dq_index,
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
supertx_size, pc_tree);
}
}
@ -3681,6 +3781,9 @@ static void rd_pick_partition(VP9_COMP *cpi, const TileInfo *const tile,
#if CONFIG_EXT_TX
EXT_TX_TYPE best_tx = NORM;
#endif
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
int dq_index = 0;
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
tmp_rate = sum_rate_nocoef;
tmp_dist = 0;
@ -3689,6 +3792,9 @@ static void rd_pick_partition(VP9_COMP *cpi, const TileInfo *const tile,
#if CONFIG_EXT_TX
&best_tx,
#endif
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
&dq_index,
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
pc_tree);
tmp_rate += vp9_cost_bit(
@ -3704,6 +3810,9 @@ static void rd_pick_partition(VP9_COMP *cpi, const TileInfo *const tile,
#if CONFIG_EXT_TX
best_tx,
#endif
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
dq_index,
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
supertx_size, pc_tree);
}
}
@ -4712,10 +4821,6 @@ static void encode_superblock(VP9_COMP *cpi, TOKENEXTRA **t, int output_enabled,
const int mi_width = num_8x8_blocks_wide_lookup[bsize];
const int mi_height = num_8x8_blocks_high_lookup[bsize];
#if CONFIG_NEW_QUANT
mbmi->dq_off_index = DEFAULT_DQ;
#endif // CONFIG_NEW_QUANT
x->skip_recode = !x->select_tx_size && mbmi->sb_type >= BLOCK_8X8 &&
cpi->oxcf.aq_mode != COMPLEXITY_AQ &&
cpi->oxcf.aq_mode != CYCLIC_REFRESH_AQ &&
@ -4784,8 +4889,6 @@ static void encode_superblock(VP9_COMP *cpi, TOKENEXTRA **t, int output_enabled,
rows * cols * sizeof(xd->plane[1].color_index_map[0]));
}
}
#endif // CONFIG_PALETTE
#if CONFIG_PALETTE
if (frame_is_intra_only(cm) && output_enabled && bsize >= BLOCK_8X8) {
cm->palette_blocks_signalled++;
if (mbmi->palette_enabled[0])
@ -4808,6 +4911,12 @@ static void encode_superblock(VP9_COMP *cpi, TOKENEXTRA **t, int output_enabled,
vp9_encode_sb(x, MAX(bsize, BLOCK_8X8));
vp9_tokenize_sb(cpi, t, !output_enabled, MAX(bsize, BLOCK_8X8));
}
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
// This is not strictly required, but is a good practice.
// If you remove this, the assert in vp9_bitstream.c needs to be removed also.
if (mbmi->skip)
mbmi->dq_off_index = 0;
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
#if CONFIG_INTRABC
if (frame_is_intra_only(cm) && output_enabled && bsize >= BLOCK_8X8) {
@ -4874,6 +4983,18 @@ static void encode_superblock(VP9_COMP *cpi, TOKENEXTRA **t, int output_enabled,
++cm->counts.ext_tx[mbmi->tx_size][mbmi->ext_txfrm];
}
#endif // CONFIG_EXT_TX
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
if (cm->base_qindex > Q_THRESHOLD_MIN &&
cm->base_qindex < Q_THRESHOLD_MAX &&
switchable_dq_profile_used(mbmi->sb_type) &&
#if CONFIG_COPY_MODE
(frame_is_intra_only(cm) || mbmi->copy_mode == NOREF) &&
#endif // CONFIG_COPY_MODE
!mbmi->skip &&
!vp9_segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
++cm->counts.dq_profile[mbmi->dq_off_index];
}
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
#if CONFIG_TX_SKIP
if (bsize >= BLOCK_8X8) {
int q_idx = vp9_get_qindex(&cm->seg, mbmi->segment_id, cm->base_qindex);
@ -5834,6 +5955,9 @@ static void rd_supertx_sb(VP9_COMP *cpi, const TileInfo *const tile,
#if CONFIG_EXT_TX
EXT_TX_TYPE *best_tx,
#endif
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
int *dq_index,
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
PC_TREE *pc_tree) {
VP9_COMMON *const cm = &cpi->common;
MACROBLOCK *const x = &cpi->mb;
@ -5852,6 +5976,9 @@ static void rd_supertx_sb(VP9_COMP *cpi, const TileInfo *const tile,
#endif
update_state_sb_supertx(cpi, tile, mi_row, mi_col, bsize, 0, pc_tree);
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
xd->mi[0].mbmi.dq_off_index = 0;
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
vp9_setup_dst_planes(xd->plane, get_frame_new_buffer(cm),
mi_row, mi_col);
for (plane = 0; plane < MAX_MB_PLANE; plane++) {
@ -5941,5 +6068,8 @@ static void rd_supertx_sb(VP9_COMP *cpi, const TileInfo *const tile,
x->skip = skip_tx;
xd->mi[0].mbmi.ext_txfrm = best_tx_nostx;
#endif // CONFIG_EXT_TX
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
*dq_index = 0;
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
}
#endif // CONFIG_SUPERTX

View File

@ -167,7 +167,6 @@ static int optimize_b(MACROBLOCK *mb, int plane, int block,
const int16_t *dequant_ptr = pd->dequant;
#endif // CONFIG_TX_SKIP
#if CONFIG_NEW_QUANT
int dq = xd->mi->mbmi.dq_off_index;
#if CONFIG_TX_SKIP
const int use_rect_quant = is_rect_quant_used(&xd->mi[0].src_mi->mbmi, plane);
#endif // CONFIG_TX_SKIP
@ -2710,8 +2709,10 @@ void vp9_encode_sb(MACROBLOCK *x, BLOCK_SIZE bsize) {
int plane;
mbmi->skip = 1;
if (x->skip)
if (x->skip) {
mbmi->tx_size = max_txsize_lookup[bsize];
return;
}
for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
if (!x->skip_recode)

View File

@ -3702,8 +3702,6 @@ static void encode_frame_to_data_rate(VP9_COMP *cpi,
// Pick the loop filter level for the frame.
loopfilter_frame(cpi, cm);
// printf("Bilateral level: %d\n", cm->lf.bilateral_level);
// build the bitstream
#if CONFIG_ROW_TILE
if (vp9_pack_bitstream(cpi, dest, size, 1) < 0) {

View File

@ -427,6 +427,10 @@ typedef struct VP9_COMP {
[PALETTE_COLORS];
#endif // CONFIG_PALETTE
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
int dq_profile_costs[QUANT_PROFILES];
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
PICK_MODE_CONTEXT *leaf_tree;
PC_TREE *pc_tree;
PC_TREE *pc_root;

View File

@ -104,6 +104,10 @@ static void fill_mode_costs(VP9_COMP *cpi) {
vp9_cost_tokens(cpi->palette_uv_color_costs[i][j],
fc->palette_uv_color_prob[i][j], vp9_palette_color_tree);
#endif // CONFIG_PALETTE
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
vp9_cost_tokens(cpi->dq_profile_costs, fc->dq_profile_prob,
vp9_dq_profile_tree);
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
}
static void fill_token_costs(vp9_coeff_cost *c,

View File

@ -2357,7 +2357,6 @@ static int64_t rd_pick_intra_sby_mode(VP9_COMP *cpi, MACROBLOCK *x,
palette_enabled_prob[bsize - BLOCK_8X8][palette_ctx], 0);
#endif
this_rd = RDCOST(x->rdmult, x->rddiv, this_rate, this_distortion);
if (this_rd < best_rd) {
mode_selected = mode;
#if CONFIG_FILTERINTRA
@ -2387,6 +2386,34 @@ static int64_t rd_pick_intra_sby_mode(VP9_COMP *cpi, MACROBLOCK *x,
}
}
}
#if CONFIG_NEW_QUANT
mic->mbmi.dq_off_index = 0;
#if QUANT_PROFILES > 1
if (cpi->common.base_qindex > Q_THRESHOLD_MIN &&
cpi->common.base_qindex < Q_THRESHOLD_MAX &&
!xd->lossless && switchable_dq_profile_used(bsize)) {
int64_t local_tx_cache[TX_MODES];
int i;
int best_dq = -1;
for (i = 0; i < QUANT_PROFILES; i++) {
mic->mbmi.dq_off_index = i;
super_block_yrd(cpi, x, &this_rate_tokenonly, &this_distortion,
&s, NULL, bsize, local_tx_cache, INT64_MAX);
this_rate = this_rate_tokenonly + bmode_costs[mic->mbmi.mode] +
cpi->dq_profile_costs[i];
this_rd = RDCOST(x->rdmult, x->rddiv, this_rate, this_distortion);
if (this_rd < best_rd || best_dq == -1) {
best_dq = i;
best_rd = this_rd;
}
}
mic->mbmi.dq_off_index = best_dq;
*rate = this_rate;
*rate_tokenonly = this_rate_tokenonly;
*distortion = this_distortion;
}
#endif // QUANT_PROFILES > 1
#endif // CONFIG_NEW_QUANT
#if CONFIG_TX_SKIP
#if CONFIG_FILTERINTRA
@ -2753,7 +2780,6 @@ static int64_t rd_pick_intra_sby_mode(VP9_COMP *cpi, MACROBLOCK *x,
#endif // CONFIG_FILTERINTRA
}
#endif // CONFIG_PALETTE
return best_rd;
}
@ -6066,6 +6092,39 @@ static int64_t handle_inter_mode(VP9_COMP *cpi, MACROBLOCK *x,
}
#endif // CONFIG_EXT_TX
#if CONFIG_NEW_QUANT
mbmi->dq_off_index = 0;
#if QUANT_PROFILES > 1
// Choose the best dq_index
if (cm->base_qindex > Q_THRESHOLD_MIN &&
cm->base_qindex < Q_THRESHOLD_MAX &&
!xd->lossless && switchable_dq_profile_used(bsize)) {
int64_t rdcost_dq;
int rate_y_dq;
int64_t distortion_y_dq;
int dummy;
int64_t best_rdcost_dq = INT64_MAX;
int best_dq = -1;
for (i = 0; i < QUANT_PROFILES; i++) {
mbmi->dq_off_index = i;
super_block_yrd(cpi, x, &rate_y_dq, &distortion_y_dq, &dummy, psse,
bsize, txfm_cache, INT64_MAX);
assert(rate_y_dq != INT_MAX);
assert(rate_y_dq >= 0);
rate_y_dq += cpi->dq_profile_costs[i];
rdcost_dq = RDCOST(x->rdmult, x->rddiv, rate_y_dq, distortion_y_dq);
rdcost_dq = MIN(rdcost_dq, RDCOST(x->rdmult, x->rddiv, 0, *psse));
assert(rdcost_dq >= 0);
if (rdcost_dq < best_rdcost_dq || best_dq == -1) {
best_dq = i;
best_rdcost_dq = rdcost_dq;
}
}
mbmi->dq_off_index = best_dq;
}
#endif // QUANT_PROFILES > 1
#endif // CONFIG_NEW_QUANT
// Y cost and distortion
super_block_yrd(cpi, x, rate_y, &distortion_y, &skippable_y, psse,
bsize, txfm_cache, ref_best_rd);
@ -7388,6 +7447,38 @@ void vp9_rd_pick_inter_mode_sb(VP9_COMP *cpi, MACROBLOCK *x,
if (this_mode != DC_PRED && this_mode != TM_PRED)
rate2 += intra_cost_penalty;
distortion2 = distortion_y + distortion_uv;
#if CONFIG_NEW_QUANT
mbmi->dq_off_index = 0;
#if QUANT_PROFILES > 1
if (cm->base_qindex > Q_THRESHOLD_MIN &&
cm->base_qindex < Q_THRESHOLD_MAX &&
!xd->lossless && switchable_dq_profile_used(bsize)) {
int64_t rdcost_dq;
int rate_y_dq;
int64_t distortion_y_dq;
int dummy;
int64_t best_rdcost_dq = INT64_MAX;
int best_dq = -1;
for (i = 0; i < QUANT_PROFILES; i++) {
mbmi->dq_off_index = i;
super_block_yrd(cpi, x, &rate_y_dq, &distortion_y_dq, &dummy,
NULL, bsize, tx_cache, INT64_MAX);
assert(rate_y_dq != INT_MAX);
assert(rate_y_dq >= 0);
rate_y_dq += cpi->dq_profile_costs[i];
rdcost_dq = RDCOST(x->rdmult, x->rddiv, rate_y_dq, distortion_y_dq);
assert(rdcost_dq >= 0);
if (rdcost_dq < best_rdcost_dq || best_dq == -1) {
best_dq = i;
best_rdcost_dq = rdcost_dq;
}
}
mbmi->dq_off_index = best_dq;
}
#endif // QUANT_PROFILES > 1
#endif // CONFIG_NEW_QUANT
} else {
#if CONFIG_INTERINTRA
if (second_ref_frame == INTRA_FRAME) {
@ -7843,6 +7934,13 @@ void vp9_rd_pick_inter_mode_sb(VP9_COMP *cpi, MACROBLOCK *x,
mbmi->tx_skip[0] = 0;
mbmi->tx_skip[1] = 0;
#endif // CONFIG_TX_SKIP
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
if (!(cm->base_qindex > Q_THRESHOLD_MIN &&
cm->base_qindex < Q_THRESHOLD_MAX &&
switchable_dq_profile_used(mbmi->sb_type) &&
!vp9_segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)))
mbmi->dq_off_index = 0;
#endif
x->skip = 0;
set_ref_ptrs(cm, xd, mbmi->ref_frame[0], mbmi->ref_frame[1]);
for (i = 0; i < MAX_MB_PLANE; i++) {
@ -7950,6 +8048,11 @@ void vp9_rd_pick_inter_mode_sb(VP9_COMP *cpi, MACROBLOCK *x,
rate2 += rate_copy_mode;
this_rd = RDCOST(x->rdmult, x->rddiv, rate2, distortion2);
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
if (this_skip2 && mbmi->dq_off_index > 0)
mbmi->dq_off_index = 0;
#endif
if (this_rd < best_rd) {
rd_cost->rate = rate2;
rd_cost->dist = distortion2;
@ -7996,6 +8099,10 @@ void vp9_rd_pick_inter_mode_sb(VP9_COMP *cpi, MACROBLOCK *x,
*mbmi = best_mbmode;
if (mbmi->copy_mode != NOREF) {
#if CONFIG_NEW_QUANT && QUANT_PROFILES > 1
if (best_skip2)
assert(mbmi->dq_off_index == 0);
#endif // CONFIG_NEW_QUANT && QUANT_PROFILES > 1
x->skip = best_skip2;
ctx->skip = x->skip;
ctx->skippable = best_mode_skippable;
@ -8016,6 +8123,7 @@ void vp9_rd_pick_inter_mode_sb(VP9_COMP *cpi, MACROBLOCK *x,
vp9_zero(best_tx_diff);
}
#endif // CONFIG_COPY_MODE
#if CONFIG_PALETTE
if (bsize >= BLOCK_8X8 && cpi->common.allow_palette_mode &&
!is_inter_block(mbmi)) {