vpx_dsp/bitreader.h: vp9_ -> vpx_

Replace the vp9_ prefix with vpx_ in these names, since they are not codec-specific.

Change-Id: I2e583aa63dee769353ada4b42417aa15c4074ebb
parent 149822e399
commit bf82514b54
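For orientation, the rename does not change behavior: the bool-decoder API keeps the same signatures under the new vpx_ names. Below is a minimal sketch of the renamed reader in use, assuming only the calls that appear in this change (vpx_reader_init, vpx_read, vpx_read_bit, vpx_reader_has_error); the helper name and the 50% probability value are illustrative, not part of the commit.

#include <stdint.h>
#include <stddef.h>
#include "vpx_dsp/bitreader.h"

/* Hypothetical helper: decode one arithmetic-coded bit and one raw bit.
 * Returns -1 on reader-init failure or bitstream error. */
static int example_read_two_bits(const uint8_t *data, size_t size) {
  vpx_reader r;
  int coded_bit, raw_bit;

  /* NULL decrypt callback/state, exactly as in the updated unit test below. */
  if (vpx_reader_init(&r, data, size, NULL, NULL))
    return -1;

  coded_bit = vpx_read(&r, 128);  /* probability of a zero bit = 128/256 */
  raw_bit = vpx_read_bit(&r);     /* shortcut for vpx_read(&r, 128) */

  return vpx_reader_has_error(&r) ? -1 : (coded_bit << 1) | raw_bit;
}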
@@ -69,8 +69,8 @@ TEST(VP9, TestBitIO) {
 // First bit should be zero
 GTEST_ASSERT_EQ(bw_buffer[0] & 0x80, 0);
 
-vp9_reader br;
-vp9_reader_init(&br, bw_buffer, kBufferSize, NULL, NULL);
+vpx_reader br;
+vpx_reader_init(&br, bw_buffer, kBufferSize, NULL, NULL);
 bit_rnd.Reset(random_seed);
 for (int i = 0; i < kBitsToTest; ++i) {
 if (bit_method == 2) {
@@ -78,7 +78,7 @@ TEST(VP9, TestBitIO) {
 } else if (bit_method == 3) {
 bit = bit_rnd(2);
 }
-GTEST_ASSERT_EQ(vp9_read(&br, probas[i]), bit)
+GTEST_ASSERT_EQ(vpx_read(&br, probas[i]), bit)
 << "pos: " << i << " / " << kBitsToTest
 << " bit_method: " << bit_method
 << " method: " << method;
@@ -166,7 +166,7 @@ typedef struct macroblockd {
 int up_available;
 int left_available;
 
-const vp9_prob (*partition_probs)[PARTITION_TYPES - 1];
+const vpx_prob (*partition_probs)[PARTITION_TYPES - 1];
 
 /* Distance of MB away from frame edges */
 int mb_to_left_edge;
@@ -262,7 +262,7 @@ static INLINE void reset_skip_context(MACROBLOCKD *xd, BLOCK_SIZE bsize) {
 }
 }
 
-static INLINE const vp9_prob *get_y_mode_probs(const MODE_INFO *mi,
+static INLINE const vpx_prob *get_y_mode_probs(const MODE_INFO *mi,
 const MODE_INFO *above_mi,
 const MODE_INFO *left_mi,
 int block) {
@@ -27,30 +27,30 @@ const vp9_tree_index vp9_coef_con_tree[TREE_SIZE(ENTROPY_TOKENS)] = {
 -CATEGORY5_TOKEN, -CATEGORY6_TOKEN // 7 = CAT_FIVE
 };
 
-const vp9_prob vp9_cat1_prob[] = { 159 };
-const vp9_prob vp9_cat2_prob[] = { 165, 145 };
-const vp9_prob vp9_cat3_prob[] = { 173, 148, 140 };
-const vp9_prob vp9_cat4_prob[] = { 176, 155, 140, 135 };
-const vp9_prob vp9_cat5_prob[] = { 180, 157, 141, 134, 130 };
-const vp9_prob vp9_cat6_prob[] = {
+const vpx_prob vp9_cat1_prob[] = { 159 };
+const vpx_prob vp9_cat2_prob[] = { 165, 145 };
+const vpx_prob vp9_cat3_prob[] = { 173, 148, 140 };
+const vpx_prob vp9_cat4_prob[] = { 176, 155, 140, 135 };
+const vpx_prob vp9_cat5_prob[] = { 180, 157, 141, 134, 130 };
+const vpx_prob vp9_cat6_prob[] = {
 254, 254, 254, 252, 249, 243, 230, 196, 177, 153, 140, 133, 130, 129
 };
 #if CONFIG_VP9_HIGHBITDEPTH
-const vp9_prob vp9_cat1_prob_high10[] = { 159 };
-const vp9_prob vp9_cat2_prob_high10[] = { 165, 145 };
-const vp9_prob vp9_cat3_prob_high10[] = { 173, 148, 140 };
-const vp9_prob vp9_cat4_prob_high10[] = { 176, 155, 140, 135 };
-const vp9_prob vp9_cat5_prob_high10[] = { 180, 157, 141, 134, 130 };
-const vp9_prob vp9_cat6_prob_high10[] = {
+const vpx_prob vp9_cat1_prob_high10[] = { 159 };
+const vpx_prob vp9_cat2_prob_high10[] = { 165, 145 };
+const vpx_prob vp9_cat3_prob_high10[] = { 173, 148, 140 };
+const vpx_prob vp9_cat4_prob_high10[] = { 176, 155, 140, 135 };
+const vpx_prob vp9_cat5_prob_high10[] = { 180, 157, 141, 134, 130 };
+const vpx_prob vp9_cat6_prob_high10[] = {
 255, 255, 254, 254, 254, 252, 249, 243,
 230, 196, 177, 153, 140, 133, 130, 129
 };
-const vp9_prob vp9_cat1_prob_high12[] = { 159 };
-const vp9_prob vp9_cat2_prob_high12[] = { 165, 145 };
-const vp9_prob vp9_cat3_prob_high12[] = { 173, 148, 140 };
-const vp9_prob vp9_cat4_prob_high12[] = { 176, 155, 140, 135 };
-const vp9_prob vp9_cat5_prob_high12[] = { 180, 157, 141, 134, 130 };
-const vp9_prob vp9_cat6_prob_high12[] = {
+const vpx_prob vp9_cat1_prob_high12[] = { 159 };
+const vpx_prob vp9_cat2_prob_high12[] = { 165, 145 };
+const vpx_prob vp9_cat3_prob_high12[] = { 173, 148, 140 };
+const vpx_prob vp9_cat4_prob_high12[] = { 176, 155, 140, 135 };
+const vpx_prob vp9_cat5_prob_high12[] = { 180, 157, 141, 134, 130 };
+const vpx_prob vp9_cat6_prob_high12[] = {
 255, 255, 255, 255, 254, 254, 254, 252, 249,
 243, 230, 196, 177, 153, 140, 133, 130, 129
 };
@@ -147,7 +147,7 @@ const uint8_t vp9_pt_energy_class[ENTROPY_TOKENS] = {
 // by averaging :
 // vp9_pareto8_full[l][node] = (vp9_pareto8_full[l-1][node] +
 // vp9_pareto8_full[l+1][node] ) >> 1;
-const vp9_prob vp9_pareto8_full[COEFF_PROB_MODELS][MODEL_NODES] = {
+const vpx_prob vp9_pareto8_full[COEFF_PROB_MODELS][MODEL_NODES] = {
 { 3, 86, 128, 6, 86, 23, 88, 29},
 { 6, 86, 128, 11, 87, 42, 91, 52},
 { 9, 86, 129, 17, 88, 61, 94, 76},
@@ -742,14 +742,14 @@ static const vp9_coeff_probs_model default_coef_probs_32x32[PLANE_TYPES] = {
 }
 };
 
-static void extend_to_full_distribution(vp9_prob *probs, vp9_prob p) {
+static void extend_to_full_distribution(vpx_prob *probs, vpx_prob p) {
 memcpy(probs, vp9_pareto8_full[p = 0 ? 0 : p - 1],
-MODEL_NODES * sizeof(vp9_prob));
+MODEL_NODES * sizeof(vpx_prob));
 }
 
-void vp9_model_to_full_probs(const vp9_prob *model, vp9_prob *full) {
+void vp9_model_to_full_probs(const vpx_prob *model, vpx_prob *full) {
 if (full != model)
-memcpy(full, model, sizeof(vp9_prob) * UNCONSTRAINED_NODES);
+memcpy(full, model, sizeof(vpx_prob) * UNCONSTRAINED_NODES);
 extend_to_full_distribution(&full[UNCONSTRAINED_NODES], model[PIVOT_NODE]);
 }
 
@@ -77,7 +77,7 @@ DECLARE_ALIGNED(16, extern const uint8_t, vp9_cat6_prob_high12[18]);
 
 typedef struct {
 const vp9_tree_index *tree;
-const vp9_prob *prob;
+const vpx_prob *prob;
 int len;
 int base_val;
 const int16_t *cost;
@@ -161,16 +161,16 @@ static INLINE const uint8_t *get_band_translate(TX_SIZE tx_size) {
 
 #define MODEL_NODES (ENTROPY_NODES - UNCONSTRAINED_NODES)
 extern const vp9_tree_index vp9_coef_con_tree[TREE_SIZE(ENTROPY_TOKENS)];
-extern const vp9_prob vp9_pareto8_full[COEFF_PROB_MODELS][MODEL_NODES];
+extern const vpx_prob vp9_pareto8_full[COEFF_PROB_MODELS][MODEL_NODES];
 
-typedef vp9_prob vp9_coeff_probs_model[REF_TYPES][COEF_BANDS]
+typedef vpx_prob vp9_coeff_probs_model[REF_TYPES][COEF_BANDS]
 [COEFF_CONTEXTS][UNCONSTRAINED_NODES];
 
 typedef unsigned int vp9_coeff_count_model[REF_TYPES][COEF_BANDS]
 [COEFF_CONTEXTS]
 [UNCONSTRAINED_NODES + 1];
 
-void vp9_model_to_full_probs(const vp9_prob *model, vp9_prob *full);
+void vp9_model_to_full_probs(const vpx_prob *model, vpx_prob *full);
 
 typedef char ENTROPY_CONTEXT;
 
@ -13,7 +13,7 @@
|
||||
#include "vp9/common/vp9_onyxc_int.h"
|
||||
#include "vp9/common/vp9_seg_common.h"
|
||||
|
||||
const vp9_prob vp9_kf_y_mode_prob[INTRA_MODES][INTRA_MODES][INTRA_MODES - 1] = {
|
||||
const vpx_prob vp9_kf_y_mode_prob[INTRA_MODES][INTRA_MODES][INTRA_MODES - 1] = {
|
||||
{ // above = dc
|
||||
{ 137, 30, 42, 148, 151, 207, 70, 52, 91 }, // left = dc
|
||||
{ 92, 45, 102, 136, 116, 180, 74, 90, 100 }, // left = v
|
||||
@ -127,7 +127,7 @@ const vp9_prob vp9_kf_y_mode_prob[INTRA_MODES][INTRA_MODES][INTRA_MODES - 1] = {
|
||||
}
|
||||
};
|
||||
|
||||
const vp9_prob vp9_kf_uv_mode_prob[INTRA_MODES][INTRA_MODES - 1] = {
|
||||
const vpx_prob vp9_kf_uv_mode_prob[INTRA_MODES][INTRA_MODES - 1] = {
|
||||
{ 144, 11, 54, 157, 195, 130, 46, 58, 108 }, // y = dc
|
||||
{ 118, 15, 123, 148, 131, 101, 44, 93, 131 }, // y = v
|
||||
{ 113, 12, 23, 188, 226, 142, 26, 32, 125 }, // y = h
|
||||
@ -140,14 +140,14 @@ const vp9_prob vp9_kf_uv_mode_prob[INTRA_MODES][INTRA_MODES - 1] = {
|
||||
{ 102, 19, 66, 162, 182, 122, 35, 59, 128 } // y = tm
|
||||
};
|
||||
|
||||
static const vp9_prob default_if_y_probs[BLOCK_SIZE_GROUPS][INTRA_MODES - 1] = {
|
||||
static const vpx_prob default_if_y_probs[BLOCK_SIZE_GROUPS][INTRA_MODES - 1] = {
|
||||
{ 65, 32, 18, 144, 162, 194, 41, 51, 98 }, // block_size < 8x8
|
||||
{ 132, 68, 18, 165, 217, 196, 45, 40, 78 }, // block_size < 16x16
|
||||
{ 173, 80, 19, 176, 240, 193, 64, 35, 46 }, // block_size < 32x32
|
||||
{ 221, 135, 38, 194, 248, 121, 96, 85, 29 } // block_size >= 32x32
|
||||
};
|
||||
|
||||
static const vp9_prob default_if_uv_probs[INTRA_MODES][INTRA_MODES - 1] = {
|
||||
static const vpx_prob default_if_uv_probs[INTRA_MODES][INTRA_MODES - 1] = {
|
||||
{ 120, 7, 76, 176, 208, 126, 28, 54, 103 }, // y = dc
|
||||
{ 48, 12, 154, 155, 139, 90, 34, 117, 119 }, // y = v
|
||||
{ 67, 6, 25, 204, 243, 158, 13, 21, 96 }, // y = h
|
||||
@ -160,7 +160,7 @@ static const vp9_prob default_if_uv_probs[INTRA_MODES][INTRA_MODES - 1] = {
|
||||
{ 101, 21, 107, 181, 192, 103, 19, 67, 125 } // y = tm
|
||||
};
|
||||
|
||||
const vp9_prob vp9_kf_partition_probs[PARTITION_CONTEXTS]
|
||||
const vpx_prob vp9_kf_partition_probs[PARTITION_CONTEXTS]
|
||||
[PARTITION_TYPES - 1] = {
|
||||
// 8x8 -> 4x4
|
||||
{ 158, 97, 94 }, // a/l both not split
|
||||
@ -184,7 +184,7 @@ const vp9_prob vp9_kf_partition_probs[PARTITION_CONTEXTS]
|
||||
{ 12, 3, 3 }, // a/l both split
|
||||
};
|
||||
|
||||
static const vp9_prob default_partition_probs[PARTITION_CONTEXTS]
|
||||
static const vpx_prob default_partition_probs[PARTITION_CONTEXTS]
|
||||
[PARTITION_TYPES - 1] = {
|
||||
// 8x8 -> 4x4
|
||||
{ 199, 122, 141 }, // a/l both not split
|
||||
@ -208,7 +208,7 @@ static const vp9_prob default_partition_probs[PARTITION_CONTEXTS]
|
||||
{ 10, 7, 6 }, // a/l both split
|
||||
};
|
||||
|
||||
static const vp9_prob default_inter_mode_probs[INTER_MODE_CONTEXTS]
|
||||
static const vpx_prob default_inter_mode_probs[INTER_MODE_CONTEXTS]
|
||||
[INTER_MODES - 1] = {
|
||||
{2, 173, 34}, // 0 = both zero mv
|
||||
{7, 145, 85}, // 1 = one zero mv + one a predicted mv
|
||||
@ -244,19 +244,19 @@ const vp9_tree_index vp9_partition_tree[TREE_SIZE(PARTITION_TYPES)] = {
|
||||
-PARTITION_VERT, -PARTITION_SPLIT
|
||||
};
|
||||
|
||||
static const vp9_prob default_intra_inter_p[INTRA_INTER_CONTEXTS] = {
|
||||
static const vpx_prob default_intra_inter_p[INTRA_INTER_CONTEXTS] = {
|
||||
9, 102, 187, 225
|
||||
};
|
||||
|
||||
static const vp9_prob default_comp_inter_p[COMP_INTER_CONTEXTS] = {
|
||||
static const vpx_prob default_comp_inter_p[COMP_INTER_CONTEXTS] = {
|
||||
239, 183, 119, 96, 41
|
||||
};
|
||||
|
||||
static const vp9_prob default_comp_ref_p[REF_CONTEXTS] = {
|
||||
static const vpx_prob default_comp_ref_p[REF_CONTEXTS] = {
|
||||
50, 126, 123, 221, 226
|
||||
};
|
||||
|
||||
static const vp9_prob default_single_ref_p[REF_CONTEXTS][2] = {
|
||||
static const vpx_prob default_single_ref_p[REF_CONTEXTS][2] = {
|
||||
{ 33, 16 },
|
||||
{ 77, 74 },
|
||||
{ 142, 142 },
|
||||
@ -302,11 +302,11 @@ void tx_counts_to_branch_counts_8x8(const unsigned int *tx_count_8x8p,
|
||||
ct_8x8p[0][1] = tx_count_8x8p[TX_8X8];
|
||||
}
|
||||
|
||||
static const vp9_prob default_skip_probs[SKIP_CONTEXTS] = {
|
||||
static const vpx_prob default_skip_probs[SKIP_CONTEXTS] = {
|
||||
192, 128, 64
|
||||
};
|
||||
|
||||
static const vp9_prob default_switchable_interp_prob[SWITCHABLE_FILTER_CONTEXTS]
|
||||
static const vpx_prob default_switchable_interp_prob[SWITCHABLE_FILTER_CONTEXTS]
|
||||
[SWITCHABLE_FILTERS - 1] = {
|
||||
{ 235, 162, },
|
||||
{ 36, 255, },
|
||||
|
@@ -28,9 +28,9 @@ extern "C" {
 struct VP9Common;
 
 struct tx_probs {
-vp9_prob p32x32[TX_SIZE_CONTEXTS][TX_SIZES - 1];
-vp9_prob p16x16[TX_SIZE_CONTEXTS][TX_SIZES - 2];
-vp9_prob p8x8[TX_SIZE_CONTEXTS][TX_SIZES - 3];
+vpx_prob p32x32[TX_SIZE_CONTEXTS][TX_SIZES - 1];
+vpx_prob p16x16[TX_SIZE_CONTEXTS][TX_SIZES - 2];
+vpx_prob p8x8[TX_SIZE_CONTEXTS][TX_SIZES - 3];
 };
 
 struct tx_counts {
@ -41,19 +41,19 @@ struct tx_counts {
|
||||
};
|
||||
|
||||
typedef struct frame_contexts {
|
||||
vp9_prob y_mode_prob[BLOCK_SIZE_GROUPS][INTRA_MODES - 1];
|
||||
vp9_prob uv_mode_prob[INTRA_MODES][INTRA_MODES - 1];
|
||||
vp9_prob partition_prob[PARTITION_CONTEXTS][PARTITION_TYPES - 1];
|
||||
vpx_prob y_mode_prob[BLOCK_SIZE_GROUPS][INTRA_MODES - 1];
|
||||
vpx_prob uv_mode_prob[INTRA_MODES][INTRA_MODES - 1];
|
||||
vpx_prob partition_prob[PARTITION_CONTEXTS][PARTITION_TYPES - 1];
|
||||
vp9_coeff_probs_model coef_probs[TX_SIZES][PLANE_TYPES];
|
||||
vp9_prob switchable_interp_prob[SWITCHABLE_FILTER_CONTEXTS]
|
||||
vpx_prob switchable_interp_prob[SWITCHABLE_FILTER_CONTEXTS]
|
||||
[SWITCHABLE_FILTERS - 1];
|
||||
vp9_prob inter_mode_probs[INTER_MODE_CONTEXTS][INTER_MODES - 1];
|
||||
vp9_prob intra_inter_prob[INTRA_INTER_CONTEXTS];
|
||||
vp9_prob comp_inter_prob[COMP_INTER_CONTEXTS];
|
||||
vp9_prob single_ref_prob[REF_CONTEXTS][2];
|
||||
vp9_prob comp_ref_prob[REF_CONTEXTS];
|
||||
vpx_prob inter_mode_probs[INTER_MODE_CONTEXTS][INTER_MODES - 1];
|
||||
vpx_prob intra_inter_prob[INTRA_INTER_CONTEXTS];
|
||||
vpx_prob comp_inter_prob[COMP_INTER_CONTEXTS];
|
||||
vpx_prob single_ref_prob[REF_CONTEXTS][2];
|
||||
vpx_prob comp_ref_prob[REF_CONTEXTS];
|
||||
struct tx_probs tx_probs;
|
||||
vp9_prob skip_probs[SKIP_CONTEXTS];
|
||||
vpx_prob skip_probs[SKIP_CONTEXTS];
|
||||
nmv_context nmvc;
|
||||
int initialized;
|
||||
} FRAME_CONTEXT;
|
||||
@ -77,10 +77,10 @@ typedef struct FRAME_COUNTS {
|
||||
nmv_context_counts mv;
|
||||
} FRAME_COUNTS;
|
||||
|
||||
extern const vp9_prob vp9_kf_uv_mode_prob[INTRA_MODES][INTRA_MODES - 1];
|
||||
extern const vp9_prob vp9_kf_y_mode_prob[INTRA_MODES][INTRA_MODES]
|
||||
extern const vpx_prob vp9_kf_uv_mode_prob[INTRA_MODES][INTRA_MODES - 1];
|
||||
extern const vpx_prob vp9_kf_y_mode_prob[INTRA_MODES][INTRA_MODES]
|
||||
[INTRA_MODES - 1];
|
||||
extern const vp9_prob vp9_kf_partition_probs[PARTITION_CONTEXTS]
|
||||
extern const vpx_prob vp9_kf_partition_probs[PARTITION_CONTEXTS]
|
||||
[PARTITION_TYPES - 1];
|
||||
extern const vp9_tree_index vp9_intra_mode_tree[TREE_SIZE(INTRA_MODES)];
|
||||
extern const vp9_tree_index vp9_inter_mode_tree[TREE_SIZE(INTER_MODES)];
|
||||
|
@@ -83,18 +83,18 @@ extern const vp9_tree_index vp9_mv_class0_tree[];
 extern const vp9_tree_index vp9_mv_fp_tree[];
 
 typedef struct {
-vp9_prob sign;
-vp9_prob classes[MV_CLASSES - 1];
-vp9_prob class0[CLASS0_SIZE - 1];
-vp9_prob bits[MV_OFFSET_BITS];
-vp9_prob class0_fp[CLASS0_SIZE][MV_FP_SIZE - 1];
-vp9_prob fp[MV_FP_SIZE - 1];
-vp9_prob class0_hp;
-vp9_prob hp;
+vpx_prob sign;
+vpx_prob classes[MV_CLASSES - 1];
+vpx_prob class0[CLASS0_SIZE - 1];
+vpx_prob bits[MV_OFFSET_BITS];
+vpx_prob class0_fp[CLASS0_SIZE][MV_FP_SIZE - 1];
+vpx_prob fp[MV_FP_SIZE - 1];
+vpx_prob class0_hp;
+vpx_prob hp;
 } nmv_component;
 
 typedef struct {
-vp9_prob joints[MV_JOINTS - 1];
+vpx_prob joints[MV_JOINTS - 1];
 nmv_component comps[2];
 } nmv_context;
 
@ -345,7 +345,7 @@ static INLINE void set_partition_probs(const VP9_COMMON *const cm,
|
||||
xd->partition_probs =
|
||||
frame_is_intra_only(cm) ?
|
||||
&vp9_kf_partition_probs[0] :
|
||||
(const vp9_prob (*)[PARTITION_TYPES - 1])cm->fc->partition_prob;
|
||||
(const vpx_prob (*)[PARTITION_TYPES - 1])cm->fc->partition_prob;
|
||||
}
|
||||
|
||||
static INLINE void vp9_init_macroblockd(VP9_COMMON *cm, MACROBLOCKD *xd,
|
||||
@ -373,7 +373,7 @@ static INLINE void vp9_init_macroblockd(VP9_COMMON *cm, MACROBLOCKD *xd,
|
||||
set_partition_probs(cm, xd);
|
||||
}
|
||||
|
||||
static INLINE const vp9_prob* get_partition_probs(const MACROBLOCKD *xd,
|
||||
static INLINE const vpx_prob* get_partition_probs(const MACROBLOCKD *xd,
|
||||
int ctx) {
|
||||
return xd->partition_probs[ctx];
|
||||
}
|
||||
|
@ -47,7 +47,7 @@ static INLINE int vp9_get_pred_context_seg_id(const MACROBLOCKD *xd) {
|
||||
return above_sip + left_sip;
|
||||
}
|
||||
|
||||
static INLINE vp9_prob vp9_get_pred_prob_seg_id(const struct segmentation *seg,
|
||||
static INLINE vpx_prob vp9_get_pred_prob_seg_id(const struct segmentation *seg,
|
||||
const MACROBLOCKD *xd) {
|
||||
return seg->pred_probs[vp9_get_pred_context_seg_id(xd)];
|
||||
}
|
||||
@ -60,7 +60,7 @@ static INLINE int vp9_get_skip_context(const MACROBLOCKD *xd) {
|
||||
return above_skip + left_skip;
|
||||
}
|
||||
|
||||
static INLINE vp9_prob vp9_get_skip_prob(const VP9_COMMON *cm,
|
||||
static INLINE vpx_prob vp9_get_skip_prob(const VP9_COMMON *cm,
|
||||
const MACROBLOCKD *xd) {
|
||||
return cm->fc->skip_probs[vp9_get_skip_context(xd)];
|
||||
}
|
||||
@ -69,14 +69,14 @@ int vp9_get_pred_context_switchable_interp(const MACROBLOCKD *xd);
|
||||
|
||||
int vp9_get_intra_inter_context(const MACROBLOCKD *xd);
|
||||
|
||||
static INLINE vp9_prob vp9_get_intra_inter_prob(const VP9_COMMON *cm,
|
||||
static INLINE vpx_prob vp9_get_intra_inter_prob(const VP9_COMMON *cm,
|
||||
const MACROBLOCKD *xd) {
|
||||
return cm->fc->intra_inter_prob[vp9_get_intra_inter_context(xd)];
|
||||
}
|
||||
|
||||
int vp9_get_reference_mode_context(const VP9_COMMON *cm, const MACROBLOCKD *xd);
|
||||
|
||||
static INLINE vp9_prob vp9_get_reference_mode_prob(const VP9_COMMON *cm,
|
||||
static INLINE vpx_prob vp9_get_reference_mode_prob(const VP9_COMMON *cm,
|
||||
const MACROBLOCKD *xd) {
|
||||
return cm->fc->comp_inter_prob[vp9_get_reference_mode_context(cm, xd)];
|
||||
}
|
||||
@ -84,7 +84,7 @@ static INLINE vp9_prob vp9_get_reference_mode_prob(const VP9_COMMON *cm,
|
||||
int vp9_get_pred_context_comp_ref_p(const VP9_COMMON *cm,
|
||||
const MACROBLOCKD *xd);
|
||||
|
||||
static INLINE vp9_prob vp9_get_pred_prob_comp_ref_p(const VP9_COMMON *cm,
|
||||
static INLINE vpx_prob vp9_get_pred_prob_comp_ref_p(const VP9_COMMON *cm,
|
||||
const MACROBLOCKD *xd) {
|
||||
const int pred_context = vp9_get_pred_context_comp_ref_p(cm, xd);
|
||||
return cm->fc->comp_ref_prob[pred_context];
|
||||
@ -92,14 +92,14 @@ static INLINE vp9_prob vp9_get_pred_prob_comp_ref_p(const VP9_COMMON *cm,
|
||||
|
||||
int vp9_get_pred_context_single_ref_p1(const MACROBLOCKD *xd);
|
||||
|
||||
static INLINE vp9_prob vp9_get_pred_prob_single_ref_p1(const VP9_COMMON *cm,
|
||||
static INLINE vpx_prob vp9_get_pred_prob_single_ref_p1(const VP9_COMMON *cm,
|
||||
const MACROBLOCKD *xd) {
|
||||
return cm->fc->single_ref_prob[vp9_get_pred_context_single_ref_p1(xd)][0];
|
||||
}
|
||||
|
||||
int vp9_get_pred_context_single_ref_p2(const MACROBLOCKD *xd);
|
||||
|
||||
static INLINE vp9_prob vp9_get_pred_prob_single_ref_p2(const VP9_COMMON *cm,
|
||||
static INLINE vpx_prob vp9_get_pred_prob_single_ref_p2(const VP9_COMMON *cm,
|
||||
const MACROBLOCKD *xd) {
|
||||
return cm->fc->single_ref_prob[vp9_get_pred_context_single_ref_p2(xd)][1];
|
||||
}
|
||||
@ -127,7 +127,7 @@ static INLINE int get_tx_size_context(const MACROBLOCKD *xd) {
|
||||
return (above_ctx + left_ctx) > max_tx_size;
|
||||
}
|
||||
|
||||
static INLINE const vp9_prob *get_tx_probs(TX_SIZE max_tx_size, int ctx,
|
||||
static INLINE const vpx_prob *get_tx_probs(TX_SIZE max_tx_size, int ctx,
|
||||
const struct tx_probs *tx_probs) {
|
||||
switch (max_tx_size) {
|
||||
case TX_8X8:
|
||||
@ -142,7 +142,7 @@ static INLINE const vp9_prob *get_tx_probs(TX_SIZE max_tx_size, int ctx,
|
||||
}
|
||||
}
|
||||
|
||||
static INLINE const vp9_prob *get_tx_probs2(TX_SIZE max_tx_size,
|
||||
static INLINE const vpx_prob *get_tx_probs2(TX_SIZE max_tx_size,
|
||||
const MACROBLOCKD *xd,
|
||||
const struct tx_probs *tx_probs) {
|
||||
return get_tx_probs(max_tx_size, get_tx_size_context(xd), tx_probs);
|
||||
|
@@ -42,8 +42,8 @@ struct segmentation {
 uint8_t abs_delta;
 uint8_t temporal_update;
 
-vp9_prob tree_probs[SEG_TREE_PROBS];
-vp9_prob pred_probs[PREDICTION_PROBS];
+vpx_prob tree_probs[SEG_TREE_PROBS];
+vpx_prob pred_probs[PREDICTION_PROBS];
 
 int16_t feature_data[MAX_SEGMENTS][SEG_LVL_MAX];
 unsigned int feature_mask[MAX_SEGMENTS];
@ -74,19 +74,19 @@ static int read_is_valid(const uint8_t *start, size_t len, const uint8_t *end) {
|
||||
return len != 0 && len <= (size_t)(end - start);
|
||||
}
|
||||
|
||||
static int decode_unsigned_max(struct vp9_read_bit_buffer *rb, int max) {
|
||||
static int decode_unsigned_max(struct vpx_read_bit_buffer *rb, int max) {
|
||||
const int data = vp9_rb_read_literal(rb, get_unsigned_bits(max));
|
||||
return data > max ? max : data;
|
||||
}
|
||||
|
||||
static TX_MODE read_tx_mode(vp9_reader *r) {
|
||||
TX_MODE tx_mode = vp9_read_literal(r, 2);
|
||||
static TX_MODE read_tx_mode(vpx_reader *r) {
|
||||
TX_MODE tx_mode = vpx_read_literal(r, 2);
|
||||
if (tx_mode == ALLOW_32X32)
|
||||
tx_mode += vp9_read_bit(r);
|
||||
tx_mode += vpx_read_bit(r);
|
||||
return tx_mode;
|
||||
}
|
||||
|
||||
static void read_tx_mode_probs(struct tx_probs *tx_probs, vp9_reader *r) {
|
||||
static void read_tx_mode_probs(struct tx_probs *tx_probs, vpx_reader *r) {
|
||||
int i, j;
|
||||
|
||||
for (i = 0; i < TX_SIZE_CONTEXTS; ++i)
|
||||
@ -102,14 +102,14 @@ static void read_tx_mode_probs(struct tx_probs *tx_probs, vp9_reader *r) {
|
||||
vp9_diff_update_prob(r, &tx_probs->p32x32[i][j]);
|
||||
}
|
||||
|
||||
static void read_switchable_interp_probs(FRAME_CONTEXT *fc, vp9_reader *r) {
|
||||
static void read_switchable_interp_probs(FRAME_CONTEXT *fc, vpx_reader *r) {
|
||||
int i, j;
|
||||
for (j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j)
|
||||
for (i = 0; i < SWITCHABLE_FILTERS - 1; ++i)
|
||||
vp9_diff_update_prob(r, &fc->switchable_interp_prob[j][i]);
|
||||
}
|
||||
|
||||
static void read_inter_mode_probs(FRAME_CONTEXT *fc, vp9_reader *r) {
|
||||
static void read_inter_mode_probs(FRAME_CONTEXT *fc, vpx_reader *r) {
|
||||
int i, j;
|
||||
for (i = 0; i < INTER_MODE_CONTEXTS; ++i)
|
||||
for (j = 0; j < INTER_MODES - 1; ++j)
|
||||
@ -117,9 +117,9 @@ static void read_inter_mode_probs(FRAME_CONTEXT *fc, vp9_reader *r) {
|
||||
}
|
||||
|
||||
static REFERENCE_MODE read_frame_reference_mode(const VP9_COMMON *cm,
|
||||
vp9_reader *r) {
|
||||
vpx_reader *r) {
|
||||
if (is_compound_reference_allowed(cm)) {
|
||||
return vp9_read_bit(r) ? (vp9_read_bit(r) ? REFERENCE_MODE_SELECT
|
||||
return vpx_read_bit(r) ? (vpx_read_bit(r) ? REFERENCE_MODE_SELECT
|
||||
: COMPOUND_REFERENCE)
|
||||
: SINGLE_REFERENCE;
|
||||
} else {
|
||||
@ -127,7 +127,7 @@ static REFERENCE_MODE read_frame_reference_mode(const VP9_COMMON *cm,
|
||||
}
|
||||
}
|
||||
|
||||
static void read_frame_reference_mode_probs(VP9_COMMON *cm, vp9_reader *r) {
|
||||
static void read_frame_reference_mode_probs(VP9_COMMON *cm, vpx_reader *r) {
|
||||
FRAME_CONTEXT *const fc = cm->fc;
|
||||
int i;
|
||||
|
||||
@ -146,14 +146,14 @@ static void read_frame_reference_mode_probs(VP9_COMMON *cm, vp9_reader *r) {
|
||||
vp9_diff_update_prob(r, &fc->comp_ref_prob[i]);
|
||||
}
|
||||
|
||||
static void update_mv_probs(vp9_prob *p, int n, vp9_reader *r) {
|
||||
static void update_mv_probs(vpx_prob *p, int n, vpx_reader *r) {
|
||||
int i;
|
||||
for (i = 0; i < n; ++i)
|
||||
if (vp9_read(r, MV_UPDATE_PROB))
|
||||
p[i] = (vp9_read_literal(r, 7) << 1) | 1;
|
||||
if (vpx_read(r, MV_UPDATE_PROB))
|
||||
p[i] = (vpx_read_literal(r, 7) << 1) | 1;
|
||||
}
|
||||
|
||||
static void read_mv_probs(nmv_context *ctx, int allow_hp, vp9_reader *r) {
|
||||
static void read_mv_probs(nmv_context *ctx, int allow_hp, vpx_reader *r) {
|
||||
int i, j;
|
||||
|
||||
update_mv_probs(ctx->joints, MV_JOINTS - 1, r);
|
||||
@ -362,7 +362,7 @@ static void inverse_transform_block_intra(MACROBLOCKD* xd, int plane,
|
||||
}
|
||||
|
||||
static void predict_and_reconstruct_intra_block(MACROBLOCKD *const xd,
|
||||
vp9_reader *r,
|
||||
vpx_reader *r,
|
||||
MB_MODE_INFO *const mbmi,
|
||||
int plane,
|
||||
int row, int col,
|
||||
@ -392,7 +392,7 @@ static void predict_and_reconstruct_intra_block(MACROBLOCKD *const xd,
|
||||
}
|
||||
}
|
||||
|
||||
static int reconstruct_inter_block(MACROBLOCKD *const xd, vp9_reader *r,
|
||||
static int reconstruct_inter_block(MACROBLOCKD *const xd, vpx_reader *r,
|
||||
MB_MODE_INFO *const mbmi, int plane,
|
||||
int row, int col, TX_SIZE tx_size) {
|
||||
struct macroblockd_plane *const pd = &xd->plane[plane];
|
||||
@ -812,7 +812,7 @@ static MB_MODE_INFO *set_offsets(VP9_COMMON *const cm, MACROBLOCKD *const xd,
|
||||
|
||||
static void decode_block(VP9Decoder *const pbi, MACROBLOCKD *const xd,
|
||||
int mi_row, int mi_col,
|
||||
vp9_reader *r, BLOCK_SIZE bsize,
|
||||
vpx_reader *r, BLOCK_SIZE bsize,
|
||||
int bwl, int bhl) {
|
||||
VP9_COMMON *const cm = &pbi->common;
|
||||
const int less8x8 = bsize < BLOCK_8X8;
|
||||
@ -832,7 +832,7 @@ static void decode_block(VP9Decoder *const pbi, MACROBLOCKD *const xd,
|
||||
VPX_CODEC_CORRUPT_FRAME, "Invalid block size.");
|
||||
}
|
||||
|
||||
vp9_read_mode_info(pbi, xd, mi_row, mi_col, r, x_mis, y_mis);
|
||||
vpx_read_mode_info(pbi, xd, mi_row, mi_col, r, x_mis, y_mis);
|
||||
|
||||
if (mbmi->skip) {
|
||||
dec_reset_skip_context(xd);
|
||||
@ -893,7 +893,7 @@ static void decode_block(VP9Decoder *const pbi, MACROBLOCKD *const xd,
|
||||
}
|
||||
}
|
||||
|
||||
xd->corrupted |= vp9_reader_has_error(r);
|
||||
xd->corrupted |= vpx_reader_has_error(r);
|
||||
}
|
||||
|
||||
static INLINE int dec_partition_plane_context(const MACROBLOCKD *xd,
|
||||
@ -923,19 +923,19 @@ static INLINE void dec_update_partition_context(MACROBLOCKD *xd,
|
||||
}
|
||||
|
||||
static PARTITION_TYPE read_partition(MACROBLOCKD *xd, int mi_row, int mi_col,
|
||||
vp9_reader *r,
|
||||
vpx_reader *r,
|
||||
int has_rows, int has_cols, int bsl) {
|
||||
const int ctx = dec_partition_plane_context(xd, mi_row, mi_col, bsl);
|
||||
const vp9_prob *const probs = get_partition_probs(xd, ctx);
|
||||
const vpx_prob *const probs = get_partition_probs(xd, ctx);
|
||||
FRAME_COUNTS *counts = xd->counts;
|
||||
PARTITION_TYPE p;
|
||||
|
||||
if (has_rows && has_cols)
|
||||
p = (PARTITION_TYPE)vp9_read_tree(r, vp9_partition_tree, probs);
|
||||
p = (PARTITION_TYPE)vpx_read_tree(r, vp9_partition_tree, probs);
|
||||
else if (!has_rows && has_cols)
|
||||
p = vp9_read(r, probs[1]) ? PARTITION_SPLIT : PARTITION_HORZ;
|
||||
p = vpx_read(r, probs[1]) ? PARTITION_SPLIT : PARTITION_HORZ;
|
||||
else if (has_rows && !has_cols)
|
||||
p = vp9_read(r, probs[2]) ? PARTITION_SPLIT : PARTITION_VERT;
|
||||
p = vpx_read(r, probs[2]) ? PARTITION_SPLIT : PARTITION_VERT;
|
||||
else
|
||||
p = PARTITION_SPLIT;
|
||||
|
||||
@ -948,7 +948,7 @@ static PARTITION_TYPE read_partition(MACROBLOCKD *xd, int mi_row, int mi_col,
|
||||
// TODO(slavarnway): eliminate bsize and subsize in future commits
|
||||
static void decode_partition(VP9Decoder *const pbi, MACROBLOCKD *const xd,
|
||||
int mi_row, int mi_col,
|
||||
vp9_reader* r, BLOCK_SIZE bsize, int n4x4_l2) {
|
||||
vpx_reader* r, BLOCK_SIZE bsize, int n4x4_l2) {
|
||||
VP9_COMMON *const cm = &pbi->common;
|
||||
const int n8x8_l2 = n4x4_l2 - 1;
|
||||
const int num_8x8_wh = 1 << n8x8_l2;
|
||||
@ -1008,7 +1008,7 @@ static void setup_token_decoder(const uint8_t *data,
|
||||
const uint8_t *data_end,
|
||||
size_t read_size,
|
||||
struct vpx_internal_error_info *error_info,
|
||||
vp9_reader *r,
|
||||
vpx_reader *r,
|
||||
vpx_decrypt_cb decrypt_cb,
|
||||
void *decrypt_state) {
|
||||
// Validate the calculated partition length. If the buffer
|
||||
@ -1018,16 +1018,16 @@ static void setup_token_decoder(const uint8_t *data,
|
||||
vpx_internal_error(error_info, VPX_CODEC_CORRUPT_FRAME,
|
||||
"Truncated packet or corrupt tile length");
|
||||
|
||||
if (vp9_reader_init(r, data, read_size, decrypt_cb, decrypt_state))
|
||||
if (vpx_reader_init(r, data, read_size, decrypt_cb, decrypt_state))
|
||||
vpx_internal_error(error_info, VPX_CODEC_MEM_ERROR,
|
||||
"Failed to allocate bool decoder %d", 1);
|
||||
}
|
||||
|
||||
static void read_coef_probs_common(vp9_coeff_probs_model *coef_probs,
|
||||
vp9_reader *r) {
|
||||
vpx_reader *r) {
|
||||
int i, j, k, l, m;
|
||||
|
||||
if (vp9_read_bit(r))
|
||||
if (vpx_read_bit(r))
|
||||
for (i = 0; i < PLANE_TYPES; ++i)
|
||||
for (j = 0; j < REF_TYPES; ++j)
|
||||
for (k = 0; k < COEF_BANDS; ++k)
|
||||
@ -1037,7 +1037,7 @@ static void read_coef_probs_common(vp9_coeff_probs_model *coef_probs,
|
||||
}
|
||||
|
||||
static void read_coef_probs(FRAME_CONTEXT *fc, TX_MODE tx_mode,
|
||||
vp9_reader *r) {
|
||||
vpx_reader *r) {
|
||||
const TX_SIZE max_tx_size = tx_mode_to_biggest_tx_size[tx_mode];
|
||||
TX_SIZE tx_size;
|
||||
for (tx_size = TX_4X4; tx_size <= max_tx_size; ++tx_size)
|
||||
@ -1045,7 +1045,7 @@ static void read_coef_probs(FRAME_CONTEXT *fc, TX_MODE tx_mode,
|
||||
}
|
||||
|
||||
static void setup_segmentation(struct segmentation *seg,
|
||||
struct vp9_read_bit_buffer *rb) {
|
||||
struct vpx_read_bit_buffer *rb) {
|
||||
int i, j;
|
||||
|
||||
seg->update_map = 0;
|
||||
@ -1097,7 +1097,7 @@ static void setup_segmentation(struct segmentation *seg,
|
||||
}
|
||||
|
||||
static void setup_loopfilter(struct loopfilter *lf,
|
||||
struct vp9_read_bit_buffer *rb) {
|
||||
struct vpx_read_bit_buffer *rb) {
|
||||
lf->filter_level = vp9_rb_read_literal(rb, 6);
|
||||
lf->sharpness_level = vp9_rb_read_literal(rb, 3);
|
||||
|
||||
@ -1122,12 +1122,12 @@ static void setup_loopfilter(struct loopfilter *lf,
|
||||
}
|
||||
}
|
||||
|
||||
static INLINE int read_delta_q(struct vp9_read_bit_buffer *rb) {
|
||||
static INLINE int read_delta_q(struct vpx_read_bit_buffer *rb) {
|
||||
return vp9_rb_read_bit(rb) ? vp9_rb_read_signed_literal(rb, 4) : 0;
|
||||
}
|
||||
|
||||
static void setup_quantization(VP9_COMMON *const cm, MACROBLOCKD *const xd,
|
||||
struct vp9_read_bit_buffer *rb) {
|
||||
struct vpx_read_bit_buffer *rb) {
|
||||
cm->base_qindex = vp9_rb_read_literal(rb, QINDEX_BITS);
|
||||
cm->y_dc_delta_q = read_delta_q(rb);
|
||||
cm->uv_dc_delta_q = read_delta_q(rb);
|
||||
@ -1170,7 +1170,7 @@ static void setup_segmentation_dequant(VP9_COMMON *const cm) {
|
||||
}
|
||||
}
|
||||
|
||||
static INTERP_FILTER read_interp_filter(struct vp9_read_bit_buffer *rb) {
|
||||
static INTERP_FILTER read_interp_filter(struct vpx_read_bit_buffer *rb) {
|
||||
const INTERP_FILTER literal_to_filter[] = { EIGHTTAP_SMOOTH,
|
||||
EIGHTTAP,
|
||||
EIGHTTAP_SHARP,
|
||||
@ -1179,7 +1179,7 @@ static INTERP_FILTER read_interp_filter(struct vp9_read_bit_buffer *rb) {
|
||||
: literal_to_filter[vp9_rb_read_literal(rb, 2)];
|
||||
}
|
||||
|
||||
static void setup_display_size(VP9_COMMON *cm, struct vp9_read_bit_buffer *rb) {
|
||||
static void setup_display_size(VP9_COMMON *cm, struct vpx_read_bit_buffer *rb) {
|
||||
cm->display_width = cm->width;
|
||||
cm->display_height = cm->height;
|
||||
if (vp9_rb_read_bit(rb))
|
||||
@ -1226,7 +1226,7 @@ static void resize_context_buffers(VP9_COMMON *cm, int width, int height) {
|
||||
}
|
||||
}
|
||||
|
||||
static void setup_frame_size(VP9_COMMON *cm, struct vp9_read_bit_buffer *rb) {
|
||||
static void setup_frame_size(VP9_COMMON *cm, struct vpx_read_bit_buffer *rb) {
|
||||
int width, height;
|
||||
BufferPool *const pool = cm->buffer_pool;
|
||||
vp9_read_frame_size(rb, &width, &height);
|
||||
@ -1265,7 +1265,7 @@ static INLINE int valid_ref_frame_img_fmt(vpx_bit_depth_t ref_bit_depth,
|
||||
}
|
||||
|
||||
static void setup_frame_size_with_refs(VP9_COMMON *cm,
|
||||
struct vp9_read_bit_buffer *rb) {
|
||||
struct vpx_read_bit_buffer *rb) {
|
||||
int width, height;
|
||||
int found = 0, i;
|
||||
int has_valid_ref_frame = 0;
|
||||
@ -1337,7 +1337,7 @@ static void setup_frame_size_with_refs(VP9_COMMON *cm,
|
||||
pool->frame_bufs[cm->new_fb_idx].buf.color_space = cm->color_space;
|
||||
}
|
||||
|
||||
static void setup_tile_info(VP9_COMMON *cm, struct vp9_read_bit_buffer *rb) {
|
||||
static void setup_tile_info(VP9_COMMON *cm, struct vpx_read_bit_buffer *rb) {
|
||||
int min_log2_tile_cols, max_log2_tile_cols, max_ones;
|
||||
vp9_get_tile_n_bits(cm->mi_cols, &min_log2_tile_cols, &max_log2_tile_cols);
|
||||
|
||||
@ -1556,7 +1556,7 @@ static const uint8_t *decode_tiles(VP9Decoder *pbi,
|
||||
|
||||
if (pbi->frame_parallel_decode)
|
||||
vp9_frameworker_broadcast(pbi->cur_buf, INT_MAX);
|
||||
return vp9_reader_find_end(&tile_data->bit_reader);
|
||||
return vpx_reader_find_end(&tile_data->bit_reader);
|
||||
}
|
||||
|
||||
static int tile_worker_hook(TileWorkerData *const tile_data,
|
||||
@ -1735,7 +1735,7 @@ static const uint8_t *decode_tiles_mt(VP9Decoder *pbi,
|
||||
if (final_worker > -1) {
|
||||
TileWorkerData *const tile_data =
|
||||
(TileWorkerData*)pbi->tile_workers[final_worker].data1;
|
||||
bit_reader_end = vp9_reader_find_end(&tile_data->bit_reader);
|
||||
bit_reader_end = vpx_reader_find_end(&tile_data->bit_reader);
|
||||
final_worker = -1;
|
||||
}
|
||||
|
||||
@ -1758,7 +1758,7 @@ static void error_handler(void *data) {
|
||||
}
|
||||
|
||||
static void read_bitdepth_colorspace_sampling(
|
||||
VP9_COMMON *cm, struct vp9_read_bit_buffer *rb) {
|
||||
VP9_COMMON *cm, struct vpx_read_bit_buffer *rb) {
|
||||
if (cm->profile >= PROFILE_2) {
|
||||
cm->bit_depth = vp9_rb_read_bit(rb) ? VPX_BITS_12 : VPX_BITS_10;
|
||||
#if CONFIG_VP9_HIGHBITDEPTH
|
||||
@ -1801,7 +1801,7 @@ static void read_bitdepth_colorspace_sampling(
|
||||
}
|
||||
|
||||
static size_t read_uncompressed_header(VP9Decoder *pbi,
|
||||
struct vp9_read_bit_buffer *rb) {
|
||||
struct vpx_read_bit_buffer *rb) {
|
||||
VP9_COMMON *const cm = &pbi->common;
|
||||
BufferPool *const pool = cm->buffer_pool;
|
||||
RefCntBuffer *const frame_bufs = pool->frame_bufs;
|
||||
@ -2007,10 +2007,10 @@ static int read_compressed_header(VP9Decoder *pbi, const uint8_t *data,
|
||||
VP9_COMMON *const cm = &pbi->common;
|
||||
MACROBLOCKD *const xd = &pbi->mb;
|
||||
FRAME_CONTEXT *const fc = cm->fc;
|
||||
vp9_reader r;
|
||||
vpx_reader r;
|
||||
int k;
|
||||
|
||||
if (vp9_reader_init(&r, data, partition_size, pbi->decrypt_cb,
|
||||
if (vpx_reader_init(&r, data, partition_size, pbi->decrypt_cb,
|
||||
pbi->decrypt_state))
|
||||
vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
|
||||
"Failed to allocate bool decoder 0");
|
||||
@ -2051,7 +2051,7 @@ static int read_compressed_header(VP9Decoder *pbi, const uint8_t *data,
|
||||
read_mv_probs(nmvc, cm->allow_high_precision_mv, &r);
|
||||
}
|
||||
|
||||
return vp9_reader_has_error(&r);
|
||||
return vpx_reader_has_error(&r);
|
||||
}
|
||||
|
||||
#ifdef NDEBUG
|
||||
@ -2091,9 +2091,9 @@ static void debug_check_frame_counts(const VP9_COMMON *const cm) {
|
||||
}
|
||||
#endif // NDEBUG
|
||||
|
||||
static struct vp9_read_bit_buffer *init_read_bit_buffer(
|
||||
static struct vpx_read_bit_buffer *init_read_bit_buffer(
|
||||
VP9Decoder *pbi,
|
||||
struct vp9_read_bit_buffer *rb,
|
||||
struct vpx_read_bit_buffer *rb,
|
||||
const uint8_t *data,
|
||||
const uint8_t *data_end,
|
||||
uint8_t clear_data[MAX_VP9_HEADER_SIZE]) {
|
||||
@ -2114,19 +2114,19 @@ static struct vp9_read_bit_buffer *init_read_bit_buffer(
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
int vp9_read_sync_code(struct vp9_read_bit_buffer *const rb) {
|
||||
int vp9_read_sync_code(struct vpx_read_bit_buffer *const rb) {
|
||||
return vp9_rb_read_literal(rb, 8) == VP9_SYNC_CODE_0 &&
|
||||
vp9_rb_read_literal(rb, 8) == VP9_SYNC_CODE_1 &&
|
||||
vp9_rb_read_literal(rb, 8) == VP9_SYNC_CODE_2;
|
||||
}
|
||||
|
||||
void vp9_read_frame_size(struct vp9_read_bit_buffer *rb,
|
||||
void vp9_read_frame_size(struct vpx_read_bit_buffer *rb,
|
||||
int *width, int *height) {
|
||||
*width = vp9_rb_read_literal(rb, 16) + 1;
|
||||
*height = vp9_rb_read_literal(rb, 16) + 1;
|
||||
}
|
||||
|
||||
BITSTREAM_PROFILE vp9_read_profile(struct vp9_read_bit_buffer *rb) {
|
||||
BITSTREAM_PROFILE vp9_read_profile(struct vpx_read_bit_buffer *rb) {
|
||||
int profile = vp9_rb_read_bit(rb);
|
||||
profile |= vp9_rb_read_bit(rb) << 1;
|
||||
if (profile > 2)
|
||||
@ -2139,7 +2139,7 @@ void vp9_decode_frame(VP9Decoder *pbi,
|
||||
const uint8_t **p_data_end) {
|
||||
VP9_COMMON *const cm = &pbi->common;
|
||||
MACROBLOCKD *const xd = &pbi->mb;
|
||||
struct vp9_read_bit_buffer rb;
|
||||
struct vpx_read_bit_buffer rb;
|
||||
int context_updated = 0;
|
||||
uint8_t clear_data[MAX_VP9_HEADER_SIZE];
|
||||
const size_t first_partition_size = read_uncompressed_header(pbi,
|
||||
|
@@ -17,12 +17,12 @@ extern "C" {
 #endif
 
 struct VP9Decoder;
-struct vp9_read_bit_buffer;
+struct vpx_read_bit_buffer;
 
-int vp9_read_sync_code(struct vp9_read_bit_buffer *const rb);
-void vp9_read_frame_size(struct vp9_read_bit_buffer *rb,
+int vp9_read_sync_code(struct vpx_read_bit_buffer *const rb);
+void vp9_read_frame_size(struct vpx_read_bit_buffer *rb,
 int *width, int *height);
-BITSTREAM_PROFILE vp9_read_profile(struct vp9_read_bit_buffer *rb);
+BITSTREAM_PROFILE vp9_read_profile(struct vpx_read_bit_buffer *rb);
 
 void vp9_decode_frame(struct VP9Decoder *pbi,
 const uint8_t *data, const uint8_t *data_end,
@ -22,12 +22,12 @@
|
||||
#include "vp9/decoder/vp9_decodemv.h"
|
||||
#include "vp9/decoder/vp9_decodeframe.h"
|
||||
|
||||
static PREDICTION_MODE read_intra_mode(vp9_reader *r, const vp9_prob *p) {
|
||||
return (PREDICTION_MODE)vp9_read_tree(r, vp9_intra_mode_tree, p);
|
||||
static PREDICTION_MODE read_intra_mode(vpx_reader *r, const vpx_prob *p) {
|
||||
return (PREDICTION_MODE)vpx_read_tree(r, vp9_intra_mode_tree, p);
|
||||
}
|
||||
|
||||
static PREDICTION_MODE read_intra_mode_y(VP9_COMMON *cm, MACROBLOCKD *xd,
|
||||
vp9_reader *r, int size_group) {
|
||||
vpx_reader *r, int size_group) {
|
||||
const PREDICTION_MODE y_mode =
|
||||
read_intra_mode(r, cm->fc->y_mode_prob[size_group]);
|
||||
FRAME_COUNTS *counts = xd->counts;
|
||||
@ -37,7 +37,7 @@ static PREDICTION_MODE read_intra_mode_y(VP9_COMMON *cm, MACROBLOCKD *xd,
|
||||
}
|
||||
|
||||
static PREDICTION_MODE read_intra_mode_uv(VP9_COMMON *cm, MACROBLOCKD *xd,
|
||||
vp9_reader *r,
|
||||
vpx_reader *r,
|
||||
PREDICTION_MODE y_mode) {
|
||||
const PREDICTION_MODE uv_mode = read_intra_mode(r,
|
||||
cm->fc->uv_mode_prob[y_mode]);
|
||||
@ -48,8 +48,8 @@ static PREDICTION_MODE read_intra_mode_uv(VP9_COMMON *cm, MACROBLOCKD *xd,
|
||||
}
|
||||
|
||||
static PREDICTION_MODE read_inter_mode(VP9_COMMON *cm, MACROBLOCKD *xd,
|
||||
vp9_reader *r, int ctx) {
|
||||
const int mode = vp9_read_tree(r, vp9_inter_mode_tree,
|
||||
vpx_reader *r, int ctx) {
|
||||
const int mode = vpx_read_tree(r, vp9_inter_mode_tree,
|
||||
cm->fc->inter_mode_probs[ctx]);
|
||||
FRAME_COUNTS *counts = xd->counts;
|
||||
if (counts)
|
||||
@ -58,20 +58,20 @@ static PREDICTION_MODE read_inter_mode(VP9_COMMON *cm, MACROBLOCKD *xd,
|
||||
return NEARESTMV + mode;
|
||||
}
|
||||
|
||||
static int read_segment_id(vp9_reader *r, const struct segmentation *seg) {
|
||||
return vp9_read_tree(r, vp9_segment_tree, seg->tree_probs);
|
||||
static int read_segment_id(vpx_reader *r, const struct segmentation *seg) {
|
||||
return vpx_read_tree(r, vp9_segment_tree, seg->tree_probs);
|
||||
}
|
||||
|
||||
static TX_SIZE read_selected_tx_size(VP9_COMMON *cm, MACROBLOCKD *xd,
|
||||
TX_SIZE max_tx_size, vp9_reader *r) {
|
||||
TX_SIZE max_tx_size, vpx_reader *r) {
|
||||
FRAME_COUNTS *counts = xd->counts;
|
||||
const int ctx = get_tx_size_context(xd);
|
||||
const vp9_prob *tx_probs = get_tx_probs(max_tx_size, ctx, &cm->fc->tx_probs);
|
||||
int tx_size = vp9_read(r, tx_probs[0]);
|
||||
const vpx_prob *tx_probs = get_tx_probs(max_tx_size, ctx, &cm->fc->tx_probs);
|
||||
int tx_size = vpx_read(r, tx_probs[0]);
|
||||
if (tx_size != TX_4X4 && max_tx_size >= TX_16X16) {
|
||||
tx_size += vp9_read(r, tx_probs[1]);
|
||||
tx_size += vpx_read(r, tx_probs[1]);
|
||||
if (tx_size != TX_8X8 && max_tx_size >= TX_32X32)
|
||||
tx_size += vp9_read(r, tx_probs[2]);
|
||||
tx_size += vpx_read(r, tx_probs[2]);
|
||||
}
|
||||
|
||||
if (counts)
|
||||
@ -80,7 +80,7 @@ static TX_SIZE read_selected_tx_size(VP9_COMMON *cm, MACROBLOCKD *xd,
|
||||
}
|
||||
|
||||
static TX_SIZE read_tx_size(VP9_COMMON *cm, MACROBLOCKD *xd,
|
||||
int allow_select, vp9_reader *r) {
|
||||
int allow_select, vpx_reader *r) {
|
||||
TX_MODE tx_mode = cm->tx_mode;
|
||||
BLOCK_SIZE bsize = xd->mi[0]->mbmi.sb_type;
|
||||
const TX_SIZE max_tx_size = max_txsize_lookup[bsize];
|
||||
@ -128,7 +128,7 @@ static void copy_segment_id(const VP9_COMMON *cm,
|
||||
|
||||
static int read_intra_segment_id(VP9_COMMON *const cm, int mi_offset,
|
||||
int x_mis, int y_mis,
|
||||
vp9_reader *r) {
|
||||
vpx_reader *r) {
|
||||
struct segmentation *const seg = &cm->seg;
|
||||
int segment_id;
|
||||
|
||||
@ -147,7 +147,7 @@ static int read_intra_segment_id(VP9_COMMON *const cm, int mi_offset,
|
||||
}
|
||||
|
||||
static int read_inter_segment_id(VP9_COMMON *const cm, MACROBLOCKD *const xd,
|
||||
int mi_row, int mi_col, vp9_reader *r) {
|
||||
int mi_row, int mi_col, vpx_reader *r) {
|
||||
struct segmentation *const seg = &cm->seg;
|
||||
MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
|
||||
int predicted_segment_id, segment_id;
|
||||
@ -173,8 +173,8 @@ static int read_inter_segment_id(VP9_COMMON *const cm, MACROBLOCKD *const xd,
|
||||
}
|
||||
|
||||
if (seg->temporal_update) {
|
||||
const vp9_prob pred_prob = vp9_get_pred_prob_seg_id(seg, xd);
|
||||
mbmi->seg_id_predicted = vp9_read(r, pred_prob);
|
||||
const vpx_prob pred_prob = vp9_get_pred_prob_seg_id(seg, xd);
|
||||
mbmi->seg_id_predicted = vpx_read(r, pred_prob);
|
||||
segment_id = mbmi->seg_id_predicted ? predicted_segment_id
|
||||
: read_segment_id(r, seg);
|
||||
} else {
|
||||
@ -185,12 +185,12 @@ static int read_inter_segment_id(VP9_COMMON *const cm, MACROBLOCKD *const xd,
|
||||
}
|
||||
|
||||
static int read_skip(VP9_COMMON *cm, const MACROBLOCKD *xd,
|
||||
int segment_id, vp9_reader *r) {
|
||||
int segment_id, vpx_reader *r) {
|
||||
if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP)) {
|
||||
return 1;
|
||||
} else {
|
||||
const int ctx = vp9_get_skip_context(xd);
|
||||
const int skip = vp9_read(r, cm->fc->skip_probs[ctx]);
|
||||
const int skip = vpx_read(r, cm->fc->skip_probs[ctx]);
|
||||
FRAME_COUNTS *counts = xd->counts;
|
||||
if (counts)
|
||||
++counts->skip[ctx][skip];
|
||||
@ -200,7 +200,7 @@ static int read_skip(VP9_COMMON *cm, const MACROBLOCKD *xd,
|
||||
|
||||
static void read_intra_frame_mode_info(VP9_COMMON *const cm,
|
||||
MACROBLOCKD *const xd,
|
||||
int mi_row, int mi_col, vp9_reader *r) {
|
||||
int mi_row, int mi_col, vpx_reader *r) {
|
||||
MODE_INFO *const mi = xd->mi[0];
|
||||
MB_MODE_INFO *const mbmi = &mi->mbmi;
|
||||
const MODE_INFO *above_mi = xd->above_mi;
|
||||
@ -248,16 +248,16 @@ static void read_intra_frame_mode_info(VP9_COMMON *const cm,
|
||||
mbmi->uv_mode = read_intra_mode(r, vp9_kf_uv_mode_prob[mbmi->mode]);
|
||||
}
|
||||
|
||||
static int read_mv_component(vp9_reader *r,
|
||||
static int read_mv_component(vpx_reader *r,
|
||||
const nmv_component *mvcomp, int usehp) {
|
||||
int mag, d, fr, hp;
|
||||
const int sign = vp9_read(r, mvcomp->sign);
|
||||
const int mv_class = vp9_read_tree(r, vp9_mv_class_tree, mvcomp->classes);
|
||||
const int sign = vpx_read(r, mvcomp->sign);
|
||||
const int mv_class = vpx_read_tree(r, vp9_mv_class_tree, mvcomp->classes);
|
||||
const int class0 = mv_class == MV_CLASS_0;
|
||||
|
||||
// Integer part
|
||||
if (class0) {
|
||||
d = vp9_read_tree(r, vp9_mv_class0_tree, mvcomp->class0);
|
||||
d = vpx_read_tree(r, vp9_mv_class0_tree, mvcomp->class0);
|
||||
mag = 0;
|
||||
} else {
|
||||
int i;
|
||||
@ -265,16 +265,16 @@ static int read_mv_component(vp9_reader *r,
|
||||
|
||||
d = 0;
|
||||
for (i = 0; i < n; ++i)
|
||||
d |= vp9_read(r, mvcomp->bits[i]) << i;
|
||||
d |= vpx_read(r, mvcomp->bits[i]) << i;
|
||||
mag = CLASS0_SIZE << (mv_class + 2);
|
||||
}
|
||||
|
||||
// Fractional part
|
||||
fr = vp9_read_tree(r, vp9_mv_fp_tree, class0 ? mvcomp->class0_fp[d]
|
||||
fr = vpx_read_tree(r, vp9_mv_fp_tree, class0 ? mvcomp->class0_fp[d]
|
||||
: mvcomp->fp);
|
||||
|
||||
// High precision part (if hp is not used, the default value of the hp is 1)
|
||||
hp = usehp ? vp9_read(r, class0 ? mvcomp->class0_hp : mvcomp->hp)
|
||||
hp = usehp ? vpx_read(r, class0 ? mvcomp->class0_hp : mvcomp->hp)
|
||||
: 1;
|
||||
|
||||
// Result
|
||||
@ -282,11 +282,11 @@ static int read_mv_component(vp9_reader *r,
|
||||
return sign ? -mag : mag;
|
||||
}
|
||||
|
||||
static INLINE void read_mv(vp9_reader *r, MV *mv, const MV *ref,
|
||||
static INLINE void read_mv(vpx_reader *r, MV *mv, const MV *ref,
|
||||
const nmv_context *ctx,
|
||||
nmv_context_counts *counts, int allow_hp) {
|
||||
const MV_JOINT_TYPE joint_type =
|
||||
(MV_JOINT_TYPE)vp9_read_tree(r, vp9_mv_joint_tree, ctx->joints);
|
||||
(MV_JOINT_TYPE)vpx_read_tree(r, vp9_mv_joint_tree, ctx->joints);
|
||||
const int use_hp = allow_hp && vp9_use_mv_hp(ref);
|
||||
MV diff = {0, 0};
|
||||
|
||||
@ -304,11 +304,11 @@ static INLINE void read_mv(vp9_reader *r, MV *mv, const MV *ref,
|
||||
|
||||
static REFERENCE_MODE read_block_reference_mode(VP9_COMMON *cm,
|
||||
const MACROBLOCKD *xd,
|
||||
vp9_reader *r) {
|
||||
vpx_reader *r) {
|
||||
if (cm->reference_mode == REFERENCE_MODE_SELECT) {
|
||||
const int ctx = vp9_get_reference_mode_context(cm, xd);
|
||||
const REFERENCE_MODE mode =
|
||||
(REFERENCE_MODE)vp9_read(r, cm->fc->comp_inter_prob[ctx]);
|
||||
(REFERENCE_MODE)vpx_read(r, cm->fc->comp_inter_prob[ctx]);
|
||||
FRAME_COUNTS *counts = xd->counts;
|
||||
if (counts)
|
||||
++counts->comp_inter[ctx][mode];
|
||||
@ -320,7 +320,7 @@ static REFERENCE_MODE read_block_reference_mode(VP9_COMMON *cm,
|
||||
|
||||
// Read the referncence frame
|
||||
static void read_ref_frames(VP9_COMMON *const cm, MACROBLOCKD *const xd,
|
||||
vp9_reader *r,
|
||||
vpx_reader *r,
|
||||
int segment_id, MV_REFERENCE_FRAME ref_frame[2]) {
|
||||
FRAME_CONTEXT *const fc = cm->fc;
|
||||
FRAME_COUNTS *counts = xd->counts;
|
||||
@ -335,19 +335,19 @@ static void read_ref_frames(VP9_COMMON *const cm, MACROBLOCKD *const xd,
|
||||
if (mode == COMPOUND_REFERENCE) {
|
||||
const int idx = cm->ref_frame_sign_bias[cm->comp_fixed_ref];
|
||||
const int ctx = vp9_get_pred_context_comp_ref_p(cm, xd);
|
||||
const int bit = vp9_read(r, fc->comp_ref_prob[ctx]);
|
||||
const int bit = vpx_read(r, fc->comp_ref_prob[ctx]);
|
||||
if (counts)
|
||||
++counts->comp_ref[ctx][bit];
|
||||
ref_frame[idx] = cm->comp_fixed_ref;
|
||||
ref_frame[!idx] = cm->comp_var_ref[bit];
|
||||
} else if (mode == SINGLE_REFERENCE) {
|
||||
const int ctx0 = vp9_get_pred_context_single_ref_p1(xd);
|
||||
const int bit0 = vp9_read(r, fc->single_ref_prob[ctx0][0]);
|
||||
const int bit0 = vpx_read(r, fc->single_ref_prob[ctx0][0]);
|
||||
if (counts)
|
||||
++counts->single_ref[ctx0][0][bit0];
|
||||
if (bit0) {
|
||||
const int ctx1 = vp9_get_pred_context_single_ref_p2(xd);
|
||||
const int bit1 = vp9_read(r, fc->single_ref_prob[ctx1][1]);
|
||||
const int bit1 = vpx_read(r, fc->single_ref_prob[ctx1][1]);
|
||||
if (counts)
|
||||
++counts->single_ref[ctx1][1][bit1];
|
||||
ref_frame[0] = bit1 ? ALTREF_FRAME : GOLDEN_FRAME;
|
||||
@ -365,10 +365,10 @@ static void read_ref_frames(VP9_COMMON *const cm, MACROBLOCKD *const xd,
|
||||
|
||||
static INLINE INTERP_FILTER read_switchable_interp_filter(
|
||||
VP9_COMMON *const cm, MACROBLOCKD *const xd,
|
||||
vp9_reader *r) {
|
||||
vpx_reader *r) {
|
||||
const int ctx = vp9_get_pred_context_switchable_interp(xd);
|
||||
const INTERP_FILTER type =
|
||||
(INTERP_FILTER)vp9_read_tree(r, vp9_switchable_interp_tree,
|
||||
(INTERP_FILTER)vpx_read_tree(r, vp9_switchable_interp_tree,
|
||||
cm->fc->switchable_interp_prob[ctx]);
|
||||
FRAME_COUNTS *counts = xd->counts;
|
||||
if (counts)
|
||||
@ -378,7 +378,7 @@ static INLINE INTERP_FILTER read_switchable_interp_filter(
|
||||
|
||||
static void read_intra_block_mode_info(VP9_COMMON *const cm,
|
||||
MACROBLOCKD *const xd, MODE_INFO *mi,
|
||||
vp9_reader *r) {
|
||||
vpx_reader *r) {
|
||||
MB_MODE_INFO *const mbmi = &mi->mbmi;
|
||||
const BLOCK_SIZE bsize = mi->mbmi.sb_type;
|
||||
int i;
|
||||
@ -420,7 +420,7 @@ static INLINE int assign_mv(VP9_COMMON *cm, MACROBLOCKD *xd,
|
||||
PREDICTION_MODE mode,
|
||||
int_mv mv[2], int_mv ref_mv[2],
|
||||
int_mv nearest_mv[2], int_mv near_mv[2],
|
||||
int is_compound, int allow_hp, vp9_reader *r) {
|
||||
int is_compound, int allow_hp, vpx_reader *r) {
|
||||
int i;
|
||||
int ret = 1;
|
||||
|
||||
@ -461,12 +461,12 @@ static INLINE int assign_mv(VP9_COMMON *cm, MACROBLOCKD *xd,
|
||||
}
|
||||
|
||||
static int read_is_inter_block(VP9_COMMON *const cm, MACROBLOCKD *const xd,
|
||||
int segment_id, vp9_reader *r) {
|
||||
int segment_id, vpx_reader *r) {
|
||||
if (segfeature_active(&cm->seg, segment_id, SEG_LVL_REF_FRAME)) {
|
||||
return get_segdata(&cm->seg, segment_id, SEG_LVL_REF_FRAME) != INTRA_FRAME;
|
||||
} else {
|
||||
const int ctx = vp9_get_intra_inter_context(xd);
|
||||
const int is_inter = vp9_read(r, cm->fc->intra_inter_prob[ctx]);
|
||||
const int is_inter = vpx_read(r, cm->fc->intra_inter_prob[ctx]);
|
||||
FRAME_COUNTS *counts = xd->counts;
|
||||
if (counts)
|
||||
++counts->intra_inter[ctx][is_inter];
|
||||
@ -483,7 +483,7 @@ static void fpm_sync(void *const data, int mi_row) {
|
||||
static void read_inter_block_mode_info(VP9Decoder *const pbi,
|
||||
MACROBLOCKD *const xd,
|
||||
MODE_INFO *const mi,
|
||||
int mi_row, int mi_col, vp9_reader *r) {
|
||||
int mi_row, int mi_col, vpx_reader *r) {
|
||||
VP9_COMMON *const cm = &pbi->common;
|
||||
MB_MODE_INFO *const mbmi = &mi->mbmi;
|
||||
const BLOCK_SIZE bsize = mbmi->sb_type;
|
||||
@ -585,7 +585,7 @@ static void read_inter_block_mode_info(VP9Decoder *const pbi,
|
||||
|
||||
static void read_inter_frame_mode_info(VP9Decoder *const pbi,
|
||||
MACROBLOCKD *const xd,
|
||||
int mi_row, int mi_col, vp9_reader *r) {
|
||||
int mi_row, int mi_col, vpx_reader *r) {
|
||||
VP9_COMMON *const cm = &pbi->common;
|
||||
MODE_INFO *const mi = xd->mi[0];
|
||||
MB_MODE_INFO *const mbmi = &mi->mbmi;
|
||||
@ -604,8 +604,8 @@ static void read_inter_frame_mode_info(VP9Decoder *const pbi,
|
||||
read_intra_block_mode_info(cm, xd, mi, r);
|
||||
}
|
||||
|
||||
void vp9_read_mode_info(VP9Decoder *const pbi, MACROBLOCKD *xd,
|
||||
int mi_row, int mi_col, vp9_reader *r,
|
||||
void vpx_read_mode_info(VP9Decoder *const pbi, MACROBLOCKD *xd,
|
||||
int mi_row, int mi_col, vpx_reader *r,
|
||||
int x_mis, int y_mis) {
|
||||
VP9_COMMON *const cm = &pbi->common;
|
||||
MODE_INFO *const mi = xd->mi[0];
|
||||
|
@@ -19,8 +19,8 @@
 extern "C" {
 #endif
 
-void vp9_read_mode_info(VP9Decoder *const pbi, MACROBLOCKD *xd,
-int mi_row, int mi_col, vp9_reader *r,
+void vpx_read_mode_info(VP9Decoder *const pbi, MACROBLOCKD *xd,
+int mi_row, int mi_col, vpx_reader *r,
 int x_mis, int y_mis);
 
 #ifdef __cplusplus
@@ -30,7 +30,7 @@ extern "C" {
 // TODO(hkuang): combine this with TileWorkerData.
 typedef struct TileData {
 VP9_COMMON *cm;
-vp9_reader bit_reader;
+vpx_reader bit_reader;
 DECLARE_ALIGNED(16, MACROBLOCKD, xd);
 /* dqcoeff are shared by all the planes. So planes must be decoded serially */
 DECLARE_ALIGNED(16, tran_low_t, dqcoeff[32 * 32]);
@@ -38,7 +38,7 @@ typedef struct TileData {
 
 typedef struct TileWorkerData {
 struct VP9Decoder *pbi;
-vp9_reader bit_reader;
+vpx_reader bit_reader;
 FRAME_COUNTS counts;
 DECLARE_ALIGNED(16, MACROBLOCKD, xd);
 /* dqcoeff are shared by all the planes. So planes must be decoded serially */
@ -38,10 +38,10 @@
|
||||
++coef_counts[band][ctx][token]; \
|
||||
} while (0)
|
||||
|
||||
static INLINE int read_coeff(const vp9_prob *probs, int n, vp9_reader *r) {
|
||||
static INLINE int read_coeff(const vpx_prob *probs, int n, vpx_reader *r) {
|
||||
int i, val = 0;
|
||||
for (i = 0; i < n; ++i)
|
||||
val = (val << 1) | vp9_read(r, probs[i]);
|
||||
val = (val << 1) | vpx_read(r, probs[i]);
|
||||
return val;
|
||||
}
|
||||
|
||||
@ -49,15 +49,15 @@ static int decode_coefs(const MACROBLOCKD *xd,
|
||||
PLANE_TYPE type,
|
||||
tran_low_t *dqcoeff, TX_SIZE tx_size, const int16_t *dq,
|
||||
int ctx, const int16_t *scan, const int16_t *nb,
|
||||
vp9_reader *r) {
|
||||
vpx_reader *r) {
|
||||
FRAME_COUNTS *counts = xd->counts;
|
||||
const int max_eob = 16 << (tx_size << 1);
|
||||
const FRAME_CONTEXT *const fc = xd->fc;
|
||||
const int ref = is_inter_block(&xd->mi[0]->mbmi);
|
||||
int band, c = 0;
|
||||
const vp9_prob (*coef_probs)[COEFF_CONTEXTS][UNCONSTRAINED_NODES] =
|
||||
const vpx_prob (*coef_probs)[COEFF_CONTEXTS][UNCONSTRAINED_NODES] =
|
||||
fc->coef_probs[tx_size][type][ref];
|
||||
const vp9_prob *prob;
|
||||
const vpx_prob *prob;
|
||||
unsigned int (*coef_counts)[COEFF_CONTEXTS][UNCONSTRAINED_NODES + 1];
|
||||
unsigned int (*eob_branch_count)[COEFF_CONTEXTS];
|
||||
uint8_t token_cache[32 * 32];
|
||||
@ -117,12 +117,12 @@ static int decode_coefs(const MACROBLOCKD *xd,
|
||||
prob = coef_probs[band][ctx];
|
||||
if (counts)
|
||||
++eob_branch_count[band][ctx];
|
||||
if (!vp9_read(r, prob[EOB_CONTEXT_NODE])) {
|
||||
if (!vpx_read(r, prob[EOB_CONTEXT_NODE])) {
|
||||
INCREMENT_COUNT(EOB_MODEL_TOKEN);
|
||||
break;
|
||||
}
|
||||
|
||||
while (!vp9_read(r, prob[ZERO_CONTEXT_NODE])) {
|
||||
while (!vpx_read(r, prob[ZERO_CONTEXT_NODE])) {
|
||||
INCREMENT_COUNT(ZERO_TOKEN);
|
||||
dqv = dq[1];
|
||||
token_cache[scan[c]] = 0;
|
||||
@ -134,13 +134,13 @@ static int decode_coefs(const MACROBLOCKD *xd,
|
||||
prob = coef_probs[band][ctx];
|
||||
}
|
||||
|
||||
if (!vp9_read(r, prob[ONE_CONTEXT_NODE])) {
|
||||
if (!vpx_read(r, prob[ONE_CONTEXT_NODE])) {
|
||||
INCREMENT_COUNT(ONE_TOKEN);
|
||||
token = ONE_TOKEN;
|
||||
val = 1;
|
||||
} else {
|
||||
INCREMENT_COUNT(TWO_TOKEN);
|
||||
token = vp9_read_tree(r, vp9_coef_con_tree,
|
||||
token = vpx_read_tree(r, vp9_coef_con_tree,
|
||||
vp9_pareto8_full[prob[PIVOT_NODE] - 1]);
|
||||
switch (token) {
|
||||
case TWO_TOKEN:
|
||||
@ -188,13 +188,13 @@ static int decode_coefs(const MACROBLOCKD *xd,
|
||||
v = (val * dqv) >> dq_shift;
|
||||
#if CONFIG_COEFFICIENT_RANGE_CHECKING
|
||||
#if CONFIG_VP9_HIGHBITDEPTH
|
||||
dqcoeff[scan[c]] = highbd_check_range((vp9_read_bit(r) ? -v : v),
|
||||
dqcoeff[scan[c]] = highbd_check_range((vpx_read_bit(r) ? -v : v),
|
||||
xd->bd);
|
||||
#else
|
||||
dqcoeff[scan[c]] = check_range(vp9_read_bit(r) ? -v : v);
|
||||
dqcoeff[scan[c]] = check_range(vpx_read_bit(r) ? -v : v);
|
||||
#endif // CONFIG_VP9_HIGHBITDEPTH
|
||||
#else
|
||||
dqcoeff[scan[c]] = vp9_read_bit(r) ? -v : v;
|
||||
dqcoeff[scan[c]] = vpx_read_bit(r) ? -v : v;
|
||||
#endif // CONFIG_COEFFICIENT_RANGE_CHECKING
|
||||
token_cache[scan[c]] = vp9_pt_energy_class[token];
|
||||
++c;
|
||||
@ -253,7 +253,7 @@ void dec_set_contexts(const MACROBLOCKD *xd, struct macroblockd_plane *pd,
|
||||
int vp9_decode_block_tokens(MACROBLOCKD *xd,
|
||||
int plane, const scan_order *sc,
|
||||
int x, int y,
|
||||
TX_SIZE tx_size, vp9_reader *r,
|
||||
TX_SIZE tx_size, vpx_reader *r,
|
||||
int seg_id) {
|
||||
struct macroblockd_plane *const pd = &xd->plane[plane];
|
||||
const int16_t *const dequant = pd->seg_dequant[seg_id];
|
||||
|
@@ -23,7 +23,7 @@ extern "C" {
 int vp9_decode_block_tokens(MACROBLOCKD *xd,
 int plane, const scan_order *sc,
 int x, int y,
-TX_SIZE tx_size, vp9_reader *r,
+TX_SIZE tx_size, vpx_reader *r,
 int seg_id);
 
 #ifdef __cplusplus
@ -21,11 +21,11 @@ static int inv_recenter_nonneg(int v, int m) {
return (v & 1) ? m - ((v + 1) >> 1) : m + (v >> 1);
}

static int decode_uniform(vp9_reader *r) {
static int decode_uniform(vpx_reader *r) {
const int l = 8;
const int m = (1 << l) - 191;
const int v = vp9_read_literal(r, l - 1);
return v < m ? v : (v << 1) - m + vp9_read_bit(r);
const int v = vpx_read_literal(r, l - 1);
return v < m ? v : (v << 1) - m + vpx_read_bit(r);
}

static int inv_remap_prob(int v, int m) {
@ -58,19 +58,19 @@ static int inv_remap_prob(int v, int m) {
}
}

static int decode_term_subexp(vp9_reader *r) {
if (!vp9_read_bit(r))
return vp9_read_literal(r, 4);
if (!vp9_read_bit(r))
return vp9_read_literal(r, 4) + 16;
if (!vp9_read_bit(r))
return vp9_read_literal(r, 5) + 32;
static int decode_term_subexp(vpx_reader *r) {
if (!vpx_read_bit(r))
return vpx_read_literal(r, 4);
if (!vpx_read_bit(r))
return vpx_read_literal(r, 4) + 16;
if (!vpx_read_bit(r))
return vpx_read_literal(r, 5) + 32;
return decode_uniform(r) + 64;
}

void vp9_diff_update_prob(vp9_reader *r, vp9_prob* p) {
if (vp9_read(r, DIFF_UPDATE_PROB)) {
void vp9_diff_update_prob(vpx_reader *r, vpx_prob* p) {
if (vpx_read(r, DIFF_UPDATE_PROB)) {
const int delp = decode_term_subexp(r);
*p = (vp9_prob)inv_remap_prob(delp, *p);
*p = (vpx_prob)inv_remap_prob(delp, *p);
}
}

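The hunk above only renames the reader calls; the inverse-recentering arithmetic is untouched. As a quick sanity check, here is a standalone sketch (not part of this change) that reproduces inv_recenter_nonneg() from the context above and works two small cases.

/* Standalone sketch: same arithmetic as inv_recenter_nonneg() above. */
#include <stdio.h>

static int inv_recenter_nonneg(int v, int m) {
  return (v & 1) ? m - ((v + 1) >> 1) : m + (v >> 1);
}

int main(void) {
  /* odd v lands below the center m: 10 - ((3 + 1) >> 1) = 8 */
  printf("%d\n", inv_recenter_nonneg(3, 10));  /* prints 8 */
  /* even v lands above the center m: 10 + (4 >> 1) = 12 */
  printf("%d\n", inv_recenter_nonneg(4, 10));  /* prints 12 */
  return 0;
}

Odd values of v map below the center m and even values above it, which is how the subexponentially coded delta read by decode_term_subexp() gets folded back around the previous probability in inv_remap_prob().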
@ -18,7 +18,7 @@
extern "C" {
#endif

void vp9_diff_update_prob(vp9_reader *r, vp9_prob* p);
void vp9_diff_update_prob(vpx_reader *r, vpx_prob* p);

#ifdef __cplusplus
} // extern "C"

@ -45,12 +45,12 @@ static const struct vp9_token inter_mode_encodings[INTER_MODES] =
{{2, 2}, {6, 3}, {0, 1}, {7, 3}};

static void write_intra_mode(vp9_writer *w, PREDICTION_MODE mode,
const vp9_prob *probs) {
const vpx_prob *probs) {
vp9_write_token(w, vp9_intra_mode_tree, probs, &intra_mode_encodings[mode]);
}

static void write_inter_mode(vp9_writer *w, PREDICTION_MODE mode,
const vp9_prob *probs) {
const vpx_prob *probs) {
assert(is_inter_mode(mode));
vp9_write_token(w, vp9_inter_mode_tree, probs,
&inter_mode_encodings[INTER_OFFSET(mode)]);
@ -62,7 +62,7 @@ static void encode_unsigned_max(struct vp9_write_bit_buffer *wb,
}

static void prob_diff_update(const vp9_tree_index *tree,
vp9_prob probs[/*n - 1*/],
vpx_prob probs[/*n - 1*/],
const unsigned int counts[/*n - 1*/],
int n, vp9_writer *w) {
int i;
@ -81,7 +81,7 @@ static void write_selected_tx_size(const VP9_COMMON *cm,
TX_SIZE tx_size = xd->mi[0]->mbmi.tx_size;
BLOCK_SIZE bsize = xd->mi[0]->mbmi.sb_type;
const TX_SIZE max_tx_size = max_txsize_lookup[bsize];
const vp9_prob *const tx_probs = get_tx_probs2(max_tx_size, xd,
const vpx_prob *const tx_probs = get_tx_probs2(max_tx_size, xd,
&cm->fc->tx_probs);
vp9_write(w, tx_size != TX_4X4, tx_probs[0]);
if (tx_size != TX_4X4 && max_tx_size >= TX_16X16) {
@ -254,7 +254,7 @@ static void pack_inter_mode_mvs(VP9_COMP *cpi, const MODE_INFO *mi,
if (seg->update_map) {
if (seg->temporal_update) {
const int pred_flag = mbmi->seg_id_predicted;
vp9_prob pred_prob = vp9_get_pred_prob_seg_id(seg, xd);
vpx_prob pred_prob = vp9_get_pred_prob_seg_id(seg, xd);
vp9_write(w, pred_flag, pred_prob);
if (!pred_flag)
write_segment_id(w, seg, segment_id);
@ -290,7 +290,7 @@ static void pack_inter_mode_mvs(VP9_COMP *cpi, const MODE_INFO *mi,
write_intra_mode(w, mbmi->uv_mode, cm->fc->uv_mode_prob[mode]);
} else {
const int mode_ctx = mbmi_ext->mode_context[mbmi->ref_frame[0]];
const vp9_prob *const inter_probs = cm->fc->inter_mode_probs[mode_ctx];
const vpx_prob *const inter_probs = cm->fc->inter_mode_probs[mode_ctx];
write_ref_frames(cm, xd, w);

// If segment skip is not enabled code the mode.
@ -407,7 +407,7 @@ static void write_partition(const VP9_COMMON *const cm,
int hbs, int mi_row, int mi_col,
PARTITION_TYPE p, BLOCK_SIZE bsize, vp9_writer *w) {
const int ctx = partition_plane_context(xd, mi_row, mi_col, bsize);
const vp9_prob *const probs = xd->partition_probs[ctx];
const vpx_prob *const probs = xd->partition_probs[ctx];
const int has_rows = (mi_row + hbs) < cm->mi_rows;
const int has_cols = (mi_col + hbs) < cm->mi_cols;

@ -533,7 +533,7 @@ static void update_coef_probs_common(vp9_writer* const bc, VP9_COMP *cpi,
vp9_coeff_stats *frame_branch_ct,
vp9_coeff_probs_model *new_coef_probs) {
vp9_coeff_probs_model *old_coef_probs = cpi->common.fc->coef_probs[tx_size];
const vp9_prob upd = DIFF_UPDATE_PROB;
const vpx_prob upd = DIFF_UPDATE_PROB;
const int entropy_nodes_update = UNCONSTRAINED_NODES;
int i, j, k, l, t;
int stepsize = cpi->sf.coeff_prob_appx_step;
@ -548,8 +548,8 @@ static void update_coef_probs_common(vp9_writer* const bc, VP9_COMP *cpi,
for (k = 0; k < COEF_BANDS; ++k) {
for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
for (t = 0; t < entropy_nodes_update; ++t) {
vp9_prob newp = new_coef_probs[i][j][k][l][t];
const vp9_prob oldp = old_coef_probs[i][j][k][l][t];
vpx_prob newp = new_coef_probs[i][j][k][l][t];
const vpx_prob oldp = old_coef_probs[i][j][k][l][t];
int s;
int u = 0;
if (t == PIVOT_NODE)
@ -585,9 +585,9 @@ static void update_coef_probs_common(vp9_writer* const bc, VP9_COMP *cpi,
for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
// calc probs and branch cts for this frame only
for (t = 0; t < entropy_nodes_update; ++t) {
vp9_prob newp = new_coef_probs[i][j][k][l][t];
vp9_prob *oldp = old_coef_probs[i][j][k][l] + t;
const vp9_prob upd = DIFF_UPDATE_PROB;
vpx_prob newp = new_coef_probs[i][j][k][l][t];
vpx_prob *oldp = old_coef_probs[i][j][k][l] + t;
const vpx_prob upd = DIFF_UPDATE_PROB;
int s;
int u = 0;
if (t == PIVOT_NODE)
@ -623,8 +623,8 @@ static void update_coef_probs_common(vp9_writer* const bc, VP9_COMP *cpi,
for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
// calc probs and branch cts for this frame only
for (t = 0; t < entropy_nodes_update; ++t) {
vp9_prob newp = new_coef_probs[i][j][k][l][t];
vp9_prob *oldp = old_coef_probs[i][j][k][l] + t;
vpx_prob newp = new_coef_probs[i][j][k][l][t];
vpx_prob *oldp = old_coef_probs[i][j][k][l] + t;
int s;
int u = 0;

@ -35,9 +35,9 @@ const unsigned int vp9_prob_cost[256] = {
22, 21, 19, 18, 16, 15, 13, 12, 10, 9, 7, 6,
4, 3, 1, 1};

static void cost(int *costs, vp9_tree tree, const vp9_prob *probs,
static void cost(int *costs, vp9_tree tree, const vpx_prob *probs,
int i, int c) {
const vp9_prob prob = probs[i / 2];
const vpx_prob prob = probs[i / 2];
int b;

for (b = 0; b <= 1; ++b) {
@ -51,11 +51,11 @@ static void cost(int *costs, vp9_tree tree, const vp9_prob *probs,
}
}

void vp9_cost_tokens(int *costs, const vp9_prob *probs, vp9_tree tree) {
void vp9_cost_tokens(int *costs, const vpx_prob *probs, vp9_tree tree) {
cost(costs, tree, probs, 0, 0);
}

void vp9_cost_tokens_skip(int *costs, const vp9_prob *probs, vp9_tree tree) {
void vp9_cost_tokens_skip(int *costs, const vpx_prob *probs, vp9_tree tree) {
assert(tree[0] <= 0 && tree[1] > 0);

costs[-tree[0]] = vp9_cost_bit(probs[0], 0);

@ -27,11 +27,11 @@ extern const unsigned int vp9_prob_cost[256];
: (prob))

static INLINE unsigned int cost_branch256(const unsigned int ct[2],
vp9_prob p) {
vpx_prob p) {
return ct[0] * vp9_cost_zero(p) + ct[1] * vp9_cost_one(p);
}

static INLINE int treed_cost(vp9_tree tree, const vp9_prob *probs,
static INLINE int treed_cost(vp9_tree tree, const vpx_prob *probs,
int bits, int len) {
int cost = 0;
vp9_tree_index i = 0;
@ -45,8 +45,8 @@ static INLINE int treed_cost(vp9_tree tree, const vp9_prob *probs,
return cost;
}

void vp9_cost_tokens(int *costs, const vp9_prob *probs, vp9_tree tree);
void vp9_cost_tokens_skip(int *costs, const vp9_prob *probs, vp9_tree tree);
void vp9_cost_tokens(int *costs, const vpx_prob *probs, vp9_tree tree);
void vp9_cost_tokens_skip(int *costs, const vpx_prob *probs, vp9_tree tree);

#ifdef __cplusplus
} // extern "C"

@ -133,9 +133,9 @@ static void build_nmv_component_cost_table(int *mvcost,
}
}

static int update_mv(vp9_writer *w, const unsigned int ct[2], vp9_prob *cur_p,
vp9_prob upd_p) {
const vp9_prob new_p = get_binary_prob(ct[0], ct[1]) | 1;
static int update_mv(vp9_writer *w, const unsigned int ct[2], vpx_prob *cur_p,
vpx_prob upd_p) {
const vpx_prob new_p = get_binary_prob(ct[0], ct[1]) | 1;
const int update = cost_branch256(ct, *cur_p) + vp9_cost_zero(upd_p) >
cost_branch256(ct, new_p) + vp9_cost_one(upd_p) + 7 * 256;
vp9_write(w, update, upd_p);
@ -147,7 +147,7 @@ static int update_mv(vp9_writer *w, const unsigned int ct[2], vp9_prob *cur_p,
}

static void write_mv_update(const vp9_tree_index *tree,
vp9_prob probs[/*n - 1*/],
vpx_prob probs[/*n - 1*/],
const unsigned int counts[/*n - 1*/],
int n, vp9_writer *w) {
int i;

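update_mv() above decides whether signalling a refreshed probability pays for itself by comparing rates in 1/256-bit units. A hedged sketch of that comparison, assuming the cost helpers declared in vp9_cost.h (the header path is an assumption; the 7 * 256 term approximates the cost of coding the new value, as in the hunk):

/* Sketch only: mirrors the decision in update_mv() above. */
#include "vp9/encoder/vp9_cost.h"

static int should_update_prob(const unsigned int ct[2], vpx_prob cur_p,
                              vpx_prob new_p, vpx_prob upd_p) {
  /* Rate of coding the branch counts with the old probability,
   * plus the "no update" flag. */
  const int keep_cost = cost_branch256(ct, cur_p) + vp9_cost_zero(upd_p);
  /* Rate with the new probability, plus the "update" flag and roughly
   * seven bits to transmit the new value itself. */
  const int send_cost = cost_branch256(ct, new_p) + vp9_cost_one(upd_p) + 7 * 256;
  return keep_cost > send_cost;  /* update only when it saves bits */
}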
@ -55,7 +55,7 @@ typedef struct {
int nmvcosts[2][MV_VALS];
int nmvcosts_hp[2][MV_VALS];

vp9_prob segment_pred_probs[PREDICTION_PROBS];
vpx_prob segment_pred_probs[PREDICTION_PROBS];

unsigned char *last_frame_seg_map_copy;

@ -1019,9 +1019,9 @@ void vp9_pick_intra_mode(VP9_COMP *cpi, MACROBLOCK *x, RD_COST *rd_cost,
static void init_ref_frame_cost(VP9_COMMON *const cm,
MACROBLOCKD *const xd,
int ref_frame_cost[MAX_REF_FRAMES]) {
vp9_prob intra_inter_p = vp9_get_intra_inter_prob(cm, xd);
vp9_prob ref_single_p1 = vp9_get_pred_prob_single_ref_p1(cm, xd);
vp9_prob ref_single_p2 = vp9_get_pred_prob_single_ref_p2(cm, xd);
vpx_prob intra_inter_p = vp9_get_intra_inter_prob(cm, xd);
vpx_prob ref_single_p1 = vp9_get_pred_prob_single_ref_p1(cm, xd);
vpx_prob ref_single_p2 = vp9_get_pred_prob_single_ref_p2(cm, xd);

ref_frame_cost[INTRA_FRAME] = vp9_cost_bit(intra_inter_p, 0);
ref_frame_cost[LAST_FRAME] = ref_frame_cost[GOLDEN_FRAME] =

@ -93,7 +93,7 @@ static void fill_token_costs(vp9_coeff_cost *c,
for (j = 0; j < REF_TYPES; ++j)
for (k = 0; k < COEF_BANDS; ++k)
for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
vp9_prob probs[ENTROPY_NODES];
vpx_prob probs[ENTROPY_NODES];
vp9_model_to_full_probs(p[t][i][j][k][l], probs);
vp9_cost_tokens((int *)c[t][i][j][k][0][l], probs,
vp9_coef_tree);

@ -641,7 +641,7 @@ static void choose_tx_size_from_rd(VP9_COMP *cpi, MACROBLOCK *x,
VP9_COMMON *const cm = &cpi->common;
MACROBLOCKD *const xd = &x->e_mbd;
MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
vp9_prob skip_prob = vp9_get_skip_prob(cm, xd);
vpx_prob skip_prob = vp9_get_skip_prob(cm, xd);
int r[TX_SIZES][2], s[TX_SIZES];
int64_t d[TX_SIZES], sse[TX_SIZES];
int64_t rd[TX_SIZES][2] = {{INT64_MAX, INT64_MAX},
@ -654,7 +654,7 @@ static void choose_tx_size_from_rd(VP9_COMP *cpi, MACROBLOCK *x,
int64_t best_rd = INT64_MAX;
TX_SIZE best_tx = max_tx_size;

const vp9_prob *tx_probs = get_tx_probs2(max_tx_size, xd, &cm->fc->tx_probs);
const vpx_prob *tx_probs = get_tx_probs2(max_tx_size, xd, &cm->fc->tx_probs);
assert(skip_prob > 0);
s0 = vp9_cost_bit(skip_prob, 0);
s1 = vp9_cost_bit(skip_prob, 1);
@ -2118,7 +2118,7 @@ static void estimate_ref_frame_costs(const VP9_COMMON *cm,
int segment_id,
unsigned int *ref_costs_single,
unsigned int *ref_costs_comp,
vp9_prob *comp_mode_p) {
vpx_prob *comp_mode_p) {
int seg_ref_active = segfeature_active(&cm->seg, segment_id,
SEG_LVL_REF_FRAME);
if (seg_ref_active) {
@ -2126,8 +2126,8 @@ static void estimate_ref_frame_costs(const VP9_COMMON *cm,
memset(ref_costs_comp, 0, MAX_REF_FRAMES * sizeof(*ref_costs_comp));
*comp_mode_p = 128;
} else {
vp9_prob intra_inter_p = vp9_get_intra_inter_prob(cm, xd);
vp9_prob comp_inter_p = 128;
vpx_prob intra_inter_p = vp9_get_intra_inter_prob(cm, xd);
vpx_prob comp_inter_p = 128;

if (cm->reference_mode == REFERENCE_MODE_SELECT) {
comp_inter_p = vp9_get_reference_mode_prob(cm, xd);
@ -2139,8 +2139,8 @@ static void estimate_ref_frame_costs(const VP9_COMMON *cm,
ref_costs_single[INTRA_FRAME] = vp9_cost_bit(intra_inter_p, 0);

if (cm->reference_mode != COMPOUND_REFERENCE) {
vp9_prob ref_single_p1 = vp9_get_pred_prob_single_ref_p1(cm, xd);
vp9_prob ref_single_p2 = vp9_get_pred_prob_single_ref_p2(cm, xd);
vpx_prob ref_single_p1 = vp9_get_pred_prob_single_ref_p1(cm, xd);
vpx_prob ref_single_p2 = vp9_get_pred_prob_single_ref_p2(cm, xd);
unsigned int base_cost = vp9_cost_bit(intra_inter_p, 1);

if (cm->reference_mode == REFERENCE_MODE_SELECT)
@ -2159,7 +2159,7 @@ static void estimate_ref_frame_costs(const VP9_COMMON *cm,
ref_costs_single[ALTREF_FRAME] = 512;
}
if (cm->reference_mode != SINGLE_REFERENCE) {
vp9_prob ref_comp_p = vp9_get_pred_prob_comp_ref_p(cm, xd);
vpx_prob ref_comp_p = vp9_get_pred_prob_comp_ref_p(cm, xd);
unsigned int base_cost = vp9_cost_bit(intra_inter_p, 1);

if (cm->reference_mode == REFERENCE_MODE_SELECT)
@ -3003,7 +3003,7 @@ void vp9_rd_pick_inter_mode_sb(VP9_COMP *cpi,
int best_mode_skippable = 0;
int midx, best_mode_index = -1;
unsigned int ref_costs_single[MAX_REF_FRAMES], ref_costs_comp[MAX_REF_FRAMES];
vp9_prob comp_mode_p;
vpx_prob comp_mode_p;
int64_t best_intra_rd = INT64_MAX;
unsigned int best_pred_sse = UINT_MAX;
PREDICTION_MODE best_intra_mode = DC_PRED;
@ -3696,7 +3696,7 @@ void vp9_rd_pick_inter_mode_sb_seg_skip(VP9_COMP *cpi,
int64_t best_pred_diff[REFERENCE_MODES];
int64_t best_filter_diff[SWITCHABLE_FILTER_CONTEXTS];
unsigned int ref_costs_single[MAX_REF_FRAMES], ref_costs_comp[MAX_REF_FRAMES];
vp9_prob comp_mode_p;
vpx_prob comp_mode_p;
INTERP_FILTER best_filter = SWITCHABLE;
int64_t this_rd = INT64_MAX;
int rate2 = 0;
@ -3812,7 +3812,7 @@ void vp9_rd_pick_inter_mode_sub8x8(VP9_COMP *cpi,
MB_MODE_INFO best_mbmode;
int ref_index, best_ref_index = 0;
unsigned int ref_costs_single[MAX_REF_FRAMES], ref_costs_comp[MAX_REF_FRAMES];
vp9_prob comp_mode_p;
vpx_prob comp_mode_p;
INTERP_FILTER tmp_best_filter = SWITCHABLE;
int rate_uv_intra, rate_uv_tokenonly;
int64_t dist_uv;

@ -49,7 +49,7 @@ void vp9_clear_segdata(struct segmentation *seg, int segment_id,
}

// Based on set of segment counts calculate a probability tree
static void calc_segtree_probs(int *segcounts, vp9_prob *segment_tree_probs) {
static void calc_segtree_probs(int *segcounts, vpx_prob *segment_tree_probs) {
// Work out probabilities of each segment
const int c01 = segcounts[0] + segcounts[1];
const int c23 = segcounts[2] + segcounts[3];
@ -66,7 +66,7 @@ static void calc_segtree_probs(int *segcounts, vp9_prob *segment_tree_probs) {
}

// Based on set of segment counts and probabilities calculate a cost estimate
static int cost_segmap(int *segcounts, vp9_prob *probs) {
static int cost_segmap(int *segcounts, vpx_prob *probs) {
const int c01 = segcounts[0] + segcounts[1];
const int c23 = segcounts[2] + segcounts[3];
const int c45 = segcounts[4] + segcounts[5];
@ -207,9 +207,9 @@ void vp9_choose_segmap_coding_method(VP9_COMMON *cm, MACROBLOCKD *xd) {
int no_pred_segcounts[MAX_SEGMENTS] = { 0 };
int t_unpred_seg_counts[MAX_SEGMENTS] = { 0 };

vp9_prob no_pred_tree[SEG_TREE_PROBS];
vp9_prob t_pred_tree[SEG_TREE_PROBS];
vp9_prob t_nopred_prob[PREDICTION_PROBS];
vpx_prob no_pred_tree[SEG_TREE_PROBS];
vpx_prob t_pred_tree[SEG_TREE_PROBS];
vpx_prob t_nopred_prob[PREDICTION_PROBS];

// Set default state for the segment tree probabilities and the
// temporal coding probabilities

@ -78,7 +78,7 @@ static int remap_prob(int v, int m) {
return i;
}

static int prob_diff_update_cost(vp9_prob newp, vp9_prob oldp) {
static int prob_diff_update_cost(vpx_prob newp, vpx_prob oldp) {
int delp = remap_prob(newp, oldp);
return update_bits[delp] * 256;
}
@ -111,17 +111,17 @@ static void encode_term_subexp(vp9_writer *w, int word) {
}
}

void vp9_write_prob_diff_update(vp9_writer *w, vp9_prob newp, vp9_prob oldp) {
void vp9_write_prob_diff_update(vp9_writer *w, vpx_prob newp, vpx_prob oldp) {
const int delp = remap_prob(newp, oldp);
encode_term_subexp(w, delp);
}

int vp9_prob_diff_update_savings_search(const unsigned int *ct,
vp9_prob oldp, vp9_prob *bestp,
vp9_prob upd) {
vpx_prob oldp, vpx_prob *bestp,
vpx_prob upd) {
const int old_b = cost_branch256(ct, oldp);
int bestsavings = 0;
vp9_prob newp, bestnewp = oldp;
vpx_prob newp, bestnewp = oldp;
const int step = *bestp > oldp ? -1 : 1;

for (newp = *bestp; newp != oldp; newp += step) {
@ -138,15 +138,15 @@ int vp9_prob_diff_update_savings_search(const unsigned int *ct,
}

int vp9_prob_diff_update_savings_search_model(const unsigned int *ct,
const vp9_prob *oldp,
vp9_prob *bestp,
vp9_prob upd,
const vpx_prob *oldp,
vpx_prob *bestp,
vpx_prob upd,
int stepsize) {
int i, old_b, new_b, update_b, savings, bestsavings, step;
int newp;
vp9_prob bestnewp, newplist[ENTROPY_NODES], oldplist[ENTROPY_NODES];
vpx_prob bestnewp, newplist[ENTROPY_NODES], oldplist[ENTROPY_NODES];
vp9_model_to_full_probs(oldp, oldplist);
memcpy(newplist, oldp, sizeof(vp9_prob) * UNCONSTRAINED_NODES);
memcpy(newplist, oldp, sizeof(vpx_prob) * UNCONSTRAINED_NODES);
for (i = UNCONSTRAINED_NODES, old_b = 0; i < ENTROPY_NODES; ++i)
old_b += cost_branch256(ct + 2 * i, oldplist[i]);
old_b += cost_branch256(ct + 2 * PIVOT_NODE, oldplist[PIVOT_NODE]);
@ -196,10 +196,10 @@ int vp9_prob_diff_update_savings_search_model(const unsigned int *ct,
return bestsavings;
}

void vp9_cond_prob_diff_update(vp9_writer *w, vp9_prob *oldp,
void vp9_cond_prob_diff_update(vp9_writer *w, vpx_prob *oldp,
const unsigned int ct[2]) {
const vp9_prob upd = DIFF_UPDATE_PROB;
vp9_prob newp = get_binary_prob(ct[0], ct[1]);
const vpx_prob upd = DIFF_UPDATE_PROB;
vpx_prob newp = get_binary_prob(ct[0], ct[1]);
const int savings = vp9_prob_diff_update_savings_search(ct, *oldp, &newp,
upd);
assert(newp >= 1);

@ -21,20 +21,20 @@ extern "C" {
struct vp9_writer;

void vp9_write_prob_diff_update(struct vp9_writer *w,
vp9_prob newp, vp9_prob oldp);
vpx_prob newp, vpx_prob oldp);

void vp9_cond_prob_diff_update(struct vp9_writer *w, vp9_prob *oldp,
void vp9_cond_prob_diff_update(struct vp9_writer *w, vpx_prob *oldp,
const unsigned int ct[2]);

int vp9_prob_diff_update_savings_search(const unsigned int *ct,
vp9_prob oldp, vp9_prob *bestp,
vp9_prob upd);
vpx_prob oldp, vpx_prob *bestp,
vpx_prob upd);


int vp9_prob_diff_update_savings_search_model(const unsigned int *ct,
const vp9_prob *oldp,
vp9_prob *bestp,
vp9_prob upd,
const vpx_prob *oldp,
vpx_prob *bestp,
vpx_prob upd,
int stepsize);

#ifdef __cplusplus

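The declarations above are normally used together on the encoder side: derive a candidate probability from the counts, search for the most profitable value, and only then signal the subexponentially coded delta. A minimal illustrative caller (the function name, its arguments and the header path are assumptions, not code from the tree):

/* Illustrative caller only: conditionally refresh one binary probability. */
#include "vp9/encoder/vp9_subexp.h"

static void maybe_refresh_prob(struct vp9_writer *w, vpx_prob *prob,
                               const unsigned int counts[2]) {
  /* Writes an update flag and, when the savings search says it is
   * worthwhile, the coded delta between the old and the new value. */
  vp9_cond_prob_diff_update(w, prob, counts);
}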
@ -457,7 +457,7 @@ static void set_entropy_context_b(int plane, int block, BLOCK_SIZE plane_bsize,
aoff, loff);
}

static INLINE void add_token(TOKENEXTRA **t, const vp9_prob *context_tree,
static INLINE void add_token(TOKENEXTRA **t, const vpx_prob *context_tree,
int32_t extra, uint8_t token,
uint8_t skip_eob_node,
unsigned int *counts) {
@ -470,7 +470,7 @@ static INLINE void add_token(TOKENEXTRA **t, const vp9_prob *context_tree,
}

static INLINE void add_token_no_extra(TOKENEXTRA **t,
const vp9_prob *context_tree,
const vpx_prob *context_tree,
uint8_t token,
uint8_t skip_eob_node,
unsigned int *counts) {
@ -511,7 +511,7 @@ static void tokenize_b(int plane, int block, BLOCK_SIZE plane_bsize,
const int ref = is_inter_block(mbmi);
unsigned int (*const counts)[COEFF_CONTEXTS][ENTROPY_TOKENS] =
td->rd_counts.coef_counts[tx_size][type][ref];
vp9_prob (*const coef_probs)[COEFF_CONTEXTS][UNCONSTRAINED_NODES] =
vpx_prob (*const coef_probs)[COEFF_CONTEXTS][UNCONSTRAINED_NODES] =
cpi->common.fc->coef_probs[tx_size][type][ref];
unsigned int (*const eob_branch)[COEFF_CONTEXTS] =
td->counts->eob_branch[tx_size][type][ref];

@ -35,7 +35,7 @@ typedef struct {
} TOKENVALUE;

typedef struct {
const vp9_prob *context_tree;
const vpx_prob *context_tree;
EXTRABIT extra;
uint8_t token;
uint8_t skip_eob_node;

@ -29,7 +29,7 @@ struct vp9_token {
void vp9_tokens_from_tree(struct vp9_token*, const vp9_tree_index *);

static INLINE void vp9_write_tree(vp9_writer *w, const vp9_tree_index *tree,
const vp9_prob *probs, int bits, int len,
const vpx_prob *probs, int bits, int len,
vp9_tree_index i) {
do {
const int bit = (bits >> --len) & 1;
@ -39,7 +39,7 @@ static INLINE void vp9_write_tree(vp9_writer *w, const vp9_tree_index *tree,
}

static INLINE void vp9_write_token(vp9_writer *w, const vp9_tree_index *tree,
const vp9_prob *probs,
const vpx_prob *probs,
const struct vp9_token *token) {
vp9_write_tree(w, tree, probs, token->value, token->len, 0);
}

@ -145,7 +145,7 @@ static vpx_codec_err_t decoder_destroy(vpx_codec_alg_priv_t *ctx) {
}

static int parse_bitdepth_colorspace_sampling(
BITSTREAM_PROFILE profile, struct vp9_read_bit_buffer *rb) {
BITSTREAM_PROFILE profile, struct vpx_read_bit_buffer *rb) {
vpx_color_space_t color_space;
if (profile >= PROFILE_2)
rb->bit_offset += 1; // Bit-depth 10 or 12.
@ -191,7 +191,7 @@ static vpx_codec_err_t decoder_peek_si_internal(const uint8_t *data,
{
int show_frame;
int error_resilient;
struct vp9_read_bit_buffer rb = { data, data + data_sz, 0, NULL, NULL };
struct vpx_read_bit_buffer rb = { data, data + data_sz, 0, NULL, NULL };
const int frame_marker = vp9_rb_read_literal(&rb, 2);
const BITSTREAM_PROFILE profile = vp9_read_profile(&rb);

@ -13,7 +13,7 @@
#include "vpx_ports/mem.h"
#include "vpx_mem/vpx_mem.h"

int vp9_reader_init(vp9_reader *r,
int vpx_reader_init(vpx_reader *r,
const uint8_t *buffer,
size_t size,
vpx_decrypt_cb decrypt_cb,
@ -28,12 +28,12 @@ int vp9_reader_init(vp9_reader *r,
r->range = 255;
r->decrypt_cb = decrypt_cb;
r->decrypt_state = decrypt_state;
vp9_reader_fill(r);
return vp9_read_bit(r) != 0; // marker bit
vpx_reader_fill(r);
return vpx_read_bit(r) != 0; // marker bit
}
}

void vp9_reader_fill(vp9_reader *r) {
void vpx_reader_fill(vpx_reader *r) {
const uint8_t *const buffer_end = r->buffer_end;
const uint8_t *buffer = r->buffer;
const uint8_t *buffer_start = buffer;
@ -73,7 +73,7 @@ void vp9_reader_fill(vp9_reader *r) {
r->count = count;
}

const uint8_t *vp9_reader_find_end(vp9_reader *r) {
const uint8_t *vpx_reader_find_end(vpx_reader *r) {
// Find the end of the coded buffer
while (r->count > CHAR_BIT && r->count < BD_VALUE_SIZE) {
r->count -= CHAR_BIT;

@ -43,19 +43,19 @@ typedef struct {
vpx_decrypt_cb decrypt_cb;
void *decrypt_state;
uint8_t clear_buffer[sizeof(BD_VALUE) + 1];
} vp9_reader;
} vpx_reader;

int vp9_reader_init(vp9_reader *r,
int vpx_reader_init(vpx_reader *r,
const uint8_t *buffer,
size_t size,
vpx_decrypt_cb decrypt_cb,
void *decrypt_state);

void vp9_reader_fill(vp9_reader *r);
void vpx_reader_fill(vpx_reader *r);

const uint8_t *vp9_reader_find_end(vp9_reader *r);
const uint8_t *vpx_reader_find_end(vpx_reader *r);

static INLINE int vp9_reader_has_error(vp9_reader *r) {
static INLINE int vpx_reader_has_error(vpx_reader *r) {
// Check if we have reached the end of the buffer.
//
// Variable 'count' stores the number of bits in the 'value' buffer, minus
@ -73,7 +73,7 @@ static INLINE int vp9_reader_has_error(vp9_reader *r) {
return r->count > BD_VALUE_SIZE && r->count < LOTS_OF_BITS;
}

static INLINE int vp9_read(vp9_reader *r, int prob) {
static INLINE int vpx_read(vpx_reader *r, int prob) {
unsigned int bit = 0;
BD_VALUE value;
BD_VALUE bigsplit;
@ -82,7 +82,7 @@ static INLINE int vp9_read(vp9_reader *r, int prob) {
unsigned int split = (r->range * prob + (256 - prob)) >> CHAR_BIT;

if (r->count < 0)
vp9_reader_fill(r);
vpx_reader_fill(r);

value = r->value;
count = r->count;
@ -110,24 +110,24 @@ static INLINE int vp9_read(vp9_reader *r, int prob) {
return bit;
}

static INLINE int vp9_read_bit(vp9_reader *r) {
return vp9_read(r, 128); // vp9_prob_half
static INLINE int vpx_read_bit(vpx_reader *r) {
return vpx_read(r, 128); // vpx_prob_half
}

static INLINE int vp9_read_literal(vp9_reader *r, int bits) {
static INLINE int vpx_read_literal(vpx_reader *r, int bits) {
int literal = 0, bit;

for (bit = bits - 1; bit >= 0; bit--)
literal |= vp9_read_bit(r) << bit;
literal |= vpx_read_bit(r) << bit;

return literal;
}

static INLINE int vp9_read_tree(vp9_reader *r, const vp9_tree_index *tree,
const vp9_prob *probs) {
static INLINE int vpx_read_tree(vpx_reader *r, const vp9_tree_index *tree,
const vpx_prob *probs) {
vp9_tree_index i = 0;

while ((i = tree[i + vp9_read(r, probs[i >> 1])]) > 0)
while ((i = tree[i + vpx_read(r, probs[i >> 1])]) > 0)
continue;

return -i;

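After this rename, callers of the boolean decoder only see vpx_-prefixed entry points. A minimal sketch of driving the renamed API (the include path, the function name and the buffer handling are placeholders, not code from the tree):

/* Minimal sketch: exercise the renamed reader on a caller-supplied buffer. */
#include "vpx_dsp/bitreader.h"

static int demo_read(const uint8_t *buf, size_t size) {
  vpx_reader r;
  /* vpx_reader_init() returns nonzero when the marker bit is set. */
  if (vpx_reader_init(&r, buf, size, NULL, NULL))
    return -1;
  {
    const int flag = vpx_read_bit(&r);           /* one bit at probability 128 */
    const int literal = vpx_read_literal(&r, 4); /* four raw bits, MSB first */
    const int biased = vpx_read(&r, 200);        /* '0' is likely at 200/256 */
    return flag + literal + biased;
  }
}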
@ -9,11 +9,11 @@
 */
#include "./bitreader_buffer.h"

size_t vp9_rb_bytes_read(struct vp9_read_bit_buffer *rb) {
size_t vp9_rb_bytes_read(struct vpx_read_bit_buffer *rb) {
return (rb->bit_offset + 7) >> 3;
}

int vp9_rb_read_bit(struct vp9_read_bit_buffer *rb) {
int vp9_rb_read_bit(struct vpx_read_bit_buffer *rb) {
const size_t off = rb->bit_offset;
const size_t p = off >> 3;
const int q = 7 - (int)(off & 0x7);
@ -27,14 +27,14 @@ int vp9_rb_read_bit(struct vp9_read_bit_buffer *rb) {
}
}

int vp9_rb_read_literal(struct vp9_read_bit_buffer *rb, int bits) {
int vp9_rb_read_literal(struct vpx_read_bit_buffer *rb, int bits) {
int value = 0, bit;
for (bit = bits - 1; bit >= 0; bit--)
value |= vp9_rb_read_bit(rb) << bit;
return value;
}

int vp9_rb_read_signed_literal(struct vp9_read_bit_buffer *rb,
int vp9_rb_read_signed_literal(struct vpx_read_bit_buffer *rb,
int bits) {
const int value = vp9_rb_read_literal(rb, bits);
return vp9_rb_read_bit(rb) ? -value : value;

@ -21,7 +21,7 @@ extern "C" {

typedef void (*vp9_rb_error_handler)(void *data);

struct vp9_read_bit_buffer {
struct vpx_read_bit_buffer {
const uint8_t *bit_buffer;
const uint8_t *bit_buffer_end;
size_t bit_offset;
@ -30,13 +30,13 @@ struct vp9_read_bit_buffer {
vp9_rb_error_handler error_handler;
};

size_t vp9_rb_bytes_read(struct vp9_read_bit_buffer *rb);
size_t vp9_rb_bytes_read(struct vpx_read_bit_buffer *rb);

int vp9_rb_read_bit(struct vp9_read_bit_buffer *rb);
int vp9_rb_read_bit(struct vpx_read_bit_buffer *rb);

int vp9_rb_read_literal(struct vp9_read_bit_buffer *rb, int bits);
int vp9_rb_read_literal(struct vpx_read_bit_buffer *rb, int bits);

int vp9_rb_read_signed_literal(struct vp9_read_bit_buffer *rb, int bits);
int vp9_rb_read_signed_literal(struct vpx_read_bit_buffer *rb, int bits);

#ifdef __cplusplus
} // extern "C"

@ -79,7 +79,7 @@ static INLINE void vp9_write(vp9_writer *br, int bit, int probability) {
}

static INLINE void vp9_write_bit(vp9_writer *w, int bit) {
vp9_write(w, bit, 128); // vp9_prob_half
vp9_write(w, bit, 128); // vpx_prob_half
}

static INLINE void vp9_write_literal(vp9_writer *w, int data, int bits) {

@ -31,9 +31,9 @@ const uint8_t vp9_norm[256] = {

static unsigned int tree_merge_probs_impl(unsigned int i,
const vp9_tree_index *tree,
const vp9_prob *pre_probs,
const vpx_prob *pre_probs,
const unsigned int *counts,
vp9_prob *probs) {
vpx_prob *probs) {
const int l = tree[i];
const unsigned int left_count = (l <= 0)
? counts[-l]
@ -47,7 +47,7 @@ static unsigned int tree_merge_probs_impl(unsigned int i,
return left_count + right_count;
}

void vp9_tree_merge_probs(const vp9_tree_index *tree, const vp9_prob *pre_probs,
const unsigned int *counts, vp9_prob *probs) {
void vp9_tree_merge_probs(const vp9_tree_index *tree, const vpx_prob *pre_probs,
const unsigned int *counts, vpx_prob *probs) {
tree_merge_probs_impl(0, tree, pre_probs, counts, probs);
}

@ -20,11 +20,11 @@
extern "C" {
#endif

typedef uint8_t vp9_prob;
typedef uint8_t vpx_prob;

#define MAX_PROB 255

#define vp9_prob_half ((vp9_prob) 128)
#define vpx_prob_half ((vpx_prob) 128)

typedef int8_t vp9_tree_index;

@ -43,28 +43,28 @@ typedef int8_t vp9_tree_index;

typedef const vp9_tree_index vp9_tree[];

static INLINE vp9_prob clip_prob(int p) {
static INLINE vpx_prob clip_prob(int p) {
return (p > 255) ? 255 : (p < 1) ? 1 : p;
}

static INLINE vp9_prob get_prob(int num, int den) {
static INLINE vpx_prob get_prob(int num, int den) {
return (den == 0) ? 128u : clip_prob(((int64_t)num * 256 + (den >> 1)) / den);
}

static INLINE vp9_prob get_binary_prob(int n0, int n1) {
static INLINE vpx_prob get_binary_prob(int n0, int n1) {
return get_prob(n0, n0 + n1);
}

/* This function assumes prob1 and prob2 are already within [1,255] range. */
static INLINE vp9_prob weighted_prob(int prob1, int prob2, int factor) {
static INLINE vpx_prob weighted_prob(int prob1, int prob2, int factor) {
return ROUND_POWER_OF_TWO(prob1 * (256 - factor) + prob2 * factor, 8);
}

static INLINE vp9_prob merge_probs(vp9_prob pre_prob,
static INLINE vpx_prob merge_probs(vpx_prob pre_prob,
const unsigned int ct[2],
unsigned int count_sat,
unsigned int max_update_factor) {
const vp9_prob prob = get_binary_prob(ct[0], ct[1]);
const vpx_prob prob = get_binary_prob(ct[0], ct[1]);
const unsigned int count = MIN(ct[0] + ct[1], count_sat);
const unsigned int factor = max_update_factor * count / count_sat;
return weighted_prob(pre_prob, prob, factor);
@ -76,7 +76,7 @@ static const int count_to_update_factor[MODE_MV_COUNT_SAT + 1] = {
70, 76, 83, 89, 96, 102, 108, 115, 121, 128
};

static INLINE vp9_prob mode_mv_merge_probs(vp9_prob pre_prob,
static INLINE vpx_prob mode_mv_merge_probs(vpx_prob pre_prob,
const unsigned int ct[2]) {
const unsigned int den = ct[0] + ct[1];
if (den == 0) {
@ -84,14 +84,14 @@ static INLINE vp9_prob mode_mv_merge_probs(vp9_prob pre_prob,
} else {
const unsigned int count = MIN(den, MODE_MV_COUNT_SAT);
const unsigned int factor = count_to_update_factor[count];
const vp9_prob prob =
const vpx_prob prob =
clip_prob(((int64_t)(ct[0]) * 256 + (den >> 1)) / den);
return weighted_prob(pre_prob, prob, factor);
}
}

void vp9_tree_merge_probs(const vp9_tree_index *tree, const vp9_prob *pre_probs,
const unsigned int *counts, vp9_prob *probs);
void vp9_tree_merge_probs(const vp9_tree_index *tree, const vpx_prob *pre_probs,
const unsigned int *counts, vpx_prob *probs);

DECLARE_ALIGNED(16, extern const uint8_t, vp9_norm[256]);
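The probability-adaptation helpers above are plain fixed-point arithmetic, so one worked example makes the rounding visible. The standalone sketch below re-implements clip_prob(), get_prob() and weighted_prob() as defined in the header (with ROUND_POWER_OF_TWO expanded) and merges one pair of values; the counts and the factor of 64 are chosen only for illustration.

/* Standalone sketch of the adaptation arithmetic shown above. */
#include <stdint.h>
#include <stdio.h>

typedef uint8_t vpx_prob;

static vpx_prob clip_prob(int p) {
  return (p > 255) ? 255 : (p < 1) ? 1 : p;
}

static vpx_prob get_prob(int num, int den) {
  return (den == 0) ? 128u : clip_prob(((int64_t)num * 256 + (den >> 1)) / den);
}

static vpx_prob weighted_prob(int prob1, int prob2, int factor) {
  /* ROUND_POWER_OF_TWO(x, 8) expanded as (x + 128) >> 8 */
  return (prob1 * (256 - factor) + prob2 * factor + 128) >> 8;
}

int main(void) {
  /* Counts observed for one binary symbol: 60 zeros, 20 ones. */
  const vpx_prob measured = get_prob(60, 60 + 20);      /* (60*256+40)/80 = 192 */
  /* Blend the previous probability 128 toward 192 with factor 64 (25%). */
  const vpx_prob merged = weighted_prob(128, measured, 64);
  printf("measured=%d merged=%d\n", measured, merged);  /* prints 192 and 144 */
  return 0;
}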