vp9/decoder,vp9/*.[hc]: apply clang-format
Change-Id: Ic38ea06c7b2fb3e8e94a4c0910e82672a1acaea7
parent 8ff40f8bec
commit 08131055e4
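The change is purely mechanical: clang-format re-wraps long argument lists to the column limit and folds single-statement bodies onto the line of their `if`/`for`/`case`. A minimal sketch of the recurring pattern is below; the type and function names are hypothetical stand-ins, not code from this commit.

/* Illustrative only -- names are stand-ins for the decoder's real types;
 * the transformation shown is the one applied throughout the diff below. */
typedef struct {
  int y_mode[4][10];
} frame_counts_t;

/* Layout before formatting: the single-statement body sits on its own line. */
static void bump_before(frame_counts_t *counts, int group, int mode) {
  if (counts)
    ++counts->y_mode[group][mode];
}

/* Layout after clang-format: the body is folded onto the `if` line. */
static void bump_after(frame_counts_t *counts, int group, int mode) {
  if (counts) ++counts->y_mode[group][mode];
}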
@ -8,7 +8,6 @@
|
|||||||
* be found in the AUTHORS file in the root of the source tree.
|
* be found in the AUTHORS file in the root of the source tree.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
|
|
||||||
#ifndef VP9_DECODER_VP9_DECODEFRAME_H_
|
#ifndef VP9_DECODER_VP9_DECODEFRAME_H_
|
||||||
#define VP9_DECODER_VP9_DECODEFRAME_H_
|
#define VP9_DECODER_VP9_DECODEFRAME_H_
|
||||||
|
|
||||||
@ -22,13 +21,12 @@ struct VP9Decoder;
|
|||||||
struct vpx_read_bit_buffer;
|
struct vpx_read_bit_buffer;
|
||||||
|
|
||||||
int vp9_read_sync_code(struct vpx_read_bit_buffer *const rb);
|
int vp9_read_sync_code(struct vpx_read_bit_buffer *const rb);
|
||||||
void vp9_read_frame_size(struct vpx_read_bit_buffer *rb,
|
void vp9_read_frame_size(struct vpx_read_bit_buffer *rb, int *width,
|
||||||
int *width, int *height);
|
int *height);
|
||||||
BITSTREAM_PROFILE vp9_read_profile(struct vpx_read_bit_buffer *rb);
|
BITSTREAM_PROFILE vp9_read_profile(struct vpx_read_bit_buffer *rb);
|
||||||
|
|
||||||
void vp9_decode_frame(struct VP9Decoder *pbi,
|
void vp9_decode_frame(struct VP9Decoder *pbi, const uint8_t *data,
|
||||||
const uint8_t *data, const uint8_t *data_end,
|
const uint8_t *data_end, const uint8_t **p_data_end);
|
||||||
const uint8_t **p_data_end);
|
|
||||||
|
|
||||||
#ifdef __cplusplus
|
#ifdef __cplusplus
|
||||||
} // extern "C"
|
} // extern "C"
|
||||||
|
@ -33,29 +33,26 @@ static PREDICTION_MODE read_intra_mode_y(VP9_COMMON *cm, MACROBLOCKD *xd,
|
|||||||
const PREDICTION_MODE y_mode =
|
const PREDICTION_MODE y_mode =
|
||||||
read_intra_mode(r, cm->fc->y_mode_prob[size_group]);
|
read_intra_mode(r, cm->fc->y_mode_prob[size_group]);
|
||||||
FRAME_COUNTS *counts = xd->counts;
|
FRAME_COUNTS *counts = xd->counts;
|
||||||
if (counts)
|
if (counts) ++counts->y_mode[size_group][y_mode];
|
||||||
++counts->y_mode[size_group][y_mode];
|
|
||||||
return y_mode;
|
return y_mode;
|
||||||
}
|
}
|
||||||
|
|
||||||
static PREDICTION_MODE read_intra_mode_uv(VP9_COMMON *cm, MACROBLOCKD *xd,
|
static PREDICTION_MODE read_intra_mode_uv(VP9_COMMON *cm, MACROBLOCKD *xd,
|
||||||
vpx_reader *r,
|
vpx_reader *r,
|
||||||
PREDICTION_MODE y_mode) {
|
PREDICTION_MODE y_mode) {
|
||||||
const PREDICTION_MODE uv_mode = read_intra_mode(r,
|
const PREDICTION_MODE uv_mode =
|
||||||
cm->fc->uv_mode_prob[y_mode]);
|
read_intra_mode(r, cm->fc->uv_mode_prob[y_mode]);
|
||||||
FRAME_COUNTS *counts = xd->counts;
|
FRAME_COUNTS *counts = xd->counts;
|
||||||
if (counts)
|
if (counts) ++counts->uv_mode[y_mode][uv_mode];
|
||||||
++counts->uv_mode[y_mode][uv_mode];
|
|
||||||
return uv_mode;
|
return uv_mode;
|
||||||
}
|
}
|
||||||
|
|
||||||
static PREDICTION_MODE read_inter_mode(VP9_COMMON *cm, MACROBLOCKD *xd,
|
static PREDICTION_MODE read_inter_mode(VP9_COMMON *cm, MACROBLOCKD *xd,
|
||||||
vpx_reader *r, int ctx) {
|
vpx_reader *r, int ctx) {
|
||||||
const int mode = vpx_read_tree(r, vp9_inter_mode_tree,
|
const int mode =
|
||||||
cm->fc->inter_mode_probs[ctx]);
|
vpx_read_tree(r, vp9_inter_mode_tree, cm->fc->inter_mode_probs[ctx]);
|
||||||
FRAME_COUNTS *counts = xd->counts;
|
FRAME_COUNTS *counts = xd->counts;
|
||||||
if (counts)
|
if (counts) ++counts->inter_mode[ctx][mode];
|
||||||
++counts->inter_mode[ctx][mode];
|
|
||||||
|
|
||||||
return NEARESTMV + mode;
|
return NEARESTMV + mode;
|
||||||
}
|
}
|
||||||
@ -76,8 +73,7 @@ static TX_SIZE read_selected_tx_size(VP9_COMMON *cm, MACROBLOCKD *xd,
|
|||||||
tx_size += vpx_read(r, tx_probs[2]);
|
tx_size += vpx_read(r, tx_probs[2]);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (counts)
|
if (counts) ++get_tx_counts(max_tx_size, ctx, &counts->tx)[tx_size];
|
||||||
++get_tx_counts(max_tx_size, ctx, &counts->tx)[tx_size];
|
|
||||||
return (TX_SIZE)tx_size;
|
return (TX_SIZE)tx_size;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -105,8 +101,8 @@ static int dec_get_segment_id(const VP9_COMMON *cm, const uint8_t *segment_ids,
|
|||||||
return segment_id;
|
return segment_id;
|
||||||
}
|
}
|
||||||
|
|
||||||
static void set_segment_id(VP9_COMMON *cm, int mi_offset,
|
static void set_segment_id(VP9_COMMON *cm, int mi_offset, int x_mis, int y_mis,
|
||||||
int x_mis, int y_mis, int segment_id) {
|
int segment_id) {
|
||||||
int x, y;
|
int x, y;
|
||||||
|
|
||||||
assert(segment_id >= 0 && segment_id < MAX_SEGMENTS);
|
assert(segment_id >= 0 && segment_id < MAX_SEGMENTS);
|
||||||
@ -118,24 +114,23 @@ static void set_segment_id(VP9_COMMON *cm, int mi_offset,
|
|||||||
|
|
||||||
static void copy_segment_id(const VP9_COMMON *cm,
|
static void copy_segment_id(const VP9_COMMON *cm,
|
||||||
const uint8_t *last_segment_ids,
|
const uint8_t *last_segment_ids,
|
||||||
uint8_t *current_segment_ids,
|
uint8_t *current_segment_ids, int mi_offset,
|
||||||
int mi_offset, int x_mis, int y_mis) {
|
int x_mis, int y_mis) {
|
||||||
int x, y;
|
int x, y;
|
||||||
|
|
||||||
for (y = 0; y < y_mis; y++)
|
for (y = 0; y < y_mis; y++)
|
||||||
for (x = 0; x < x_mis; x++)
|
for (x = 0; x < x_mis; x++)
|
||||||
current_segment_ids[mi_offset + y * cm->mi_cols + x] = last_segment_ids ?
|
current_segment_ids[mi_offset + y * cm->mi_cols + x] =
|
||||||
last_segment_ids[mi_offset + y * cm->mi_cols + x] : 0;
|
last_segment_ids ? last_segment_ids[mi_offset + y * cm->mi_cols + x]
|
||||||
|
: 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
static int read_intra_segment_id(VP9_COMMON *const cm, int mi_offset,
|
static int read_intra_segment_id(VP9_COMMON *const cm, int mi_offset, int x_mis,
|
||||||
int x_mis, int y_mis,
|
int y_mis, vpx_reader *r) {
|
||||||
vpx_reader *r) {
|
|
||||||
struct segmentation *const seg = &cm->seg;
|
struct segmentation *const seg = &cm->seg;
|
||||||
int segment_id;
|
int segment_id;
|
||||||
|
|
||||||
if (!seg->enabled)
|
if (!seg->enabled) return 0; // Default for disabled segmentation
|
||||||
return 0; // Default for disabled segmentation
|
|
||||||
|
|
||||||
if (!seg->update_map) {
|
if (!seg->update_map) {
|
||||||
copy_segment_id(cm, cm->last_frame_seg_map, cm->current_frame_seg_map,
|
copy_segment_id(cm, cm->last_frame_seg_map, cm->current_frame_seg_map,
|
||||||
@ -156,12 +151,12 @@ static int read_inter_segment_id(VP9_COMMON *const cm, MACROBLOCKD *const xd,
|
|||||||
int predicted_segment_id, segment_id;
|
int predicted_segment_id, segment_id;
|
||||||
const int mi_offset = mi_row * cm->mi_cols + mi_col;
|
const int mi_offset = mi_row * cm->mi_cols + mi_col;
|
||||||
|
|
||||||
if (!seg->enabled)
|
if (!seg->enabled) return 0; // Default for disabled segmentation
|
||||||
return 0; // Default for disabled segmentation
|
|
||||||
|
|
||||||
predicted_segment_id = cm->last_frame_seg_map ?
|
predicted_segment_id = cm->last_frame_seg_map
|
||||||
dec_get_segment_id(cm, cm->last_frame_seg_map, mi_offset, x_mis, y_mis) :
|
? dec_get_segment_id(cm, cm->last_frame_seg_map,
|
||||||
0;
|
mi_offset, x_mis, y_mis)
|
||||||
|
: 0;
|
||||||
|
|
||||||
if (!seg->update_map) {
|
if (!seg->update_map) {
|
||||||
copy_segment_id(cm, cm->last_frame_seg_map, cm->current_frame_seg_map,
|
copy_segment_id(cm, cm->last_frame_seg_map, cm->current_frame_seg_map,
|
||||||
@ -172,8 +167,8 @@ static int read_inter_segment_id(VP9_COMMON *const cm, MACROBLOCKD *const xd,
|
|||||||
if (seg->temporal_update) {
|
if (seg->temporal_update) {
|
||||||
const vpx_prob pred_prob = vp9_get_pred_prob_seg_id(seg, xd);
|
const vpx_prob pred_prob = vp9_get_pred_prob_seg_id(seg, xd);
|
||||||
mi->seg_id_predicted = vpx_read(r, pred_prob);
|
mi->seg_id_predicted = vpx_read(r, pred_prob);
|
||||||
segment_id = mi->seg_id_predicted ? predicted_segment_id
|
segment_id =
|
||||||
: read_segment_id(r, seg);
|
mi->seg_id_predicted ? predicted_segment_id : read_segment_id(r, seg);
|
||||||
} else {
|
} else {
|
||||||
segment_id = read_segment_id(r, seg);
|
segment_id = read_segment_id(r, seg);
|
||||||
}
|
}
|
||||||
@ -181,24 +176,23 @@ static int read_inter_segment_id(VP9_COMMON *const cm, MACROBLOCKD *const xd,
|
|||||||
return segment_id;
|
return segment_id;
|
||||||
}
|
}
|
||||||
|
|
||||||
static int read_skip(VP9_COMMON *cm, const MACROBLOCKD *xd,
|
static int read_skip(VP9_COMMON *cm, const MACROBLOCKD *xd, int segment_id,
|
||||||
int segment_id, vpx_reader *r) {
|
vpx_reader *r) {
|
||||||
if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP)) {
|
if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP)) {
|
||||||
return 1;
|
return 1;
|
||||||
} else {
|
} else {
|
||||||
const int ctx = vp9_get_skip_context(xd);
|
const int ctx = vp9_get_skip_context(xd);
|
||||||
const int skip = vpx_read(r, cm->fc->skip_probs[ctx]);
|
const int skip = vpx_read(r, cm->fc->skip_probs[ctx]);
|
||||||
FRAME_COUNTS *counts = xd->counts;
|
FRAME_COUNTS *counts = xd->counts;
|
||||||
if (counts)
|
if (counts) ++counts->skip[ctx][skip];
|
||||||
++counts->skip[ctx][skip];
|
|
||||||
return skip;
|
return skip;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
static void read_intra_frame_mode_info(VP9_COMMON *const cm,
|
static void read_intra_frame_mode_info(VP9_COMMON *const cm,
|
||||||
MACROBLOCKD *const xd,
|
MACROBLOCKD *const xd, int mi_row,
|
||||||
int mi_row, int mi_col, vpx_reader *r,
|
int mi_col, vpx_reader *r, int x_mis,
|
||||||
int x_mis, int y_mis) {
|
int y_mis) {
|
||||||
MODE_INFO *const mi = xd->mi[0];
|
MODE_INFO *const mi = xd->mi[0];
|
||||||
const MODE_INFO *above_mi = xd->above_mi;
|
const MODE_INFO *above_mi = xd->above_mi;
|
||||||
const MODE_INFO *left_mi = xd->left_mi;
|
const MODE_INFO *left_mi = xd->left_mi;
|
||||||
@ -232,15 +226,14 @@ static void read_intra_frame_mode_info(VP9_COMMON *const cm,
|
|||||||
read_intra_mode(r, get_y_mode_probs(mi, above_mi, left_mi, 2));
|
read_intra_mode(r, get_y_mode_probs(mi, above_mi, left_mi, 2));
|
||||||
break;
|
break;
|
||||||
default:
|
default:
|
||||||
mi->mode = read_intra_mode(r,
|
mi->mode = read_intra_mode(r, get_y_mode_probs(mi, above_mi, left_mi, 0));
|
||||||
get_y_mode_probs(mi, above_mi, left_mi, 0));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
mi->uv_mode = read_intra_mode(r, vp9_kf_uv_mode_prob[mi->mode]);
|
mi->uv_mode = read_intra_mode(r, vp9_kf_uv_mode_prob[mi->mode]);
|
||||||
}
|
}
|
||||||
|
|
||||||
static int read_mv_component(vpx_reader *r,
|
static int read_mv_component(vpx_reader *r, const nmv_component *mvcomp,
|
||||||
const nmv_component *mvcomp, int usehp) {
|
int usehp) {
|
||||||
int mag, d, fr, hp;
|
int mag, d, fr, hp;
|
||||||
const int sign = vpx_read(r, mvcomp->sign);
|
const int sign = vpx_read(r, mvcomp->sign);
|
||||||
const int mv_class = vpx_read_tree(r, vp9_mv_class_tree, mvcomp->classes);
|
const int mv_class = vpx_read_tree(r, vp9_mv_class_tree, mvcomp->classes);
|
||||||
@ -255,18 +248,16 @@ static int read_mv_component(vpx_reader *r,
|
|||||||
const int n = mv_class + CLASS0_BITS - 1; // number of bits
|
const int n = mv_class + CLASS0_BITS - 1; // number of bits
|
||||||
|
|
||||||
d = 0;
|
d = 0;
|
||||||
for (i = 0; i < n; ++i)
|
for (i = 0; i < n; ++i) d |= vpx_read(r, mvcomp->bits[i]) << i;
|
||||||
d |= vpx_read(r, mvcomp->bits[i]) << i;
|
|
||||||
mag = CLASS0_SIZE << (mv_class + 2);
|
mag = CLASS0_SIZE << (mv_class + 2);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Fractional part
|
// Fractional part
|
||||||
fr = vpx_read_tree(r, vp9_mv_fp_tree, class0 ? mvcomp->class0_fp[d]
|
fr = vpx_read_tree(r, vp9_mv_fp_tree,
|
||||||
: mvcomp->fp);
|
class0 ? mvcomp->class0_fp[d] : mvcomp->fp);
|
||||||
|
|
||||||
// High precision part (if hp is not used, the default value of the hp is 1)
|
// High precision part (if hp is not used, the default value of the hp is 1)
|
||||||
hp = usehp ? vpx_read(r, class0 ? mvcomp->class0_hp : mvcomp->hp)
|
hp = usehp ? vpx_read(r, class0 ? mvcomp->class0_hp : mvcomp->hp) : 1;
|
||||||
: 1;
|
|
||||||
|
|
||||||
// Result
|
// Result
|
||||||
mag += ((d << 3) | (fr << 1) | hp) + 1;
|
mag += ((d << 3) | (fr << 1) | hp) + 1;
|
||||||
@ -274,8 +265,8 @@ static int read_mv_component(vpx_reader *r,
|
|||||||
}
|
}
|
||||||
|
|
||||||
static INLINE void read_mv(vpx_reader *r, MV *mv, const MV *ref,
|
static INLINE void read_mv(vpx_reader *r, MV *mv, const MV *ref,
|
||||||
const nmv_context *ctx,
|
const nmv_context *ctx, nmv_context_counts *counts,
|
||||||
nmv_context_counts *counts, int allow_hp) {
|
int allow_hp) {
|
||||||
const MV_JOINT_TYPE joint_type =
|
const MV_JOINT_TYPE joint_type =
|
||||||
(MV_JOINT_TYPE)vpx_read_tree(r, vp9_mv_joint_tree, ctx->joints);
|
(MV_JOINT_TYPE)vpx_read_tree(r, vp9_mv_joint_tree, ctx->joints);
|
||||||
const int use_hp = allow_hp && use_mv_hp(ref);
|
const int use_hp = allow_hp && use_mv_hp(ref);
|
||||||
@ -301,8 +292,7 @@ static REFERENCE_MODE read_block_reference_mode(VP9_COMMON *cm,
|
|||||||
const REFERENCE_MODE mode =
|
const REFERENCE_MODE mode =
|
||||||
(REFERENCE_MODE)vpx_read(r, cm->fc->comp_inter_prob[ctx]);
|
(REFERENCE_MODE)vpx_read(r, cm->fc->comp_inter_prob[ctx]);
|
||||||
FRAME_COUNTS *counts = xd->counts;
|
FRAME_COUNTS *counts = xd->counts;
|
||||||
if (counts)
|
if (counts) ++counts->comp_inter[ctx][mode];
|
||||||
++counts->comp_inter[ctx][mode];
|
|
||||||
return mode; // SINGLE_REFERENCE or COMPOUND_REFERENCE
|
return mode; // SINGLE_REFERENCE or COMPOUND_REFERENCE
|
||||||
} else {
|
} else {
|
||||||
return cm->reference_mode;
|
return cm->reference_mode;
|
||||||
@ -311,8 +301,8 @@ static REFERENCE_MODE read_block_reference_mode(VP9_COMMON *cm,
|
|||||||
|
|
||||||
// Read the referncence frame
|
// Read the referncence frame
|
||||||
static void read_ref_frames(VP9_COMMON *const cm, MACROBLOCKD *const xd,
|
static void read_ref_frames(VP9_COMMON *const cm, MACROBLOCKD *const xd,
|
||||||
vpx_reader *r,
|
vpx_reader *r, int segment_id,
|
||||||
int segment_id, MV_REFERENCE_FRAME ref_frame[2]) {
|
MV_REFERENCE_FRAME ref_frame[2]) {
|
||||||
FRAME_CONTEXT *const fc = cm->fc;
|
FRAME_CONTEXT *const fc = cm->fc;
|
||||||
FRAME_COUNTS *counts = xd->counts;
|
FRAME_COUNTS *counts = xd->counts;
|
||||||
|
|
||||||
@ -327,20 +317,17 @@ static void read_ref_frames(VP9_COMMON *const cm, MACROBLOCKD *const xd,
|
|||||||
const int idx = cm->ref_frame_sign_bias[cm->comp_fixed_ref];
|
const int idx = cm->ref_frame_sign_bias[cm->comp_fixed_ref];
|
||||||
const int ctx = vp9_get_pred_context_comp_ref_p(cm, xd);
|
const int ctx = vp9_get_pred_context_comp_ref_p(cm, xd);
|
||||||
const int bit = vpx_read(r, fc->comp_ref_prob[ctx]);
|
const int bit = vpx_read(r, fc->comp_ref_prob[ctx]);
|
||||||
if (counts)
|
if (counts) ++counts->comp_ref[ctx][bit];
|
||||||
++counts->comp_ref[ctx][bit];
|
|
||||||
ref_frame[idx] = cm->comp_fixed_ref;
|
ref_frame[idx] = cm->comp_fixed_ref;
|
||||||
ref_frame[!idx] = cm->comp_var_ref[bit];
|
ref_frame[!idx] = cm->comp_var_ref[bit];
|
||||||
} else if (mode == SINGLE_REFERENCE) {
|
} else if (mode == SINGLE_REFERENCE) {
|
||||||
const int ctx0 = vp9_get_pred_context_single_ref_p1(xd);
|
const int ctx0 = vp9_get_pred_context_single_ref_p1(xd);
|
||||||
const int bit0 = vpx_read(r, fc->single_ref_prob[ctx0][0]);
|
const int bit0 = vpx_read(r, fc->single_ref_prob[ctx0][0]);
|
||||||
if (counts)
|
if (counts) ++counts->single_ref[ctx0][0][bit0];
|
||||||
++counts->single_ref[ctx0][0][bit0];
|
|
||||||
if (bit0) {
|
if (bit0) {
|
||||||
const int ctx1 = vp9_get_pred_context_single_ref_p2(xd);
|
const int ctx1 = vp9_get_pred_context_single_ref_p2(xd);
|
||||||
const int bit1 = vpx_read(r, fc->single_ref_prob[ctx1][1]);
|
const int bit1 = vpx_read(r, fc->single_ref_prob[ctx1][1]);
|
||||||
if (counts)
|
if (counts) ++counts->single_ref[ctx1][1][bit1];
|
||||||
++counts->single_ref[ctx1][1][bit1];
|
|
||||||
ref_frame[0] = bit1 ? ALTREF_FRAME : GOLDEN_FRAME;
|
ref_frame[0] = bit1 ? ALTREF_FRAME : GOLDEN_FRAME;
|
||||||
} else {
|
} else {
|
||||||
ref_frame[0] = LAST_FRAME;
|
ref_frame[0] = LAST_FRAME;
|
||||||
@ -353,16 +340,14 @@ static void read_ref_frames(VP9_COMMON *const cm, MACROBLOCKD *const xd,
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
static INLINE INTERP_FILTER read_switchable_interp_filter(
|
static INLINE INTERP_FILTER read_switchable_interp_filter(VP9_COMMON *const cm,
|
||||||
VP9_COMMON *const cm, MACROBLOCKD *const xd,
|
MACROBLOCKD *const xd,
|
||||||
vpx_reader *r) {
|
vpx_reader *r) {
|
||||||
const int ctx = get_pred_context_switchable_interp(xd);
|
const int ctx = get_pred_context_switchable_interp(xd);
|
||||||
const INTERP_FILTER type =
|
const INTERP_FILTER type = (INTERP_FILTER)vpx_read_tree(
|
||||||
(INTERP_FILTER)vpx_read_tree(r, vp9_switchable_interp_tree,
|
r, vp9_switchable_interp_tree, cm->fc->switchable_interp_prob[ctx]);
|
||||||
cm->fc->switchable_interp_prob[ctx]);
|
|
||||||
FRAME_COUNTS *counts = xd->counts;
|
FRAME_COUNTS *counts = xd->counts;
|
||||||
if (counts)
|
if (counts) ++counts->switchable_interp[ctx][type];
|
||||||
++counts->switchable_interp[ctx][type];
|
|
||||||
return type;
|
return type;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -379,19 +364,16 @@ static void read_intra_block_mode_info(VP9_COMMON *const cm,
|
|||||||
mi->mode = mi->bmi[3].as_mode;
|
mi->mode = mi->bmi[3].as_mode;
|
||||||
break;
|
break;
|
||||||
case BLOCK_4X8:
|
case BLOCK_4X8:
|
||||||
mi->bmi[0].as_mode = mi->bmi[2].as_mode = read_intra_mode_y(cm, xd,
|
mi->bmi[0].as_mode = mi->bmi[2].as_mode = read_intra_mode_y(cm, xd, r, 0);
|
||||||
r, 0);
|
|
||||||
mi->bmi[1].as_mode = mi->bmi[3].as_mode = mi->mode =
|
mi->bmi[1].as_mode = mi->bmi[3].as_mode = mi->mode =
|
||||||
read_intra_mode_y(cm, xd, r, 0);
|
read_intra_mode_y(cm, xd, r, 0);
|
||||||
break;
|
break;
|
||||||
case BLOCK_8X4:
|
case BLOCK_8X4:
|
||||||
mi->bmi[0].as_mode = mi->bmi[1].as_mode = read_intra_mode_y(cm, xd,
|
mi->bmi[0].as_mode = mi->bmi[1].as_mode = read_intra_mode_y(cm, xd, r, 0);
|
||||||
r, 0);
|
|
||||||
mi->bmi[2].as_mode = mi->bmi[3].as_mode = mi->mode =
|
mi->bmi[2].as_mode = mi->bmi[3].as_mode = mi->mode =
|
||||||
read_intra_mode_y(cm, xd, r, 0);
|
read_intra_mode_y(cm, xd, r, 0);
|
||||||
break;
|
break;
|
||||||
default:
|
default: mi->mode = read_intra_mode_y(cm, xd, r, size_group_lookup[bsize]);
|
||||||
mi->mode = read_intra_mode_y(cm, xd, r, size_group_lookup[bsize]);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
mi->uv_mode = read_intra_mode_uv(cm, xd, r, mi->mode);
|
mi->uv_mode = read_intra_mode_uv(cm, xd, r, mi->mode);
|
||||||
@ -405,8 +387,8 @@ static void read_intra_block_mode_info(VP9_COMMON *const cm,
|
|||||||
}
|
}
|
||||||
|
|
||||||
static INLINE int is_mv_valid(const MV *mv) {
|
static INLINE int is_mv_valid(const MV *mv) {
|
||||||
return mv->row > MV_LOW && mv->row < MV_UPP &&
|
return mv->row > MV_LOW && mv->row < MV_UPP && mv->col > MV_LOW &&
|
||||||
mv->col > MV_LOW && mv->col < MV_UPP;
|
mv->col < MV_UPP;
|
||||||
}
|
}
|
||||||
|
|
||||||
static INLINE void copy_mv_pair(int_mv *dst, const int_mv *src) {
|
static INLINE void copy_mv_pair(int_mv *dst, const int_mv *src) {
|
||||||
@ -418,9 +400,8 @@ static INLINE void zero_mv_pair(int_mv *dst) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
static INLINE int assign_mv(VP9_COMMON *cm, MACROBLOCKD *xd,
|
static INLINE int assign_mv(VP9_COMMON *cm, MACROBLOCKD *xd,
|
||||||
PREDICTION_MODE mode,
|
PREDICTION_MODE mode, int_mv mv[2],
|
||||||
int_mv mv[2], int_mv ref_mv[2],
|
int_mv ref_mv[2], int_mv near_nearest_mv[2],
|
||||||
int_mv near_nearest_mv[2],
|
|
||||||
int is_compound, int allow_hp, vpx_reader *r) {
|
int is_compound, int allow_hp, vpx_reader *r) {
|
||||||
int i;
|
int i;
|
||||||
int ret = 1;
|
int ret = 1;
|
||||||
@ -445,9 +426,7 @@ static INLINE int assign_mv(VP9_COMMON *cm, MACROBLOCKD *xd,
|
|||||||
zero_mv_pair(mv);
|
zero_mv_pair(mv);
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
default: {
|
default: { return 0; }
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
return ret;
|
return ret;
|
||||||
}
|
}
|
||||||
@ -460,8 +439,7 @@ static int read_is_inter_block(VP9_COMMON *const cm, MACROBLOCKD *const xd,
|
|||||||
const int ctx = get_intra_inter_context(xd);
|
const int ctx = get_intra_inter_context(xd);
|
||||||
const int is_inter = vpx_read(r, cm->fc->intra_inter_prob[ctx]);
|
const int is_inter = vpx_read(r, cm->fc->intra_inter_prob[ctx]);
|
||||||
FRAME_COUNTS *counts = xd->counts;
|
FRAME_COUNTS *counts = xd->counts;
|
||||||
if (counts)
|
if (counts) ++counts->intra_inter[ctx][is_inter];
|
||||||
++counts->intra_inter[ctx][is_inter];
|
|
||||||
return is_inter;
|
return is_inter;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -496,8 +474,7 @@ static void fpm_sync(void *const data, int mi_row) {
|
|||||||
} \
|
} \
|
||||||
} else { \
|
} else { \
|
||||||
(mv_ref_list)[(refmv_count)++] = (mv); \
|
(mv_ref_list)[(refmv_count)++] = (mv); \
|
||||||
if (early_break) \
|
if (early_break) goto Done; \
|
||||||
goto Done; \
|
|
||||||
} \
|
} \
|
||||||
} while (0)
|
} while (0)
|
||||||
|
|
||||||
@ -510,8 +487,7 @@ static void fpm_sync(void *const data, int mi_row) {
|
|||||||
if ((mbmi)->ref_frame[0] != ref_frame) \
|
if ((mbmi)->ref_frame[0] != ref_frame) \
|
||||||
ADD_MV_REF_LIST_EB(scale_mv((mbmi), 0, ref_frame, ref_sign_bias), \
|
ADD_MV_REF_LIST_EB(scale_mv((mbmi), 0, ref_frame, ref_sign_bias), \
|
||||||
refmv_count, mv_ref_list, Done); \
|
refmv_count, mv_ref_list, Done); \
|
||||||
if (has_second_ref(mbmi) && \
|
if (has_second_ref(mbmi) && (mbmi)->ref_frame[1] != ref_frame && \
|
||||||
(mbmi)->ref_frame[1] != ref_frame && \
|
|
||||||
(mbmi)->mv[1].as_int != (mbmi)->mv[0].as_int) \
|
(mbmi)->mv[1].as_int != (mbmi)->mv[0].as_int) \
|
||||||
ADD_MV_REF_LIST_EB(scale_mv((mbmi), 1, ref_frame, ref_sign_bias), \
|
ADD_MV_REF_LIST_EB(scale_mv((mbmi), 1, ref_frame, ref_sign_bias), \
|
||||||
refmv_count, mv_ref_list, Done); \
|
refmv_count, mv_ref_list, Done); \
|
||||||
@ -523,14 +499,16 @@ static void fpm_sync(void *const data, int mi_row) {
|
|||||||
static int dec_find_mv_refs(const VP9_COMMON *cm, const MACROBLOCKD *xd,
|
static int dec_find_mv_refs(const VP9_COMMON *cm, const MACROBLOCKD *xd,
|
||||||
PREDICTION_MODE mode, MV_REFERENCE_FRAME ref_frame,
|
PREDICTION_MODE mode, MV_REFERENCE_FRAME ref_frame,
|
||||||
const POSITION *const mv_ref_search,
|
const POSITION *const mv_ref_search,
|
||||||
int_mv *mv_ref_list,
|
int_mv *mv_ref_list, int mi_row, int mi_col,
|
||||||
int mi_row, int mi_col, int block, int is_sub8x8,
|
int block, int is_sub8x8, find_mv_refs_sync sync,
|
||||||
find_mv_refs_sync sync, void *const data) {
|
void *const data) {
|
||||||
const int *ref_sign_bias = cm->ref_frame_sign_bias;
|
const int *ref_sign_bias = cm->ref_frame_sign_bias;
|
||||||
int i, refmv_count = 0;
|
int i, refmv_count = 0;
|
||||||
int different_ref_found = 0;
|
int different_ref_found = 0;
|
||||||
const MV_REF *const prev_frame_mvs = cm->use_prev_frame_mvs ?
|
const MV_REF *const prev_frame_mvs =
|
||||||
cm->prev_frame->mvs + mi_row * cm->mi_cols + mi_col : NULL;
|
cm->use_prev_frame_mvs
|
||||||
|
? cm->prev_frame->mvs + mi_row * cm->mi_cols + mi_col
|
||||||
|
: NULL;
|
||||||
const TileInfo *const tile = &xd->tile;
|
const TileInfo *const tile = &xd->tile;
|
||||||
// If mode is nearestmv or newmv (uses nearestmv as a reference) then stop
|
// If mode is nearestmv or newmv (uses nearestmv as a reference) then stop
|
||||||
// searching after the first mv is found.
|
// searching after the first mv is found.
|
||||||
@ -654,8 +632,7 @@ static int dec_find_mv_refs(const VP9_COMMON *cm, const MACROBLOCKD *xd,
|
|||||||
|
|
||||||
Done:
|
Done:
|
||||||
// Clamp vectors
|
// Clamp vectors
|
||||||
for (i = 0; i < refmv_count; ++i)
|
for (i = 0; i < refmv_count; ++i) clamp_mv_ref(&mv_ref_list[i].as_mv, xd);
|
||||||
clamp_mv_ref(&mv_ref_list[i].as_mv, xd);
|
|
||||||
|
|
||||||
return refmv_count;
|
return refmv_count;
|
||||||
}
|
}
|
||||||
@ -673,14 +650,12 @@ static void append_sub8x8_mvs_for_idx(VP9_COMMON *cm, MACROBLOCKD *xd,
|
|||||||
|
|
||||||
assert(MAX_MV_REF_CANDIDATES == 2);
|
assert(MAX_MV_REF_CANDIDATES == 2);
|
||||||
|
|
||||||
refmv_count = dec_find_mv_refs(cm, xd, b_mode, mi->ref_frame[ref],
|
refmv_count =
|
||||||
mv_ref_search, mv_list, mi_row, mi_col, block,
|
dec_find_mv_refs(cm, xd, b_mode, mi->ref_frame[ref], mv_ref_search,
|
||||||
1, NULL, NULL);
|
mv_list, mi_row, mi_col, block, 1, NULL, NULL);
|
||||||
|
|
||||||
switch (block) {
|
switch (block) {
|
||||||
case 0:
|
case 0: best_sub8x8->as_int = mv_list[refmv_count - 1].as_int; break;
|
||||||
best_sub8x8->as_int = mv_list[refmv_count - 1].as_int;
|
|
||||||
break;
|
|
||||||
case 1:
|
case 1:
|
||||||
case 2:
|
case 2:
|
||||||
if (b_mode == NEARESTMV) {
|
if (b_mode == NEARESTMV) {
|
||||||
@ -711,14 +686,13 @@ static void append_sub8x8_mvs_for_idx(VP9_COMMON *cm, MACROBLOCKD *xd,
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
break;
|
break;
|
||||||
default:
|
default: assert(0 && "Invalid block index.");
|
||||||
assert(0 && "Invalid block index.");
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
static uint8_t get_mode_context(const VP9_COMMON *cm, const MACROBLOCKD *xd,
|
static uint8_t get_mode_context(const VP9_COMMON *cm, const MACROBLOCKD *xd,
|
||||||
const POSITION *const mv_ref_search,
|
const POSITION *const mv_ref_search, int mi_row,
|
||||||
int mi_row, int mi_col) {
|
int mi_col) {
|
||||||
int i;
|
int i;
|
||||||
int context_counter = 0;
|
int context_counter = 0;
|
||||||
const TileInfo *const tile = &xd->tile;
|
const TileInfo *const tile = &xd->tile;
|
||||||
@ -739,8 +713,8 @@ static uint8_t get_mode_context(const VP9_COMMON *cm, const MACROBLOCKD *xd,
|
|||||||
|
|
||||||
static void read_inter_block_mode_info(VP9Decoder *const pbi,
|
static void read_inter_block_mode_info(VP9Decoder *const pbi,
|
||||||
MACROBLOCKD *const xd,
|
MACROBLOCKD *const xd,
|
||||||
MODE_INFO *const mi,
|
MODE_INFO *const mi, int mi_row,
|
||||||
int mi_row, int mi_col, vpx_reader *r) {
|
int mi_col, vpx_reader *r) {
|
||||||
VP9_COMMON *const cm = &pbi->common;
|
VP9_COMMON *const cm = &pbi->common;
|
||||||
const BLOCK_SIZE bsize = mi->sb_type;
|
const BLOCK_SIZE bsize = mi->sb_type;
|
||||||
const int allow_hp = cm->allow_high_precision_mv;
|
const int allow_hp = cm->allow_high_precision_mv;
|
||||||
@ -776,9 +750,9 @@ static void read_inter_block_mode_info(VP9Decoder *const pbi,
|
|||||||
const MV_REFERENCE_FRAME frame = mi->ref_frame[ref];
|
const MV_REFERENCE_FRAME frame = mi->ref_frame[ref];
|
||||||
int refmv_count;
|
int refmv_count;
|
||||||
|
|
||||||
refmv_count = dec_find_mv_refs(cm, xd, mi->mode, frame, mv_ref_search,
|
refmv_count =
|
||||||
tmp_mvs, mi_row, mi_col, -1, 0,
|
dec_find_mv_refs(cm, xd, mi->mode, frame, mv_ref_search, tmp_mvs,
|
||||||
fpm_sync, (void *)pbi);
|
mi_row, mi_col, -1, 0, fpm_sync, (void *)pbi);
|
||||||
|
|
||||||
dec_find_best_ref_mvs(allow_hp, tmp_mvs, &best_ref_mvs[ref],
|
dec_find_best_ref_mvs(allow_hp, tmp_mvs, &best_ref_mvs[ref],
|
||||||
refmv_count);
|
refmv_count);
|
||||||
@ -813,10 +787,8 @@ static void read_inter_block_mode_info(VP9Decoder *const pbi,
|
|||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (num_4x4_h == 2)
|
if (num_4x4_h == 2) mi->bmi[j + 2] = mi->bmi[j];
|
||||||
mi->bmi[j + 2] = mi->bmi[j];
|
if (num_4x4_w == 2) mi->bmi[j + 1] = mi->bmi[j];
|
||||||
if (num_4x4_w == 2)
|
|
||||||
mi->bmi[j + 1] = mi->bmi[j];
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -830,15 +802,15 @@ static void read_inter_block_mode_info(VP9Decoder *const pbi,
|
|||||||
}
|
}
|
||||||
|
|
||||||
static void read_inter_frame_mode_info(VP9Decoder *const pbi,
|
static void read_inter_frame_mode_info(VP9Decoder *const pbi,
|
||||||
MACROBLOCKD *const xd,
|
MACROBLOCKD *const xd, int mi_row,
|
||||||
int mi_row, int mi_col, vpx_reader *r,
|
int mi_col, vpx_reader *r, int x_mis,
|
||||||
int x_mis, int y_mis) {
|
int y_mis) {
|
||||||
VP9_COMMON *const cm = &pbi->common;
|
VP9_COMMON *const cm = &pbi->common;
|
||||||
MODE_INFO *const mi = xd->mi[0];
|
MODE_INFO *const mi = xd->mi[0];
|
||||||
int inter_block;
|
int inter_block;
|
||||||
|
|
||||||
mi->segment_id = read_inter_segment_id(cm, xd, mi_row, mi_col, r, x_mis,
|
mi->segment_id =
|
||||||
y_mis);
|
read_inter_segment_id(cm, xd, mi_row, mi_col, r, x_mis, y_mis);
|
||||||
mi->skip = read_skip(cm, xd, mi->segment_id, r);
|
mi->skip = read_skip(cm, xd, mi->segment_id, r);
|
||||||
inter_block = read_is_inter_block(cm, xd, mi->segment_id, r);
|
inter_block = read_is_inter_block(cm, xd, mi->segment_id, r);
|
||||||
mi->tx_size = read_tx_size(cm, xd, !mi->skip || !inter_block, r);
|
mi->tx_size = read_tx_size(cm, xd, !mi->skip || !inter_block, r);
|
||||||
@ -854,9 +826,8 @@ static INLINE void copy_ref_frame_pair(MV_REFERENCE_FRAME *dst,
|
|||||||
memcpy(dst, src, sizeof(*dst) * 2);
|
memcpy(dst, src, sizeof(*dst) * 2);
|
||||||
}
|
}
|
||||||
|
|
||||||
void vp9_read_mode_info(VP9Decoder *const pbi, MACROBLOCKD *xd,
|
void vp9_read_mode_info(VP9Decoder *const pbi, MACROBLOCKD *xd, int mi_row,
|
||||||
int mi_row, int mi_col, vpx_reader *r,
|
int mi_col, vpx_reader *r, int x_mis, int y_mis) {
|
||||||
int x_mis, int y_mis) {
|
|
||||||
VP9_COMMON *const cm = &pbi->common;
|
VP9_COMMON *const cm = &pbi->common;
|
||||||
MODE_INFO *const mi = xd->mi[0];
|
MODE_INFO *const mi = xd->mi[0];
|
||||||
MV_REF *frame_mvs = cm->cur_frame->mvs + mi_row * cm->mi_cols + mi_col;
|
MV_REF *frame_mvs = cm->cur_frame->mvs + mi_row * cm->mi_cols + mi_col;
|
||||||
|
@ -19,9 +19,8 @@
|
|||||||
extern "C" {
|
extern "C" {
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
void vp9_read_mode_info(VP9Decoder *const pbi, MACROBLOCKD *xd,
|
void vp9_read_mode_info(VP9Decoder *const pbi, MACROBLOCKD *xd, int mi_row,
|
||||||
int mi_row, int mi_col, vpx_reader *r,
|
int mi_col, vpx_reader *r, int x_mis, int y_mis);
|
||||||
int x_mis, int y_mis);
|
|
||||||
|
|
||||||
#ifdef __cplusplus
|
#ifdef __cplusplus
|
||||||
} // extern "C"
|
} // extern "C"
|
||||||
|
@ -57,12 +57,10 @@ static void vp9_dec_setup_mi(VP9_COMMON *cm) {
|
|||||||
|
|
||||||
static int vp9_dec_alloc_mi(VP9_COMMON *cm, int mi_size) {
|
static int vp9_dec_alloc_mi(VP9_COMMON *cm, int mi_size) {
|
||||||
cm->mip = vpx_calloc(mi_size, sizeof(*cm->mip));
|
cm->mip = vpx_calloc(mi_size, sizeof(*cm->mip));
|
||||||
if (!cm->mip)
|
if (!cm->mip) return 1;
|
||||||
return 1;
|
|
||||||
cm->mi_alloc_size = mi_size;
|
cm->mi_alloc_size = mi_size;
|
||||||
cm->mi_grid_base = (MODE_INFO **)vpx_calloc(mi_size, sizeof(MODE_INFO *));
|
cm->mi_grid_base = (MODE_INFO **)vpx_calloc(mi_size, sizeof(MODE_INFO *));
|
||||||
if (!cm->mi_grid_base)
|
if (!cm->mi_grid_base) return 1;
|
||||||
return 1;
|
|
||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -77,8 +75,7 @@ VP9Decoder *vp9_decoder_create(BufferPool *const pool) {
|
|||||||
VP9Decoder *volatile const pbi = vpx_memalign(32, sizeof(*pbi));
|
VP9Decoder *volatile const pbi = vpx_memalign(32, sizeof(*pbi));
|
||||||
VP9_COMMON *volatile const cm = pbi ? &pbi->common : NULL;
|
VP9_COMMON *volatile const cm = pbi ? &pbi->common : NULL;
|
||||||
|
|
||||||
if (!cm)
|
if (!cm) return NULL;
|
||||||
return NULL;
|
|
||||||
|
|
||||||
vp9_zero(*pbi);
|
vp9_zero(*pbi);
|
||||||
|
|
||||||
@ -90,11 +87,10 @@ VP9Decoder *vp9_decoder_create(BufferPool *const pool) {
|
|||||||
|
|
||||||
cm->error.setjmp = 1;
|
cm->error.setjmp = 1;
|
||||||
|
|
||||||
CHECK_MEM_ERROR(cm, cm->fc,
|
CHECK_MEM_ERROR(cm, cm->fc, (FRAME_CONTEXT *)vpx_calloc(1, sizeof(*cm->fc)));
|
||||||
(FRAME_CONTEXT *)vpx_calloc(1, sizeof(*cm->fc)));
|
CHECK_MEM_ERROR(
|
||||||
CHECK_MEM_ERROR(cm, cm->frame_contexts,
|
cm, cm->frame_contexts,
|
||||||
(FRAME_CONTEXT *)vpx_calloc(FRAME_CONTEXTS,
|
(FRAME_CONTEXT *)vpx_calloc(FRAME_CONTEXTS, sizeof(*cm->frame_contexts)));
|
||||||
sizeof(*cm->frame_contexts)));
|
|
||||||
|
|
||||||
pbi->need_resync = 1;
|
pbi->need_resync = 1;
|
||||||
once(initialize_dec);
|
once(initialize_dec);
|
||||||
@ -126,8 +122,7 @@ VP9Decoder *vp9_decoder_create(BufferPool *const pool) {
|
|||||||
void vp9_decoder_remove(VP9Decoder *pbi) {
|
void vp9_decoder_remove(VP9Decoder *pbi) {
|
||||||
int i;
|
int i;
|
||||||
|
|
||||||
if (!pbi)
|
if (!pbi) return;
|
||||||
return;
|
|
||||||
|
|
||||||
vpx_get_worker_interface()->end(&pbi->lf_worker);
|
vpx_get_worker_interface()->end(&pbi->lf_worker);
|
||||||
vpx_free(pbi->lf_worker.data1);
|
vpx_free(pbi->lf_worker.data1);
|
||||||
@ -176,14 +171,12 @@ vpx_codec_err_t vp9_copy_reference_dec(VP9Decoder *pbi,
|
|||||||
else
|
else
|
||||||
vp8_yv12_copy_frame(cfg, sd);
|
vp8_yv12_copy_frame(cfg, sd);
|
||||||
} else {
|
} else {
|
||||||
vpx_internal_error(&cm->error, VPX_CODEC_ERROR,
|
vpx_internal_error(&cm->error, VPX_CODEC_ERROR, "Invalid reference frame");
|
||||||
"Invalid reference frame");
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return cm->error.error_code;
|
return cm->error.error_code;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
vpx_codec_err_t vp9_set_reference_dec(VP9_COMMON *cm,
|
vpx_codec_err_t vp9_set_reference_dec(VP9_COMMON *cm,
|
||||||
VP9_REFFRAME ref_frame_flag,
|
VP9_REFFRAME ref_frame_flag,
|
||||||
YV12_BUFFER_CONFIG *sd) {
|
YV12_BUFFER_CONFIG *sd) {
|
||||||
@ -206,8 +199,7 @@ vpx_codec_err_t vp9_set_reference_dec(VP9_COMMON *cm,
|
|||||||
} else if (ref_frame_flag == VP9_ALT_FLAG) {
|
} else if (ref_frame_flag == VP9_ALT_FLAG) {
|
||||||
idx = cm->ref_frame_map[2];
|
idx = cm->ref_frame_map[2];
|
||||||
} else {
|
} else {
|
||||||
vpx_internal_error(&cm->error, VPX_CODEC_ERROR,
|
vpx_internal_error(&cm->error, VPX_CODEC_ERROR, "Invalid reference frame");
|
||||||
"Invalid reference frame");
|
|
||||||
return cm->error.error_code;
|
return cm->error.error_code;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -273,8 +265,8 @@ static void swap_frame_buffers(VP9Decoder *pbi) {
|
|||||||
cm->frame_refs[ref_index].idx = -1;
|
cm->frame_refs[ref_index].idx = -1;
|
||||||
}
|
}
|
||||||
|
|
||||||
int vp9_receive_compressed_data(VP9Decoder *pbi,
|
int vp9_receive_compressed_data(VP9Decoder *pbi, size_t size,
|
||||||
size_t size, const uint8_t **psource) {
|
const uint8_t **psource) {
|
||||||
VP9_COMMON *volatile const cm = &pbi->common;
|
VP9_COMMON *volatile const cm = &pbi->common;
|
||||||
BufferPool *volatile const pool = cm->buffer_pool;
|
BufferPool *volatile const pool = cm->buffer_pool;
|
||||||
RefCntBuffer *volatile const frame_bufs = cm->buffer_pool->frame_bufs;
|
RefCntBuffer *volatile const frame_bufs = cm->buffer_pool->frame_bufs;
|
||||||
@ -301,8 +293,8 @@ int vp9_receive_compressed_data(VP9Decoder *pbi,
|
|||||||
|
|
||||||
// Check if the previous frame was a frame without any references to it.
|
// Check if the previous frame was a frame without any references to it.
|
||||||
// Release frame buffer if not decoding in frame parallel mode.
|
// Release frame buffer if not decoding in frame parallel mode.
|
||||||
if (!pbi->frame_parallel_decode && cm->new_fb_idx >= 0
|
if (!pbi->frame_parallel_decode && cm->new_fb_idx >= 0 &&
|
||||||
&& frame_bufs[cm->new_fb_idx].ref_count == 0)
|
frame_bufs[cm->new_fb_idx].ref_count == 0)
|
||||||
pool->release_fb_cb(pool->cb_priv,
|
pool->release_fb_cb(pool->cb_priv,
|
||||||
&frame_bufs[cm->new_fb_idx].raw_frame_buffer);
|
&frame_bufs[cm->new_fb_idx].raw_frame_buffer);
|
||||||
// Find a free frame buffer. Return error if can not find any.
|
// Find a free frame buffer. Return error if can not find any.
|
||||||
@ -330,7 +322,6 @@ int vp9_receive_compressed_data(VP9Decoder *pbi,
|
|||||||
pbi->cur_buf = &frame_bufs[cm->new_fb_idx];
|
pbi->cur_buf = &frame_bufs[cm->new_fb_idx];
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
if (setjmp(cm->error.jmp)) {
|
if (setjmp(cm->error.jmp)) {
|
||||||
const VPxWorkerInterface *const winterface = vpx_get_worker_interface();
|
const VPxWorkerInterface *const winterface = vpx_get_worker_interface();
|
||||||
int i;
|
int i;
|
||||||
@ -425,14 +416,12 @@ int vp9_get_raw_frame(VP9Decoder *pbi, YV12_BUFFER_CONFIG *sd,
|
|||||||
(void)*flags;
|
(void)*flags;
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
if (pbi->ready_for_new_data == 1)
|
if (pbi->ready_for_new_data == 1) return ret;
|
||||||
return ret;
|
|
||||||
|
|
||||||
pbi->ready_for_new_data = 1;
|
pbi->ready_for_new_data = 1;
|
||||||
|
|
||||||
/* no raw frame to show!!! */
|
/* no raw frame to show!!! */
|
||||||
if (!cm->show_frame)
|
if (!cm->show_frame) return ret;
|
||||||
return ret;
|
|
||||||
|
|
||||||
pbi->ready_for_new_data = 1;
|
pbi->ready_for_new_data = 1;
|
||||||
|
|
||||||
@ -451,8 +440,7 @@ int vp9_get_raw_frame(VP9Decoder *pbi, YV12_BUFFER_CONFIG *sd,
|
|||||||
return ret;
|
return ret;
|
||||||
}
|
}
|
||||||
|
|
||||||
vpx_codec_err_t vp9_parse_superframe_index(const uint8_t *data,
|
vpx_codec_err_t vp9_parse_superframe_index(const uint8_t *data, size_t data_sz,
|
||||||
size_t data_sz,
|
|
||||||
uint32_t sizes[8], int *count,
|
uint32_t sizes[8], int *count,
|
||||||
vpx_decrypt_cb decrypt_cb,
|
vpx_decrypt_cb decrypt_cb,
|
||||||
void *decrypt_state) {
|
void *decrypt_state) {
|
||||||
@ -475,18 +463,16 @@ vpx_codec_err_t vp9_parse_superframe_index(const uint8_t *data,
|
|||||||
|
|
||||||
// This chunk is marked as having a superframe index but doesn't have
|
// This chunk is marked as having a superframe index but doesn't have
|
||||||
// enough data for it, thus it's an invalid superframe index.
|
// enough data for it, thus it's an invalid superframe index.
|
||||||
if (data_sz < index_sz)
|
if (data_sz < index_sz) return VPX_CODEC_CORRUPT_FRAME;
|
||||||
return VPX_CODEC_CORRUPT_FRAME;
|
|
||||||
|
|
||||||
{
|
{
|
||||||
const uint8_t marker2 = read_marker(decrypt_cb, decrypt_state,
|
const uint8_t marker2 =
|
||||||
data + data_sz - index_sz);
|
read_marker(decrypt_cb, decrypt_state, data + data_sz - index_sz);
|
||||||
|
|
||||||
// This chunk is marked as having a superframe index but doesn't have
|
// This chunk is marked as having a superframe index but doesn't have
|
||||||
// the matching marker byte at the front of the index therefore it's an
|
// the matching marker byte at the front of the index therefore it's an
|
||||||
// invalid chunk.
|
// invalid chunk.
|
||||||
if (marker != marker2)
|
if (marker != marker2) return VPX_CODEC_CORRUPT_FRAME;
|
||||||
return VPX_CODEC_CORRUPT_FRAME;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
{
|
{
|
||||||
@ -505,8 +491,7 @@ vpx_codec_err_t vp9_parse_superframe_index(const uint8_t *data,
|
|||||||
for (i = 0; i < frames; ++i) {
|
for (i = 0; i < frames; ++i) {
|
||||||
uint32_t this_sz = 0;
|
uint32_t this_sz = 0;
|
||||||
|
|
||||||
for (j = 0; j < mag; ++j)
|
for (j = 0; j < mag; ++j) this_sz |= ((uint32_t)(*x++)) << (j * 8);
|
||||||
this_sz |= ((uint32_t)(*x++)) << (j * 8);
|
|
||||||
sizes[i] = this_sz;
|
sizes[i] = this_sz;
|
||||||
}
|
}
|
||||||
*count = frames;
|
*count = frames;
|
||||||
|
@ -78,8 +78,8 @@ typedef struct VP9Decoder {
|
|||||||
int hold_ref_buf; // hold the reference buffer.
|
int hold_ref_buf; // hold the reference buffer.
|
||||||
} VP9Decoder;
|
} VP9Decoder;
|
||||||
|
|
||||||
int vp9_receive_compressed_data(struct VP9Decoder *pbi,
|
int vp9_receive_compressed_data(struct VP9Decoder *pbi, size_t size,
|
||||||
size_t size, const uint8_t **dest);
|
const uint8_t **dest);
|
||||||
|
|
||||||
int vp9_get_raw_frame(struct VP9Decoder *pbi, YV12_BUFFER_CONFIG *sd,
|
int vp9_get_raw_frame(struct VP9Decoder *pbi, YV12_BUFFER_CONFIG *sd,
|
||||||
vp9_ppflags_t *flags);
|
vp9_ppflags_t *flags);
|
||||||
@ -93,8 +93,7 @@ vpx_codec_err_t vp9_set_reference_dec(VP9_COMMON *cm,
|
|||||||
YV12_BUFFER_CONFIG *sd);
|
YV12_BUFFER_CONFIG *sd);
|
||||||
|
|
||||||
static INLINE uint8_t read_marker(vpx_decrypt_cb decrypt_cb,
|
static INLINE uint8_t read_marker(vpx_decrypt_cb decrypt_cb,
|
||||||
void *decrypt_state,
|
void *decrypt_state, const uint8_t *data) {
|
||||||
const uint8_t *data) {
|
|
||||||
if (decrypt_cb) {
|
if (decrypt_cb) {
|
||||||
uint8_t marker;
|
uint8_t marker;
|
||||||
decrypt_cb(decrypt_state, data, &marker, 1);
|
decrypt_cb(decrypt_state, data, &marker, 1);
|
||||||
@ -105,8 +104,7 @@ static INLINE uint8_t read_marker(vpx_decrypt_cb decrypt_cb,
|
|||||||
|
|
||||||
// This function is exposed for use in tests, as well as the inlined function
|
// This function is exposed for use in tests, as well as the inlined function
|
||||||
// "read_marker".
|
// "read_marker".
|
||||||
vpx_codec_err_t vp9_parse_superframe_index(const uint8_t *data,
|
vpx_codec_err_t vp9_parse_superframe_index(const uint8_t *data, size_t data_sz,
|
||||||
size_t data_sz,
|
|
||||||
uint32_t sizes[8], int *count,
|
uint32_t sizes[8], int *count,
|
||||||
vpx_decrypt_cb decrypt_cb,
|
vpx_decrypt_cb decrypt_cb,
|
||||||
void *decrypt_state);
|
void *decrypt_state);
|
||||||
|
@ -26,19 +26,16 @@
|
|||||||
|
|
||||||
#define INCREMENT_COUNT(token) \
|
#define INCREMENT_COUNT(token) \
|
||||||
do { \
|
do { \
|
||||||
if (counts) \
|
if (counts) ++coef_counts[band][ctx][token]; \
|
||||||
++coef_counts[band][ctx][token]; \
|
|
||||||
} while (0)
|
} while (0)
|
||||||
|
|
||||||
static INLINE int read_coeff(const vpx_prob *probs, int n, vpx_reader *r) {
|
static INLINE int read_coeff(const vpx_prob *probs, int n, vpx_reader *r) {
|
||||||
int i, val = 0;
|
int i, val = 0;
|
||||||
for (i = 0; i < n; ++i)
|
for (i = 0; i < n; ++i) val = (val << 1) | vpx_read(r, probs[i]);
|
||||||
val = (val << 1) | vpx_read(r, probs[i]);
|
|
||||||
return val;
|
return val;
|
||||||
}
|
}
|
||||||
|
|
||||||
static int decode_coefs(const MACROBLOCKD *xd,
|
static int decode_coefs(const MACROBLOCKD *xd, PLANE_TYPE type,
|
||||||
PLANE_TYPE type,
|
|
||||||
tran_low_t *dqcoeff, TX_SIZE tx_size, const int16_t *dq,
|
tran_low_t *dqcoeff, TX_SIZE tx_size, const int16_t *dq,
|
||||||
int ctx, const int16_t *scan, const int16_t *nb,
|
int ctx, const int16_t *scan, const int16_t *nb,
|
||||||
vpx_reader *r) {
|
vpx_reader *r) {
|
||||||
@ -59,14 +56,14 @@ static int decode_coefs(const MACROBLOCKD *xd,
|
|||||||
int16_t dqv = dq[0];
|
int16_t dqv = dq[0];
|
||||||
const uint8_t *const cat6_prob =
|
const uint8_t *const cat6_prob =
|
||||||
#if CONFIG_VP9_HIGHBITDEPTH
|
#if CONFIG_VP9_HIGHBITDEPTH
|
||||||
(xd->bd == VPX_BITS_12) ? vp9_cat6_prob_high12 :
|
(xd->bd == VPX_BITS_12)
|
||||||
(xd->bd == VPX_BITS_10) ? vp9_cat6_prob_high12 + 2 :
|
? vp9_cat6_prob_high12
|
||||||
|
: (xd->bd == VPX_BITS_10) ? vp9_cat6_prob_high12 + 2 :
|
||||||
#endif // CONFIG_VP9_HIGHBITDEPTH
|
#endif // CONFIG_VP9_HIGHBITDEPTH
|
||||||
vp9_cat6_prob;
|
vp9_cat6_prob;
|
||||||
const int cat6_bits =
|
const int cat6_bits =
|
||||||
#if CONFIG_VP9_HIGHBITDEPTH
|
#if CONFIG_VP9_HIGHBITDEPTH
|
||||||
(xd->bd == VPX_BITS_12) ? 18 :
|
(xd->bd == VPX_BITS_12) ? 18 : (xd->bd == VPX_BITS_10) ? 16 :
|
||||||
(xd->bd == VPX_BITS_10) ? 16 :
|
|
||||||
#endif // CONFIG_VP9_HIGHBITDEPTH
|
#endif // CONFIG_VP9_HIGHBITDEPTH
|
||||||
14;
|
14;
|
||||||
|
|
||||||
@ -79,8 +76,7 @@ static int decode_coefs(const MACROBLOCKD *xd,
|
|||||||
int val = -1;
|
int val = -1;
|
||||||
band = *band_translate++;
|
band = *band_translate++;
|
||||||
prob = coef_probs[band][ctx];
|
prob = coef_probs[band][ctx];
|
||||||
if (counts)
|
if (counts) ++eob_branch_count[band][ctx];
|
||||||
++eob_branch_count[band][ctx];
|
|
||||||
if (!vpx_read(r, prob[EOB_CONTEXT_NODE])) {
|
if (!vpx_read(r, prob[EOB_CONTEXT_NODE])) {
|
||||||
INCREMENT_COUNT(EOB_MODEL_TOKEN);
|
INCREMENT_COUNT(EOB_MODEL_TOKEN);
|
||||||
break;
|
break;
|
||||||
@ -91,8 +87,7 @@ static int decode_coefs(const MACROBLOCKD *xd,
|
|||||||
dqv = dq[1];
|
dqv = dq[1];
|
||||||
token_cache[scan[c]] = 0;
|
token_cache[scan[c]] = 0;
|
||||||
++c;
|
++c;
|
||||||
if (c >= max_eob)
|
if (c >= max_eob) return c; // zero tokens at the end (no eob token)
|
||||||
return c; // zero tokens at the end (no eob token)
|
|
||||||
ctx = get_coef_context(nb, token_cache, c);
|
ctx = get_coef_context(nb, token_cache, c);
|
||||||
band = *band_translate++;
|
band = *band_translate++;
|
||||||
prob = coef_probs[band][ctx];
|
prob = coef_probs[band][ctx];
|
||||||
@ -109,9 +104,7 @@ static int decode_coefs(const MACROBLOCKD *xd,
|
|||||||
switch (token) {
|
switch (token) {
|
||||||
case TWO_TOKEN:
|
case TWO_TOKEN:
|
||||||
case THREE_TOKEN:
|
case THREE_TOKEN:
|
||||||
case FOUR_TOKEN:
|
case FOUR_TOKEN: val = token; break;
|
||||||
val = token;
|
|
||||||
break;
|
|
||||||
case CATEGORY1_TOKEN:
|
case CATEGORY1_TOKEN:
|
||||||
val = CAT1_MIN_VAL + read_coeff(vp9_cat1_prob, 1, r);
|
val = CAT1_MIN_VAL + read_coeff(vp9_cat1_prob, 1, r);
|
||||||
break;
|
break;
|
||||||
@ -135,8 +128,7 @@ static int decode_coefs(const MACROBLOCKD *xd,
|
|||||||
v = (val * dqv) >> dq_shift;
|
v = (val * dqv) >> dq_shift;
|
||||||
#if CONFIG_COEFFICIENT_RANGE_CHECKING
|
#if CONFIG_COEFFICIENT_RANGE_CHECKING
|
||||||
#if CONFIG_VP9_HIGHBITDEPTH
|
#if CONFIG_VP9_HIGHBITDEPTH
|
||||||
dqcoeff[scan[c]] = highbd_check_range((vpx_read_bit(r) ? -v : v),
|
dqcoeff[scan[c]] = highbd_check_range((vpx_read_bit(r) ? -v : v), xd->bd);
|
||||||
xd->bd);
|
|
||||||
#else
|
#else
|
||||||
dqcoeff[scan[c]] = check_range(vpx_read_bit(r) ? -v : v);
|
dqcoeff[scan[c]] = check_range(vpx_read_bit(r) ? -v : v);
|
||||||
#endif // CONFIG_VP9_HIGHBITDEPTH
|
#endif // CONFIG_VP9_HIGHBITDEPTH
|
||||||
|
@ -8,7 +8,6 @@
|
|||||||
* be found in the AUTHORS file in the root of the source tree.
|
* be found in the AUTHORS file in the root of the source tree.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
|
|
||||||
#ifndef VP9_DECODER_VP9_DETOKENIZE_H_
|
#ifndef VP9_DECODER_VP9_DETOKENIZE_H_
|
||||||
#define VP9_DECODER_VP9_DETOKENIZE_H_
|
#define VP9_DECODER_VP9_DETOKENIZE_H_
|
||||||
|
|
||||||
@ -20,10 +19,8 @@
|
|||||||
extern "C" {
|
extern "C" {
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
int vp9_decode_block_tokens(MACROBLOCKD *xd,
|
int vp9_decode_block_tokens(MACROBLOCKD *xd, int plane, const scan_order *sc,
|
||||||
int plane, const scan_order *sc,
|
int x, int y, TX_SIZE tx_size, vpx_reader *r,
|
||||||
int x, int y,
|
|
||||||
TX_SIZE tx_size, vpx_reader *r,
|
|
||||||
int seg_id);
|
int seg_id);
|
||||||
|
|
||||||
#ifdef __cplusplus
|
#ifdef __cplusplus
|
||||||
|
@ -15,8 +15,7 @@
|
|||||||
#include "vp9/decoder/vp9_dsubexp.h"
|
#include "vp9/decoder/vp9_dsubexp.h"
|
||||||
|
|
||||||
static int inv_recenter_nonneg(int v, int m) {
|
static int inv_recenter_nonneg(int v, int m) {
|
||||||
if (v > 2 * m)
|
if (v > 2 * m) return v;
|
||||||
return v;
|
|
||||||
|
|
||||||
return (v & 1) ? m - ((v + 1) >> 1) : m + (v >> 1);
|
return (v & 1) ? m - ((v + 1) >> 1) : m + (v >> 1);
|
||||||
}
|
}
|
||||||
@ -59,12 +58,9 @@ static int inv_remap_prob(int v, int m) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
static int decode_term_subexp(vpx_reader *r) {
|
static int decode_term_subexp(vpx_reader *r) {
|
||||||
if (!vpx_read_bit(r))
|
if (!vpx_read_bit(r)) return vpx_read_literal(r, 4);
|
||||||
return vpx_read_literal(r, 4);
|
if (!vpx_read_bit(r)) return vpx_read_literal(r, 4) + 16;
|
||||||
if (!vpx_read_bit(r))
|
if (!vpx_read_bit(r)) return vpx_read_literal(r, 5) + 32;
|
||||||
return vpx_read_literal(r, 4) + 16;
|
|
||||||
if (!vpx_read_bit(r))
|
|
||||||
return vpx_read_literal(r, 5) + 32;
|
|
||||||
return decode_uniform(r) + 64;
|
return decode_uniform(r) + 64;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -8,7 +8,6 @@
|
|||||||
* be found in the AUTHORS file in the root of the source tree.
|
* be found in the AUTHORS file in the root of the source tree.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
|
|
||||||
#ifndef VP9_DECODER_VP9_DSUBEXP_H_
|
#ifndef VP9_DECODER_VP9_DSUBEXP_H_
|
||||||
#define VP9_DECODER_VP9_DSUBEXP_H_
|
#define VP9_DECODER_VP9_DSUBEXP_H_
|
||||||
|
|
||||||
|
@ -62,8 +62,7 @@ void vp9_frameworker_signal_stats(VPxWorker *const worker) {
|
|||||||
void vp9_frameworker_wait(VPxWorker *const worker, RefCntBuffer *const ref_buf,
|
void vp9_frameworker_wait(VPxWorker *const worker, RefCntBuffer *const ref_buf,
|
||||||
int row) {
|
int row) {
|
||||||
#if CONFIG_MULTITHREAD
|
#if CONFIG_MULTITHREAD
|
||||||
if (!ref_buf)
|
if (!ref_buf) return;
|
||||||
return;
|
|
||||||
|
|
||||||
#ifndef BUILDING_WITH_TSAN
|
#ifndef BUILDING_WITH_TSAN
|
||||||
// The following line of code will get harmless tsan error but it is the key
|
// The following line of code will get harmless tsan error but it is the key
|
||||||
@ -150,8 +149,9 @@ void vp9_frameworker_copy_context(VPxWorker *const dst_worker,
|
|||||||
&src_worker_data->stats_mutex);
|
&src_worker_data->stats_mutex);
|
||||||
}
|
}
|
||||||
|
|
||||||
dst_cm->last_frame_seg_map = src_cm->seg.enabled ?
|
dst_cm->last_frame_seg_map = src_cm->seg.enabled
|
||||||
src_cm->current_frame_seg_map : src_cm->last_frame_seg_map;
|
? src_cm->current_frame_seg_map
|
||||||
|
: src_cm->last_frame_seg_map;
|
||||||
dst_worker_data->pbi->need_resync = src_worker_data->pbi->need_resync;
|
dst_worker_data->pbi->need_resync = src_worker_data->pbi->need_resync;
|
||||||
vp9_frameworker_unlock_stats(src_worker);
|
vp9_frameworker_unlock_stats(src_worker);
|
||||||
|
|
||||||
@ -159,17 +159,18 @@ void vp9_frameworker_copy_context(VPxWorker *const dst_worker,
|
|||||||
#if CONFIG_VP9_HIGHBITDEPTH
|
#if CONFIG_VP9_HIGHBITDEPTH
|
||||||
dst_cm->use_highbitdepth = src_cm->use_highbitdepth;
|
dst_cm->use_highbitdepth = src_cm->use_highbitdepth;
|
||||||
#endif
|
#endif
|
||||||
dst_cm->prev_frame = src_cm->show_existing_frame ?
|
dst_cm->prev_frame =
|
||||||
src_cm->prev_frame : src_cm->cur_frame;
|
src_cm->show_existing_frame ? src_cm->prev_frame : src_cm->cur_frame;
|
||||||
dst_cm->last_width = !src_cm->show_existing_frame ?
|
dst_cm->last_width =
|
||||||
src_cm->width : src_cm->last_width;
|
!src_cm->show_existing_frame ? src_cm->width : src_cm->last_width;
|
||||||
dst_cm->last_height = !src_cm->show_existing_frame ?
|
dst_cm->last_height =
|
||||||
src_cm->height : src_cm->last_height;
|
!src_cm->show_existing_frame ? src_cm->height : src_cm->last_height;
|
||||||
dst_cm->subsampling_x = src_cm->subsampling_x;
|
dst_cm->subsampling_x = src_cm->subsampling_x;
|
||||||
dst_cm->subsampling_y = src_cm->subsampling_y;
|
dst_cm->subsampling_y = src_cm->subsampling_y;
|
||||||
dst_cm->frame_type = src_cm->frame_type;
|
dst_cm->frame_type = src_cm->frame_type;
|
||||||
dst_cm->last_show_frame = !src_cm->show_existing_frame ?
|
dst_cm->last_show_frame = !src_cm->show_existing_frame
|
||||||
src_cm->show_frame : src_cm->last_show_frame;
|
? src_cm->show_frame
|
||||||
|
: src_cm->last_show_frame;
|
||||||
for (i = 0; i < REF_FRAMES; ++i)
|
for (i = 0; i < REF_FRAMES; ++i)
|
||||||
dst_cm->ref_frame_map[i] = src_cm->next_ref_frame_map[i];
|
dst_cm->ref_frame_map[i] = src_cm->next_ref_frame_map[i];
|
||||||
|
|
||||||
|
@ -105,8 +105,8 @@ struct vpx_codec_alg_priv {
|
|||||||
BufferPool *buffer_pool;
|
BufferPool *buffer_pool;
|
||||||
};
|
};
|
||||||
|
|
||||||
static vpx_codec_err_t update_error_state(vpx_codec_alg_priv_t *ctx,
|
static vpx_codec_err_t update_error_state(
|
||||||
const struct vpx_internal_error_info *error) {
|
vpx_codec_alg_priv_t *ctx, const struct vpx_internal_error_info *error) {
|
||||||
const vpx_codec_err_t res = error->error_code;
|
const vpx_codec_err_t res = error->error_code;
|
||||||
|
|
||||||
if (res != VPX_CODEC_OK)
|
if (res != VPX_CODEC_OK)
|
||||||
@ -115,29 +115,31 @@ static vpx_codec_err_t update_error_state(vpx_codec_alg_priv_t *ctx,
|
|||||||
return res;
|
return res;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
#undef ERROR
|
#undef ERROR
|
||||||
#define ERROR(str) do {\
|
#define ERROR(str) \
|
||||||
|
do { \
|
||||||
ctx->base.err_detail = str; \
|
ctx->base.err_detail = str; \
|
||||||
return VPX_CODEC_INVALID_PARAM; \
|
return VPX_CODEC_INVALID_PARAM; \
|
||||||
} while (0)
|
} while (0)
|
||||||
|
|
||||||
#define RANGE_CHECK(p, memb, lo, hi) do {\
|
#define RANGE_CHECK(p, memb, lo, hi) \
|
||||||
|
do { \
|
||||||
if (!(((p)->memb == lo || (p)->memb > (lo)) && (p)->memb <= hi)) \
|
if (!(((p)->memb == lo || (p)->memb > (lo)) && (p)->memb <= hi)) \
|
||||||
ERROR(#memb " out of range [" #lo ".." #hi "]"); \
|
ERROR(#memb " out of range [" #lo ".." #hi "]"); \
|
||||||
} while (0)
|
} while (0)
|
||||||
|
|
||||||
#define RANGE_CHECK_HI(p, memb, hi) do {\
|
#define RANGE_CHECK_HI(p, memb, hi) \
|
||||||
if (!((p)->memb <= (hi))) \
|
do { \
|
||||||
ERROR(#memb " out of range [.."#hi"]");\
|
if (!((p)->memb <= (hi))) ERROR(#memb " out of range [.." #hi "]"); \
|
||||||
} while (0)
|
} while (0)
|
||||||
|
|
||||||
#define RANGE_CHECK_LO(p, memb, lo) do {\
|
#define RANGE_CHECK_LO(p, memb, lo) \
|
||||||
if (!((p)->memb >= (lo))) \
|
do { \
|
||||||
ERROR(#memb " out of range ["#lo"..]");\
|
if (!((p)->memb >= (lo))) ERROR(#memb " out of range [" #lo "..]"); \
|
||||||
} while (0)
|
} while (0)
|
||||||
|
|
||||||
#define RANGE_CHECK_BOOL(p, memb) do {\
|
#define RANGE_CHECK_BOOL(p, memb) \
|
||||||
|
do { \
|
||||||
if (!!((p)->memb) != (p)->memb) ERROR(#memb " expected boolean"); \
|
if (!!((p)->memb) != (p)->memb) ERROR(#memb " expected boolean"); \
|
||||||
} while (0)
|
} while (0)
|
||||||
|
|
||||||
@ -191,8 +193,8 @@ static vpx_codec_err_t validate_config(vpx_codec_alg_priv_t *ctx,
|
|||||||
level != LEVEL_2_1 && level != LEVEL_3 && level != LEVEL_3_1 &&
|
level != LEVEL_2_1 && level != LEVEL_3 && level != LEVEL_3_1 &&
|
||||||
level != LEVEL_4 && level != LEVEL_4_1 && level != LEVEL_5 &&
|
level != LEVEL_4 && level != LEVEL_4_1 && level != LEVEL_5 &&
|
||||||
level != LEVEL_5_1 && level != LEVEL_5_2 && level != LEVEL_6 &&
|
level != LEVEL_5_1 && level != LEVEL_5_2 && level != LEVEL_6 &&
|
||||||
level != LEVEL_6_1 && level != LEVEL_6_2 &&
|
level != LEVEL_6_1 && level != LEVEL_6_2 && level != LEVEL_UNKNOWN &&
|
||||||
level != LEVEL_UNKNOWN && level != LEVEL_MAX)
|
level != LEVEL_MAX)
|
||||||
ERROR("target_level is invalid");
|
ERROR("target_level is invalid");
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -202,8 +204,7 @@ static vpx_codec_err_t validate_config(vpx_codec_alg_priv_t *ctx,
unsigned int sl, tl;
for (sl = 1; sl < cfg->ss_number_layers; ++sl) {
for (tl = 1; tl < cfg->ts_number_layers; ++tl) {
const int layer =
const int layer = LAYER_IDS_TO_IDX(sl, tl, cfg->ts_number_layers);
LAYER_IDS_TO_IDX(sl, tl, cfg->ts_number_layers);
if (cfg->layer_target_bitrate[layer] <
cfg->layer_target_bitrate[layer - 1])
ERROR("ts_target_bitrate entries are not increasing");
@ -222,8 +223,7 @@ static vpx_codec_err_t validate_config(vpx_codec_alg_priv_t *ctx,
cfg->g_pass == VPX_RC_LAST_PASS) {
unsigned int i, alt_ref_sum = 0;
for (i = 0; i < cfg->ss_number_layers; ++i) {
if (cfg->ss_enable_auto_alt_ref[i])
if (cfg->ss_enable_auto_alt_ref[i]) ++alt_ref_sum;
++alt_ref_sum;
}
if (alt_ref_sum > REF_FRAMES - cfg->ss_number_layers)
ERROR("Not enough ref buffers for svc alt ref frames");
@ -235,10 +235,10 @@ static vpx_codec_err_t validate_config(vpx_codec_alg_priv_t *ctx,

// VP9 does not support a lower bound on the keyframe interval in
// automatic keyframe placement mode.
if (cfg->kf_mode != VPX_KF_DISABLED &&
if (cfg->kf_mode != VPX_KF_DISABLED && cfg->kf_min_dist != cfg->kf_max_dist &&
cfg->kf_min_dist != cfg->kf_max_dist &&
cfg->kf_min_dist > 0)
ERROR("kf_min_dist not supported in auto mode, use 0 "
ERROR(
"kf_min_dist not supported in auto mode, use 0 "
"or kf_max_dist instead.");

RANGE_CHECK(extra_cfg, enable_auto_alt_ref, 0, 2);
@ -252,8 +252,8 @@ static vpx_codec_err_t validate_config(vpx_codec_alg_priv_t *ctx,
RANGE_CHECK(extra_cfg, cq_level, 0, 63);
RANGE_CHECK(cfg, g_bit_depth, VPX_BITS_8, VPX_BITS_12);
RANGE_CHECK(cfg, g_input_bit_depth, 8, 12);
RANGE_CHECK(extra_cfg, content,
RANGE_CHECK(extra_cfg, content, VP9E_CONTENT_DEFAULT,
VP9E_CONTENT_DEFAULT, VP9E_CONTENT_INVALID - 1);
VP9E_CONTENT_INVALID - 1);

// TODO(yaowu): remove this when ssim tuning is implemented for vp9
if (extra_cfg->tuning == VP8_TUNE_SSIM)
@ -285,7 +285,8 @@ static vpx_codec_err_t validate_config(vpx_codec_alg_priv_t *ctx,
for (i = 0; i < (int)cfg->ss_number_layers; ++i) {
unsigned int layer_id;
if (n_packets_per_layer[i] < 2) {
ERROR("rc_twopass_stats_in requires at least two packets for each "
ERROR(
"rc_twopass_stats_in requires at least two packets for each "
"layer.");
}

@ -293,8 +294,8 @@ static vpx_codec_err_t validate_config(vpx_codec_alg_priv_t *ctx,
n_packets - cfg->ss_number_layers + i;
layer_id = (int)stats->spatial_layer_id;

if (layer_id >= cfg->ss_number_layers
if (layer_id >= cfg->ss_number_layers ||
||(unsigned int)(stats->count + 0.5) !=
(unsigned int)(stats->count + 0.5) !=
n_packets_per_layer[layer_id] - 1)
ERROR("rc_twopass_stats_in missing EOS stats packet");
}
@ -319,8 +320,7 @@ static vpx_codec_err_t validate_config(vpx_codec_alg_priv_t *ctx,
cfg->g_bit_depth > VPX_BITS_8) {
ERROR("Codec high bit-depth not supported in profile < 2");
}
if (cfg->g_profile <= (unsigned int)PROFILE_1 &&
if (cfg->g_profile <= (unsigned int)PROFILE_1 && cfg->g_input_bit_depth > 8) {
cfg->g_input_bit_depth > 8) {
ERROR("Source high bit-depth not supported in profile < 2");
}
if (cfg->g_profile > (unsigned int)PROFILE_1 &&
@ -328,8 +328,7 @@ static vpx_codec_err_t validate_config(vpx_codec_alg_priv_t *ctx,
ERROR("Codec bit-depth 8 not supported in profile > 1");
}
RANGE_CHECK(extra_cfg, color_space, VPX_CS_UNKNOWN, VPX_CS_SRGB);
RANGE_CHECK(extra_cfg, color_range,
RANGE_CHECK(extra_cfg, color_range, VPX_CR_STUDIO_RANGE, VPX_CR_FULL_RANGE);
VPX_CR_STUDIO_RANGE, VPX_CR_FULL_RANGE);
return VPX_CODEC_OK;
}
@ -338,13 +337,13 @@ static vpx_codec_err_t validate_img(vpx_codec_alg_priv_t *ctx,
switch (img->fmt) {
case VPX_IMG_FMT_YV12:
case VPX_IMG_FMT_I420:
case VPX_IMG_FMT_I42016:
case VPX_IMG_FMT_I42016: break;
break;
case VPX_IMG_FMT_I422:
case VPX_IMG_FMT_I444:
case VPX_IMG_FMT_I440:
if (ctx->cfg.g_profile != (unsigned int)PROFILE_1) {
ERROR("Invalid image format. I422, I444, I440 images are "
ERROR(
"Invalid image format. I422, I444, I440 images are "
"not supported in profile.");
}
break;
@ -353,12 +352,14 @@ static vpx_codec_err_t validate_img(vpx_codec_alg_priv_t *ctx,
case VPX_IMG_FMT_I44016:
if (ctx->cfg.g_profile != (unsigned int)PROFILE_1 &&
ctx->cfg.g_profile != (unsigned int)PROFILE_3) {
ERROR("Invalid image format. 16-bit I422, I444, I440 images are "
ERROR(
"Invalid image format. 16-bit I422, I444, I440 images are "
"not supported in profile.");
}
break;
default:
ERROR("Invalid image format. Only YV12, I420, I422, I444 images are "
ERROR(
"Invalid image format. Only YV12, I420, I422, I444 images are "
"supported.");
break;
}
@ -386,8 +387,7 @@ static int get_image_bps(const vpx_image_t *img) {
}

static vpx_codec_err_t set_encoder_config(
VP9EncoderConfig *oxcf,
VP9EncoderConfig *oxcf, const vpx_codec_enc_cfg_t *cfg,
const vpx_codec_enc_cfg_t *cfg,
const struct vp9_extracfg *extra_cfg) {
const int is_vbr = cfg->rc_end_usage == VPX_VBR;
int sl, tl;
@ -399,25 +399,18 @@ static vpx_codec_err_t set_encoder_config(
oxcf->input_bit_depth = cfg->g_input_bit_depth;
// guess a frame rate if out of whack, use 30
oxcf->init_framerate = (double)cfg->g_timebase.den / cfg->g_timebase.num;
if (oxcf->init_framerate > 180)
if (oxcf->init_framerate > 180) oxcf->init_framerate = 30;
oxcf->init_framerate = 30;

oxcf->mode = GOOD;

switch (cfg->g_pass) {
case VPX_RC_ONE_PASS:
case VPX_RC_ONE_PASS: oxcf->pass = 0; break;
oxcf->pass = 0;
case VPX_RC_FIRST_PASS: oxcf->pass = 1; break;
break;
case VPX_RC_LAST_PASS: oxcf->pass = 2; break;
case VPX_RC_FIRST_PASS:
oxcf->pass = 1;
break;
case VPX_RC_LAST_PASS:
oxcf->pass = 2;
break;
}

oxcf->lag_in_frames = cfg->g_pass == VPX_RC_FIRST_PASS ? 0
oxcf->lag_in_frames =
: cfg->g_lag_in_frames;
cfg->g_pass == VPX_RC_FIRST_PASS ? 0 : cfg->g_lag_in_frames;
oxcf->rc_mode = cfg->rc_end_usage;

// Convert target bandwidth from Kbit/s to Bit/s
@ -440,8 +433,9 @@ static vpx_codec_err_t set_encoder_config(
oxcf->scaled_frame_height = cfg->rc_scaled_height;
if (cfg->rc_resize_allowed == 1) {
oxcf->resize_mode =
(oxcf->scaled_frame_width == 0 || oxcf->scaled_frame_height == 0) ?
(oxcf->scaled_frame_width == 0 || oxcf->scaled_frame_height == 0)
RESIZE_DYNAMIC : RESIZE_FIXED;
? RESIZE_DYNAMIC
: RESIZE_FIXED;
} else {
oxcf->resize_mode = RESIZE_NONE;
}
@ -456,8 +450,8 @@ static vpx_codec_err_t set_encoder_config(
oxcf->two_pass_vbrmin_section = cfg->rc_2pass_vbr_minsection_pct;
oxcf->two_pass_vbrmax_section = cfg->rc_2pass_vbr_maxsection_pct;

oxcf->auto_key = cfg->kf_mode == VPX_KF_AUTO &&
oxcf->auto_key =
cfg->kf_min_dist != cfg->kf_max_dist;
cfg->kf_mode == VPX_KF_AUTO && cfg->kf_min_dist != cfg->kf_max_dist;

oxcf->key_freq = cfg->kf_max_dist;

@ -506,8 +500,8 @@ static vpx_codec_err_t set_encoder_config(

oxcf->ss_number_layers = cfg->ss_number_layers;
oxcf->ts_number_layers = cfg->ts_number_layers;
oxcf->temporal_layering_mode = (enum vp9e_temporal_layering_mode)
oxcf->temporal_layering_mode =
cfg->temporal_layering_mode;
(enum vp9e_temporal_layering_mode)cfg->temporal_layering_mode;

oxcf->target_level = extra_cfg->target_level;

@ -528,8 +522,8 @@ static vpx_codec_err_t set_encoder_config(
}
if (oxcf->ts_number_layers > 1) {
for (tl = 0; tl < VPX_TS_MAX_LAYERS; ++tl) {
oxcf->ts_rate_decimator[tl] = cfg->ts_rate_decimator[tl] ?
oxcf->ts_rate_decimator[tl] =
cfg->ts_rate_decimator[tl] : 1;
cfg->ts_rate_decimator[tl] ? cfg->ts_rate_decimator[tl] : 1;
}
} else if (oxcf->ts_number_layers == 1) {
oxcf->ts_rate_decimator[0] = 1;
@ -601,8 +595,7 @@ static vpx_codec_err_t encoder_set_config(vpx_codec_alg_priv_t *ctx,
vp9_change_config(ctx->cpi, &ctx->oxcf);
}

if (force_key)
if (force_key) ctx->next_frame_flags |= VPX_EFLAG_FORCE_KF;
ctx->next_frame_flags |= VPX_EFLAG_FORCE_KF;

return res;
}
@ -610,8 +603,7 @@ static vpx_codec_err_t encoder_set_config(vpx_codec_alg_priv_t *ctx,
static vpx_codec_err_t ctrl_get_quantizer(vpx_codec_alg_priv_t *ctx,
va_list args) {
int *const arg = va_arg(args, int *);
if (arg == NULL)
if (arg == NULL) return VPX_CODEC_INVALID_PARAM;
return VPX_CODEC_INVALID_PARAM;
*arg = vp9_get_quantizer(ctx->cpi);
return VPX_CODEC_OK;
}
@ -619,8 +611,7 @@ static vpx_codec_err_t ctrl_get_quantizer(vpx_codec_alg_priv_t *ctx,
static vpx_codec_err_t ctrl_get_quantizer64(vpx_codec_alg_priv_t *ctx,
va_list args) {
int *const arg = va_arg(args, int *);
if (arg == NULL)
if (arg == NULL) return VPX_CODEC_INVALID_PARAM;
return VPX_CODEC_INVALID_PARAM;
*arg = vp9_qindex_to_quantizer(vp9_get_quantizer(ctx->cpi));
return VPX_CODEC_OK;
}
@ -736,11 +727,10 @@ static vpx_codec_err_t ctrl_set_rc_max_inter_bitrate_pct(
return update_extra_cfg(ctx, &extra_cfg);
}

static vpx_codec_err_t ctrl_set_rc_gf_cbr_boost_pct(
static vpx_codec_err_t ctrl_set_rc_gf_cbr_boost_pct(vpx_codec_alg_priv_t *ctx,
vpx_codec_alg_priv_t *ctx, va_list args) {
va_list args) {
struct vp9_extracfg extra_cfg = ctx->extra_cfg;
extra_cfg.gf_cbr_boost_pct =
extra_cfg.gf_cbr_boost_pct = CAST(VP9E_SET_GF_CBR_BOOST_PCT, args);
CAST(VP9E_SET_GF_CBR_BOOST_PCT, args);
return update_extra_cfg(ctx, &extra_cfg);
}
@ -808,16 +798,13 @@ static vpx_codec_err_t encoder_init(vpx_codec_ctx_t *ctx,

if (ctx->priv == NULL) {
vpx_codec_alg_priv_t *const priv = vpx_calloc(1, sizeof(*priv));
if (priv == NULL)
if (priv == NULL) return VPX_CODEC_MEM_ERROR;
return VPX_CODEC_MEM_ERROR;

ctx->priv = (vpx_codec_priv_t *)priv;
ctx->priv->init_flags = ctx->init_flags;
ctx->priv->enc.total_encoders = 1;
priv->buffer_pool =
priv->buffer_pool = (BufferPool *)vpx_calloc(1, sizeof(BufferPool));
(BufferPool *)vpx_calloc(1, sizeof(BufferPool));
if (priv->buffer_pool == NULL) return VPX_CODEC_MEM_ERROR;
if (priv->buffer_pool == NULL)
return VPX_CODEC_MEM_ERROR;

#if CONFIG_MULTITHREAD
if (pthread_mutex_init(&priv->buffer_pool->pool_mutex, NULL)) {
@ -876,7 +863,8 @@ static void pick_quickcompress_mode(vpx_codec_alg_priv_t *ctx,

// Convert duration parameter from stream timebase to microseconds.
const uint64_t duration_us = (uint64_t)duration * 1000000 *
(uint64_t)cfg->g_timebase.num /(uint64_t)cfg->g_timebase.den;
(uint64_t)cfg->g_timebase.num /
(uint64_t)cfg->g_timebase.den;

// If the deadline is more that the duration this frame is to be shown,
// use good quality mode. Otherwise use realtime mode.
@ -885,11 +873,8 @@ static void pick_quickcompress_mode(vpx_codec_alg_priv_t *ctx,
new_mode = BEST;
}
break;
case VPX_RC_FIRST_PASS:
case VPX_RC_FIRST_PASS: break;
break;
case VPX_RC_LAST_PASS: new_mode = deadline > 0 ? GOOD : BEST; break;
case VPX_RC_LAST_PASS:
new_mode = deadline > 0 ? GOOD : BEST;
break;
}

if (deadline == VPX_DL_REALTIME) {
@ -918,8 +903,7 @@ static int write_superframe_index(vpx_codec_alg_priv_t *ctx) {

// Choose the magnitude
for (mag = 0, mask = 0xff; mag < 4; mag++) {
if (ctx->pending_frame_magnitude < mask)
if (ctx->pending_frame_magnitude < mask) break;
break;
mask <<= 8;
mask |= 0xff;
}
@ -982,12 +966,11 @@ static vpx_codec_frame_flags_t get_frame_pkt_flags(const VP9_COMP *cpi,
(cpi->use_svc &&
cpi->svc.layer_context[cpi->svc.spatial_layer_id *
cpi->svc.number_temporal_layers +
cpi->svc.temporal_layer_id].is_key_frame)
cpi->svc.temporal_layer_id]
)
.is_key_frame))
flags |= VPX_FRAME_IS_KEY;

if (cpi->droppable)
if (cpi->droppable) flags |= VPX_FRAME_IS_DROPPABLE;
flags |= VPX_FRAME_IS_DROPPABLE;

return flags;
}
@ -1014,8 +997,7 @@ static vpx_codec_err_t encoder_encode(vpx_codec_alg_priv_t *ctx,
// instance for its status to determine the compressed data size.
data_sz = ctx->cfg.g_w * ctx->cfg.g_h * get_image_bps(img) / 8 *
(cpi->multi_arf_allowed ? 8 : 2);
if (data_sz < kMinCompressedSize)
if (data_sz < kMinCompressedSize) data_sz = kMinCompressedSize;
data_sz = kMinCompressedSize;
if (ctx->cx_data == NULL || ctx->cx_data_sz < data_sz) {
ctx->cx_data_sz = data_sz;
free(ctx->cx_data);
@ -1066,16 +1048,15 @@ static vpx_codec_err_t encoder_encode(vpx_codec_alg_priv_t *ctx,
unsigned char *cx_data;

// Set up internal flags
if (ctx->base.init_flags & VPX_CODEC_USE_PSNR)
if (ctx->base.init_flags & VPX_CODEC_USE_PSNR) cpi->b_calculate_psnr = 1;
cpi->b_calculate_psnr = 1;

if (img != NULL) {
res = image2yuvconfig(img, &sd);

// Store the original flags in to the frame buffer. Will extract the
// key frame flag when we actually encode this frame.
if (vp9_receive_raw_frame(cpi, flags | ctx->next_frame_flags,
if (vp9_receive_raw_frame(cpi, flags | ctx->next_frame_flags, &sd,
&sd, dst_time_stamp, dst_end_time_stamp)) {
dst_time_stamp, dst_end_time_stamp)) {
res = update_error_state(ctx, &cpi->common.error);
}
ctx->next_frame_flags = 0;
@ -1102,25 +1083,24 @@ static vpx_codec_err_t encoder_encode(vpx_codec_alg_priv_t *ctx,
}

while (cx_data_sz >= ctx->cx_data_sz / 2 &&
-1 != vp9_get_compressed_data(cpi, &lib_flags, &size,
-1 != vp9_get_compressed_data(cpi, &lib_flags, &size, cx_data,
cx_data, &dst_time_stamp,
&dst_time_stamp, &dst_end_time_stamp,
&dst_end_time_stamp, !img)) {
!img)) {
if (size) {
vpx_codec_cx_pkt_t pkt;

#if CONFIG_SPATIAL_SVC
if (cpi->use_svc)
cpi->svc.layer_context[cpi->svc.spatial_layer_id *
cpi->svc.number_temporal_layers].layer_size += size;
cpi->svc.number_temporal_layers]
.layer_size += size;
#endif

// Pack invisible frames with the next visible frame
if (!cpi->common.show_frame ||
(cpi->use_svc &&
cpi->svc.spatial_layer_id < cpi->svc.number_spatial_layers - 1)
cpi->svc.spatial_layer_id < cpi->svc.number_spatial_layers - 1)) {
) {
if (ctx->pending_cx_data == 0) ctx->pending_cx_data = cx_data;
if (ctx->pending_cx_data == 0)
ctx->pending_cx_data = cx_data;
ctx->pending_cx_data_sz += size;
ctx->pending_frame_sizes[ctx->pending_frame_count++] = size;
ctx->pending_frame_magnitude |= size;
@ -1129,11 +1109,10 @@ static vpx_codec_err_t encoder_encode(vpx_codec_alg_priv_t *ctx,

if (ctx->output_cx_pkt_cb.output_cx_pkt) {
pkt.kind = VPX_CODEC_CX_FRAME_PKT;
pkt.data.frame.pts = ticks_to_timebase_units(timebase,
pkt.data.frame.pts =
dst_time_stamp);
ticks_to_timebase_units(timebase, dst_time_stamp);
pkt.data.frame.duration =
pkt.data.frame.duration = (unsigned long)ticks_to_timebase_units(
(unsigned long)ticks_to_timebase_units(timebase,
timebase, dst_end_time_stamp - dst_time_stamp);
dst_end_time_stamp - dst_time_stamp);
pkt.data.frame.flags = get_frame_pkt_flags(cpi, lib_flags);
pkt.data.frame.buf = ctx->pending_cx_data;
pkt.data.frame.sz = size;
@ -1150,9 +1129,8 @@ static vpx_codec_err_t encoder_encode(vpx_codec_alg_priv_t *ctx,
// Add the frame packet to the list of returned packets.
pkt.kind = VPX_CODEC_CX_FRAME_PKT;
pkt.data.frame.pts = ticks_to_timebase_units(timebase, dst_time_stamp);
pkt.data.frame.duration =
pkt.data.frame.duration = (unsigned long)ticks_to_timebase_units(
(unsigned long)ticks_to_timebase_units(timebase,
timebase, dst_end_time_stamp - dst_time_stamp);
dst_end_time_stamp - dst_time_stamp);
pkt.data.frame.flags = get_frame_pkt_flags(cpi, lib_flags);

if (ctx->pending_cx_data) {
@ -1287,7 +1265,6 @@ static vpx_codec_err_t ctrl_set_previewpp(vpx_codec_alg_priv_t *ctx,
#endif
}

static vpx_image_t *encoder_get_preview(vpx_codec_alg_priv_t *ctx) {
YV12_BUFFER_CONFIG sd;
vp9_ppflags_t flags;
@ -1316,14 +1293,13 @@ static vpx_codec_err_t ctrl_set_roi_map(vpx_codec_alg_priv_t *ctx,
return VPX_CODEC_INVALID_PARAM;
}

static vpx_codec_err_t ctrl_set_active_map(vpx_codec_alg_priv_t *ctx,
va_list args) {
vpx_active_map_t *const map = va_arg(args, vpx_active_map_t *);

if (map) {
if (!vp9_set_active_map(ctx->cpi, map->active_map,
if (!vp9_set_active_map(ctx->cpi, map->active_map, (int)map->rows,
(int)map->rows, (int)map->cols))
(int)map->cols))
return VPX_CODEC_OK;
else
return VPX_CODEC_INVALID_PARAM;
@ -1337,8 +1313,8 @@ static vpx_codec_err_t ctrl_get_active_map(vpx_codec_alg_priv_t *ctx,
vpx_active_map_t *const map = va_arg(args, vpx_active_map_t *);

if (map) {
if (!vp9_get_active_map(ctx->cpi, map->active_map,
if (!vp9_get_active_map(ctx->cpi, map->active_map, (int)map->rows,
(int)map->rows, (int)map->cols))
(int)map->cols))
return VPX_CODEC_OK;
else
return VPX_CODEC_INVALID_PARAM;
@ -1352,8 +1328,8 @@ static vpx_codec_err_t ctrl_set_scale_mode(vpx_codec_alg_priv_t *ctx,
vpx_scaling_mode_t *const mode = va_arg(args, vpx_scaling_mode_t *);

if (mode) {
const int res = vp9_set_internal_size(ctx->cpi,
const int res =
(VPX_SCALING)mode->h_scaling_mode,
vp9_set_internal_size(ctx->cpi, (VPX_SCALING)mode->h_scaling_mode,
(VPX_SCALING)mode->v_scaling_mode);
return (res == 0) ? VPX_CODEC_OK : VPX_CODEC_INVALID_PARAM;
} else {
@ -1374,10 +1350,8 @@ static vpx_codec_err_t ctrl_set_svc(vpx_codec_alg_priv_t *ctx, va_list args) {
vp9_set_svc(ctx->cpi, data);

if (data == 1 &&
(cfg->g_pass == VPX_RC_FIRST_PASS ||
(cfg->g_pass == VPX_RC_FIRST_PASS || cfg->g_pass == VPX_RC_LAST_PASS) &&
cfg->g_pass == VPX_RC_LAST_PASS) &&
cfg->ss_number_layers > 1 && cfg->ts_number_layers > 1) {
cfg->ss_number_layers > 1 &&
cfg->ts_number_layers > 1) {
return VPX_CODEC_INVALID_PARAM;
}
return VPX_CODEC_OK;
@ -1431,8 +1405,7 @@ static vpx_codec_err_t ctrl_set_svc_parameters(vpx_codec_alg_priv_t *ctx,
for (tl = 0; tl < cpi->svc.number_temporal_layers; ++tl) {
const int layer =
LAYER_IDS_TO_IDX(sl, tl, cpi->svc.number_temporal_layers);
LAYER_CONTEXT *lc =
LAYER_CONTEXT *lc = &cpi->svc.layer_context[layer];
&cpi->svc.layer_context[layer];
lc->max_q = params->max_quantizers[layer];
lc->min_q = params->min_quantizers[layer];
lc->scaling_factor_num = params->scaling_factor_num[sl];
@ -1551,9 +1524,9 @@ static vpx_codec_ctrl_fn_map_t encoder_ctrl_maps[] = {
};

static vpx_codec_enc_cfg_map_t encoder_usage_cfg_map[] = {
{ 0,
{
0,
// NOLINT
{ // NOLINT
0, // g_usage
8, // g_threads
0, // g_profile
@ -1610,8 +1583,7 @@ static vpx_codec_enc_cfg_map_t encoder_usage_cfg_map[] = {
{ 0 }, // ts_layer_id
{ 0 }, // layer_taget_bitrate
0 // temporal_layering_mode
}
} },
},
};

#ifndef VERSION_STRING
@ -1627,14 +1599,16 @@ CODEC_INTERFACE(vpx_codec_vp9_cx) = {
encoder_init, // vpx_codec_init_fn_t
encoder_destroy, // vpx_codec_destroy_fn_t
encoder_ctrl_maps, // vpx_codec_ctrl_fn_map_t
{ // NOLINT
{
// NOLINT
NULL, // vpx_codec_peek_si_fn_t
NULL, // vpx_codec_get_si_fn_t
NULL, // vpx_codec_decode_fn_t
NULL, // vpx_codec_frame_get_fn_t
NULL // vpx_codec_set_fb_fn_t
},
{ // NOLINT
{
// NOLINT
1, // 1 cfg map
encoder_usage_cfg_map, // vpx_codec_enc_cfg_map_t
encoder_encode, // vpx_codec_encode_fn_t
@ -41,8 +41,7 @@ static vpx_codec_err_t decoder_init(vpx_codec_ctx_t *ctx,
if (!ctx->priv) {
vpx_codec_alg_priv_t *const priv =
(vpx_codec_alg_priv_t *)vpx_calloc(1, sizeof(*priv));
if (priv == NULL)
if (priv == NULL) return VPX_CODEC_MEM_ERROR;
return VPX_CODEC_MEM_ERROR;

ctx->priv = (vpx_codec_priv_t *)priv;
ctx->priv->init_flags = ctx->init_flags;
@ -51,7 +50,9 @@ static vpx_codec_err_t decoder_init(vpx_codec_ctx_t *ctx,
// Only do frame parallel decode when threads > 1.
priv->frame_parallel_decode =
(ctx->config.dec && (ctx->config.dec->threads > 1) &&
(ctx->init_flags & VPX_CODEC_USE_FRAME_THREADING)) ? 1 : 0;
(ctx->init_flags & VPX_CODEC_USE_FRAME_THREADING))
? 1
: 0;
if (ctx->config.dec) {
priv->cfg = *ctx->config.dec;
ctx->config.dec = &priv->cfg;
@ -102,11 +103,10 @@ static vpx_codec_err_t decoder_destroy(vpx_codec_alg_priv_t *ctx) {
return VPX_CODEC_OK;
}

static int parse_bitdepth_colorspace_sampling(
static int parse_bitdepth_colorspace_sampling(BITSTREAM_PROFILE profile,
BITSTREAM_PROFILE profile, struct vpx_read_bit_buffer *rb) {
struct vpx_read_bit_buffer *rb) {
vpx_color_space_t color_space;
if (profile >= PROFILE_2)
if (profile >= PROFILE_2) rb->bit_offset += 1; // Bit-depth 10 or 12.
rb->bit_offset += 1; // Bit-depth 10 or 12.
color_space = (vpx_color_space_t)vpx_rb_read_literal(rb, 3);
if (color_space != VPX_CS_SRGB) {
rb->bit_offset += 1; // [16,235] (including xvycc) vs [0,255] range.
@ -125,17 +125,13 @@ static int parse_bitdepth_colorspace_sampling(
return 1;
}

static vpx_codec_err_t decoder_peek_si_internal(const uint8_t *data,
static vpx_codec_err_t decoder_peek_si_internal(
unsigned int data_sz,
const uint8_t *data, unsigned int data_sz, vpx_codec_stream_info_t *si,
vpx_codec_stream_info_t *si,
int *is_intra_only, vpx_decrypt_cb decrypt_cb, void *decrypt_state) {
int *is_intra_only,
vpx_decrypt_cb decrypt_cb,
void *decrypt_state) {
int intra_only_flag = 0;
uint8_t clear_buffer[10];

if (data + data_sz <= data)
if (data + data_sz <= data) return VPX_CODEC_INVALID_PARAM;
return VPX_CODEC_INVALID_PARAM;

si->is_kf = 0;
si->w = si->h = 0;
@ -148,8 +144,7 @@ static vpx_codec_err_t decoder_peek_si_internal(const uint8_t *data,

// A maximum of 6 bits are needed to read the frame marker, profile and
// show_existing_frame.
if (data_sz < 1)
if (data_sz < 1) return VPX_CODEC_UNSUP_BITSTREAM;
return VPX_CODEC_UNSUP_BITSTREAM;

{
int show_frame;
@ -158,17 +153,14 @@ static vpx_codec_err_t decoder_peek_si_internal(const uint8_t *data,
const int frame_marker = vpx_rb_read_literal(&rb, 2);
const BITSTREAM_PROFILE profile = vp9_read_profile(&rb);

if (frame_marker != VP9_FRAME_MARKER)
if (frame_marker != VP9_FRAME_MARKER) return VPX_CODEC_UNSUP_BITSTREAM;
return VPX_CODEC_UNSUP_BITSTREAM;

if (profile >= MAX_PROFILES)
if (profile >= MAX_PROFILES) return VPX_CODEC_UNSUP_BITSTREAM;
return VPX_CODEC_UNSUP_BITSTREAM;

if (vpx_rb_read_bit(&rb)) { // show an existing frame
// If profile is > 2 and show_existing_frame is true, then at least 1 more
// byte (6+3=9 bits) is needed.
if (profile > 2 && data_sz < 2)
if (profile > 2 && data_sz < 2) return VPX_CODEC_UNSUP_BITSTREAM;
return VPX_CODEC_UNSUP_BITSTREAM;
vpx_rb_read_literal(&rb, 3); // Frame buffer to show.
return VPX_CODEC_OK;
}
@ -176,16 +168,14 @@ static vpx_codec_err_t decoder_peek_si_internal(const uint8_t *data,
// For the rest of the function, a maximum of 9 more bytes are needed
// (computed by taking the maximum possible bits needed in each case). Note
// that this has to be updated if we read any more bits in this function.
if (data_sz < 10)
if (data_sz < 10) return VPX_CODEC_UNSUP_BITSTREAM;
return VPX_CODEC_UNSUP_BITSTREAM;

si->is_kf = !vpx_rb_read_bit(&rb);
show_frame = vpx_rb_read_bit(&rb);
error_resilient = vpx_rb_read_bit(&rb);

if (si->is_kf) {
if (!vp9_read_sync_code(&rb))
if (!vp9_read_sync_code(&rb)) return VPX_CODEC_UNSUP_BITSTREAM;
return VPX_CODEC_UNSUP_BITSTREAM;

if (!parse_bitdepth_colorspace_sampling(profile, &rb))
return VPX_CODEC_UNSUP_BITSTREAM;
@ -196,8 +186,7 @@ static vpx_codec_err_t decoder_peek_si_internal(const uint8_t *data,
rb.bit_offset += error_resilient ? 0 : 2; // reset_frame_context

if (intra_only_flag) {
if (!vp9_read_sync_code(&rb))
if (!vp9_read_sync_code(&rb)) return VPX_CODEC_UNSUP_BITSTREAM;
return VPX_CODEC_UNSUP_BITSTREAM;
if (profile > PROFILE_0) {
if (!parse_bitdepth_colorspace_sampling(profile, &rb))
return VPX_CODEC_UNSUP_BITSTREAM;
@ -207,8 +196,7 @@ static vpx_codec_err_t decoder_peek_si_internal(const uint8_t *data,
}
}
}
if (is_intra_only != NULL)
if (is_intra_only != NULL) *is_intra_only = intra_only_flag;
*is_intra_only = intra_only_flag;
return VPX_CODEC_OK;
}

@ -234,8 +222,8 @@ static void set_error_detail(vpx_codec_alg_priv_t *ctx,
ctx->base.err_detail = error;
}

static vpx_codec_err_t update_error_state(vpx_codec_alg_priv_t *ctx,
static vpx_codec_err_t update_error_state(
const struct vpx_internal_error_info *error) {
vpx_codec_alg_priv_t *ctx, const struct vpx_internal_error_info *error) {
if (error->error_code)
set_error_detail(ctx, error->has_detail ? error->detail : NULL);

@ -278,10 +266,8 @@ static void set_default_ppflags(vp8_postproc_cfg_t *cfg) {
cfg->noise_level = 0;
}

static void set_ppflags(const vpx_codec_alg_priv_t *ctx,
static void set_ppflags(const vpx_codec_alg_priv_t *ctx, vp9_ppflags_t *flags) {
vp9_ppflags_t *flags) {
flags->post_proc_flag = ctx->postproc_cfg.post_proc_flag;
flags->post_proc_flag =
ctx->postproc_cfg.post_proc_flag;

flags->deblocking_level = ctx->postproc_cfg.deblocking_level;
flags->noise_level = ctx->postproc_cfg.noise_level;
@ -292,10 +278,8 @@ static int frame_worker_hook(void *arg1, void *arg2) {
const uint8_t *data = frame_worker_data->data;
(void)arg2;

frame_worker_data->result =
frame_worker_data->result = vp9_receive_compressed_data(
vp9_receive_compressed_data(frame_worker_data->pbi,
frame_worker_data->pbi, frame_worker_data->data_size, &data);
frame_worker_data->data_size,
&data);
frame_worker_data->data_end = data;

if (frame_worker_data->pbi->frame_parallel_decode) {
@ -344,8 +328,7 @@ static vpx_codec_err_t init_decoder(vpx_codec_alg_priv_t *ctx) {
ctx->flushed = 0;

ctx->buffer_pool = (BufferPool *)vpx_calloc(1, sizeof(BufferPool));
if (ctx->buffer_pool == NULL)
if (ctx->buffer_pool == NULL) return VPX_CODEC_MEM_ERROR;
return VPX_CODEC_MEM_ERROR;

#if CONFIG_MULTITHREAD
if (pthread_mutex_init(&ctx->buffer_pool->pool_mutex, NULL)) {
@ -354,8 +337,8 @@ static vpx_codec_err_t init_decoder(vpx_codec_alg_priv_t *ctx) {
}
#endif

ctx->frame_workers = (VPxWorker *)
ctx->frame_workers = (VPxWorker *)vpx_malloc(ctx->num_frame_workers *
vpx_malloc(ctx->num_frame_workers * sizeof(*ctx->frame_workers));
sizeof(*ctx->frame_workers));
if (ctx->frame_workers == NULL) {
set_error_detail(ctx, "Failed to allocate frame_workers");
return VPX_CODEC_MEM_ERROR;
@ -411,8 +394,7 @@ static vpx_codec_err_t init_decoder(vpx_codec_alg_priv_t *ctx) {

// If postprocessing was enabled by the application and a
// configuration has not been provided, default it.
if (!ctx->postproc_cfg_set &&
if (!ctx->postproc_cfg_set && (ctx->base.init_flags & VPX_CODEC_USE_POSTPROC))
(ctx->base.init_flags & VPX_CODEC_USE_POSTPROC))
set_default_ppflags(&ctx->postproc_cfg);

init_buffer_callbacks(ctx);
@ -442,11 +424,9 @@ static vpx_codec_err_t decode_one(vpx_codec_alg_priv_t *ctx,
const vpx_codec_err_t res =
decoder_peek_si_internal(*data, data_sz, &ctx->si, &is_intra_only,
ctx->decrypt_cb, ctx->decrypt_state);
if (res != VPX_CODEC_OK)
if (res != VPX_CODEC_OK) return res;
return res;

if (!ctx->si.is_kf && !is_intra_only)
if (!ctx->si.is_kf && !is_intra_only) return VPX_CODEC_ERROR;
return VPX_CODEC_ERROR;
}

if (!ctx->frame_parallel_decode) {
@ -541,8 +521,7 @@ static void wait_worker_and_cache_frame(vpx_codec_alg_priv_t *ctx) {
frame_worker_data->user_priv);
ctx->frame_cache[ctx->frame_cache_write].img.fb_priv =
frame_bufs[cm->new_fb_idx].raw_frame_buffer.priv;
ctx->frame_cache_write =
ctx->frame_cache_write = (ctx->frame_cache_write + 1) % FRAME_CACHE_SIZE;
(ctx->frame_cache_write + 1) % FRAME_CACHE_SIZE;
++ctx->num_cache_frames;
}
}
@ -567,14 +546,12 @@ static vpx_codec_err_t decoder_decode(vpx_codec_alg_priv_t *ctx,
// Initialize the decoder workers on the first frame.
if (ctx->frame_workers == NULL) {
const vpx_codec_err_t res = init_decoder(ctx);
if (res != VPX_CODEC_OK)
if (res != VPX_CODEC_OK) return res;
return res;
}

res = vp9_parse_superframe_index(data, data_sz, frame_sizes, &frame_count,
ctx->decrypt_cb, ctx->decrypt_state);
if (res != VPX_CODEC_OK)
if (res != VPX_CODEC_OK) return res;
return res;

if (ctx->frame_parallel_decode) {
// Decode in frame parallel mode. When decoding in this mode, the frame
@ -587,8 +564,8 @@ static vpx_codec_err_t decoder_decode(vpx_codec_alg_priv_t *ctx,
for (i = 0; i < frame_count; ++i) {
const uint8_t *data_start_copy = data_start;
const uint32_t frame_size = frame_sizes[i];
if (data_start < data
if (data_start < data ||
|| frame_size > (uint32_t) (data_end - data_start)) {
frame_size > (uint32_t)(data_end - data_start)) {
set_error_detail(ctx, "Invalid frame size in index");
return VPX_CODEC_CORRUPT_FRAME;
}
@ -605,10 +582,9 @@ static vpx_codec_err_t decoder_decode(vpx_codec_alg_priv_t *ctx,
}
}

res = decode_one(ctx, &data_start_copy, frame_size, user_priv,
res =
deadline);
decode_one(ctx, &data_start_copy, frame_size, user_priv, deadline);
if (res != VPX_CODEC_OK)
if (res != VPX_CODEC_OK) return res;
return res;
data_start += frame_size;
}
} else {
@ -625,8 +601,7 @@ static vpx_codec_err_t decoder_decode(vpx_codec_alg_priv_t *ctx,
}

res = decode_one(ctx, &data, data_sz, user_priv, deadline);
if (res != VPX_CODEC_OK)
if (res != VPX_CODEC_OK) return res;
return res;
}
} else {
// Decode in serial mode.
@ -637,33 +612,30 @@ static vpx_codec_err_t decoder_decode(vpx_codec_alg_priv_t *ctx,
const uint8_t *data_start_copy = data_start;
const uint32_t frame_size = frame_sizes[i];
vpx_codec_err_t res;
if (data_start < data
if (data_start < data ||
|| frame_size > (uint32_t) (data_end - data_start)) {
frame_size > (uint32_t)(data_end - data_start)) {
set_error_detail(ctx, "Invalid frame size in index");
return VPX_CODEC_CORRUPT_FRAME;
}

res = decode_one(ctx, &data_start_copy, frame_size, user_priv,
res =
deadline);
decode_one(ctx, &data_start_copy, frame_size, user_priv, deadline);
if (res != VPX_CODEC_OK)
if (res != VPX_CODEC_OK) return res;
return res;

data_start += frame_size;
}
} else {
while (data_start < data_end) {
const uint32_t frame_size = (uint32_t)(data_end - data_start);
const vpx_codec_err_t res = decode_one(ctx, &data_start, frame_size,
const vpx_codec_err_t res =
user_priv, deadline);
decode_one(ctx, &data_start, frame_size, user_priv, deadline);
if (res != VPX_CODEC_OK)
if (res != VPX_CODEC_OK) return res;
return res;

// Account for suboptimal termination by the encoder.
while (data_start < data_end) {
const uint8_t marker = read_marker(ctx->decrypt_cb,
const uint8_t marker =
ctx->decrypt_state, data_start);
read_marker(ctx->decrypt_cb, ctx->decrypt_state, data_start);
if (marker)
if (marker) break;
break;
++data_start;
}
}
@ -699,8 +671,7 @@ static vpx_image_t *decoder_get_frame(vpx_codec_alg_priv_t *ctx,
if (ctx->num_cache_frames > 0) {
release_last_output_frame(ctx);
ctx->last_show_frame = ctx->frame_cache[ctx->frame_cache_read].fb_idx;
if (ctx->need_resync)
if (ctx->need_resync) return NULL;
return NULL;
img = &ctx->frame_cache[ctx->frame_cache_read].img;
ctx->frame_cache_read = (ctx->frame_cache_read + 1) % FRAME_CACHE_SIZE;
--ctx->num_cache_frames;
@ -714,8 +685,7 @@ static vpx_image_t *decoder_get_frame(vpx_codec_alg_priv_t *ctx,
YV12_BUFFER_CONFIG sd;
vp9_ppflags_t flags = { 0, 0, 0 };
const VPxWorkerInterface *const winterface = vpx_get_worker_interface();
VPxWorker *const worker =
VPxWorker *const worker = &ctx->frame_workers[ctx->next_output_worker_id];
&ctx->frame_workers[ctx->next_output_worker_id];
FrameWorkerData *const frame_worker_data =
(FrameWorkerData *)worker->data1;
ctx->next_output_worker_id =
@ -735,8 +705,7 @@ static vpx_image_t *decoder_get_frame(vpx_codec_alg_priv_t *ctx,
RefCntBuffer *const frame_bufs = cm->buffer_pool->frame_bufs;
release_last_output_frame(ctx);
ctx->last_show_frame = frame_worker_data->pbi->common.new_fb_idx;
if (ctx->need_resync)
if (ctx->need_resync) return NULL;
return NULL;
yuvconfig2image(&ctx->img, &sd, frame_worker_data->user_priv);
ctx->img.fb_priv = frame_bufs[cm->new_fb_idx].raw_frame_buffer.priv;
img = &ctx->img;
@ -747,8 +716,7 @@ static vpx_image_t *decoder_get_frame(vpx_codec_alg_priv_t *ctx,
frame_worker_data->received_frame = 0;
++ctx->available_threads;
ctx->need_resync = 1;
if (ctx->flushed != 1)
if (ctx->flushed != 1) return NULL;
return NULL;
}
} while (ctx->next_output_worker_id != ctx->next_submit_worker_id);
}
@ -756,8 +724,7 @@ static vpx_image_t *decoder_get_frame(vpx_codec_alg_priv_t *ctx,
}

static vpx_codec_err_t decoder_set_fb_fn(
vpx_codec_alg_priv_t *ctx,
vpx_codec_alg_priv_t *ctx, vpx_get_frame_buffer_cb_fn_t cb_get,
vpx_get_frame_buffer_cb_fn_t cb_get,
vpx_release_frame_buffer_cb_fn_t cb_release, void *cb_priv) {
if (cb_get == NULL || cb_release == NULL) {
return VPX_CODEC_INVALID_PARAM;
@ -1022,8 +989,7 @@ static vpx_codec_err_t ctrl_set_byte_alignment(vpx_codec_alg_priv_t *ctx,
ctx->byte_alignment = byte_alignment;
if (ctx->frame_workers) {
VPxWorker *const worker = ctx->frame_workers;
FrameWorkerData *const frame_worker_data =
FrameWorkerData *const frame_worker_data = (FrameWorkerData *)worker->data1;
(FrameWorkerData *)worker->data1;
frame_worker_data->pbi->common.byte_alignment = byte_alignment;
}
return VPX_CODEC_OK;
@ -1079,14 +1045,16 @@ CODEC_INTERFACE(vpx_codec_vp9_dx) = {
decoder_init, // vpx_codec_init_fn_t
decoder_destroy, // vpx_codec_destroy_fn_t
decoder_ctrl_maps, // vpx_codec_ctrl_fn_map_t
{ // NOLINT
{
// NOLINT
decoder_peek_si, // vpx_codec_peek_si_fn_t
decoder_get_si, // vpx_codec_get_si_fn_t
decoder_decode, // vpx_codec_decode_fn_t
decoder_get_frame, // vpx_codec_frame_get_fn_t
decoder_set_fb_fn, // vpx_codec_set_fb_fn_t
},
{ // NOLINT
{
// NOLINT
0,
NULL, // vpx_codec_enc_cfg_map_t
NULL, // vpx_codec_encode_fn_t
@ -91,10 +91,10 @@ static vpx_codec_err_t image2yuvconfig(const vpx_image_t *img,
yv12->y_width = img->d_w;
yv12->y_height = img->d_h;

yv12->uv_width = img->x_chroma_shift == 1 ? (1 + yv12->y_width) / 2
yv12->uv_width =
: yv12->y_width;
img->x_chroma_shift == 1 ? (1 + yv12->y_width) / 2 : yv12->y_width;
yv12->uv_height = img->y_chroma_shift == 1 ? (1 + yv12->y_height) / 2
yv12->uv_height =
: yv12->y_height;
img->y_chroma_shift == 1 ? (1 + yv12->y_height) / 2 : yv12->y_height;
yv12->uv_crop_width = yv12->uv_width;
yv12->uv_crop_height = yv12->uv_height;

@ -135,12 +135,9 @@ static vpx_codec_err_t image2yuvconfig(const vpx_image_t *img,

static VP9_REFFRAME ref_frame_to_vp9_reframe(vpx_ref_frame_type_t frame) {
switch (frame) {
case VP8_LAST_FRAME:
case VP8_LAST_FRAME: return VP9_LAST_FLAG;
return VP9_LAST_FLAG;
case VP8_GOLD_FRAME: return VP9_GOLD_FLAG;
case VP8_GOLD_FRAME:
case VP8_ALTR_FRAME: return VP9_ALT_FLAG;
return VP9_GOLD_FLAG;
case VP8_ALTR_FRAME:
return VP9_ALT_FLAG;
}
assert(0 && "Invalid Reference Frame");
return VP9_LAST_FLAG;