Cleaning up encoder segmentation code.

Moving code from vp9_pack_bitstream to a new function, encode_segmentation.

Change-Id: I1f1e59a1f038618ad95162b7db4b6f8164850ea8
Dmitry Kovalev 2013-04-29 16:07:17 -07:00
parent bbac4094e4
commit ee97da2c03
7 changed files with 126 additions and 150 deletions
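For readers skimming the diffs below, here is a short, self-contained sketch of the two bitstream conventions the extracted helper relies on: a one-bit update flag in front of each probability (MAX_PROB, i.e. 255, meaning "no update"), and magnitude-plus-sign coding for signed segment feature data. It is illustrative only, not libvpx code: bit_writer and its write_* functions are toy stand-ins for vp9_writer, vp9_write_bit, vp9_write_prob and vp9_encode_unsigned_max, and the data values are made up.

/* Illustrative sketch only -- not libvpx code. */
#include <stdio.h>

#define MAX_PROB 255

typedef struct {
  int bits_written;  /* toy accounting; a real writer packs bits into a buffer */
} bit_writer;

static void write_bit(bit_writer *w, int bit) {
  printf("bit   %d\n", bit);
  w->bits_written += 1;
}

static void write_literal(bit_writer *w, int value, const char *label) {
  printf("%-5s %d\n", label, value);
  w->bits_written += 8;  /* pretend every literal costs 8 bits */
}

/* A probability equal to MAX_PROB is treated as "no update": only a 0 flag
 * is coded, matching the prob != MAX_PROB check in the diff below. */
static void write_prob_update(bit_writer *w, int prob) {
  if (prob != MAX_PROB) {
    write_bit(w, 1);
    write_literal(w, prob, "prob");
  } else {
    write_bit(w, 0);
  }
}

/* Signed feature data is sent as a magnitude followed by a sign bit,
 * matching the vp9_encode_unsigned_max() + vp9_write_bit() pairing. */
static void write_signed_data(bit_writer *w, int data) {
  if (data < 0) {
    write_literal(w, -data, "mag");
    write_bit(w, 1);
  } else {
    write_literal(w, data, "mag");
    write_bit(w, 0);
  }
}

int main(void) {
  bit_writer w = {0};
  int segmentation_enabled = 1;
  int tree_probs[3] = {128, MAX_PROB, 200};  /* second entry: no update */
  int i;

  write_bit(&w, segmentation_enabled);     /* enable bit comes first */
  if (!segmentation_enabled)
    return 0;                              /* early return, as in the helper */

  for (i = 0; i < 3; i++)
    write_prob_update(&w, tree_probs[i]);  /* per-probability update flags */

  write_signed_data(&w, -6);               /* e.g. a negative feature delta */
  printf("total toy bits: %d\n", w.bits_written);
  return 0;
}

Running it prints the flags and values in the order the corresponding elements appear in encode_segmentation(); the map-update and data-update bits and the temporal prediction probabilities are omitted for brevity.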

@@ -355,7 +355,7 @@ typedef struct macroblockd {
PARTITION_CONTEXT *above_seg_context;
PARTITION_CONTEXT *left_seg_context;
/* 0 indicates segmentation at MB level is not enabled. Otherwise the individual bits indicate which features are active. */
/* 0 (disable) 1 (enable) segmentation */
unsigned char segmentation_enabled;
/* 0 (do not update) 1 (update) the macroblock segmentation map. */

@@ -923,25 +923,20 @@ static void write_mb_modes_kf(const VP9_COMP *cpi,
const int segment_id = m->mbmi.segment_id;
int skip_coeff;
if (xd->update_mb_segmentation_map) {
if (xd->update_mb_segmentation_map)
write_mb_segid(bc, &m->mbmi, xd);
}
if (vp9_segfeature_active(xd, segment_id, SEG_LVL_SKIP)) {
skip_coeff = 1;
} else {
skip_coeff = m->mbmi.mb_skip_coeff;
vp9_write(bc, skip_coeff,
vp9_get_pred_prob(c, xd, PRED_MBSKIP));
vp9_write(bc, skip_coeff, vp9_get_pred_prob(c, xd, PRED_MBSKIP));
}
if (m->mbmi.sb_type > BLOCK_SIZE_MB16X16) {
sb_kfwrite_ymode(bc, ym,
c->sb_kf_ymode_prob[c->kf_ymode_probs_index]);
} else {
kfwrite_ymode(bc, ym,
c->kf_ymode_prob[c->kf_ymode_probs_index]);
}
if (m->mbmi.sb_type > BLOCK_SIZE_MB16X16)
sb_kfwrite_ymode(bc, ym, c->sb_kf_ymode_prob[c->kf_ymode_probs_index]);
else
kfwrite_ymode(bc, ym, c->kf_ymode_prob[c->kf_ymode_probs_index]);
if (ym == I4X4_PRED) {
int i = 0;
@@ -1747,16 +1742,91 @@ static void segment_reference_frames(VP9_COMP *cpi) {
}
}
void vp9_pack_bitstream(VP9_COMP *cpi, unsigned char *dest,
unsigned long *size) {
static void encode_segmentation(VP9_COMP *cpi, vp9_writer *w) {
int i, j;
VP9_COMMON *const pc = &cpi->common;
MACROBLOCKD *const xd = &cpi->mb.e_mbd;
vp9_write_bit(w, xd->segmentation_enabled);
if (!xd->segmentation_enabled)
return;
// Segmentation map
vp9_write_bit(w, xd->update_mb_segmentation_map);
#if CONFIG_IMPLICIT_SEGMENTATION
vp9_write_bit(w, xd->allow_implicit_segment_update);
#endif
if (xd->update_mb_segmentation_map) {
// Select the coding strategy (temporal or spatial)
vp9_choose_segmap_coding_method(cpi);
// Write out probabilities used to decode unpredicted macro-block segments
for (i = 0; i < MB_SEG_TREE_PROBS; i++) {
const int prob = xd->mb_segment_tree_probs[i];
if (prob != MAX_PROB) {
vp9_write_bit(w, 1);
vp9_write_prob(w, prob);
} else {
vp9_write_bit(w, 0);
}
}
// Write out the chosen coding method.
vp9_write_bit(w, pc->temporal_update);
if (pc->temporal_update) {
for (i = 0; i < PREDICTION_PROBS; i++) {
const int prob = pc->segment_pred_probs[i];
if (prob != MAX_PROB) {
vp9_write_bit(w, 1);
vp9_write_prob(w, prob);
} else {
vp9_write_bit(w, 0);
}
}
}
}
// Segmentation data
vp9_write_bit(w, xd->update_mb_segmentation_data);
// segment_reference_frames(cpi);
if (xd->update_mb_segmentation_data) {
vp9_write_bit(w, xd->mb_segment_abs_delta);
for (i = 0; i < MAX_MB_SEGMENTS; i++) {
for (j = 0; j < SEG_LVL_MAX; j++) {
const int data = vp9_get_segdata(xd, i, j);
const int data_max = vp9_seg_feature_data_max(j);
if (vp9_segfeature_active(xd, i, j)) {
vp9_write_bit(w, 1);
if (vp9_is_segfeature_signed(j)) {
if (data < 0) {
vp9_encode_unsigned_max(w, -data, data_max);
vp9_write_bit(w, 1);
} else {
vp9_encode_unsigned_max(w, data, data_max);
vp9_write_bit(w, 0);
}
} else {
vp9_encode_unsigned_max(w, data, data_max);
}
} else {
vp9_write_bit(w, 0);
}
}
}
}
}
void vp9_pack_bitstream(VP9_COMP *cpi, uint8_t *dest, unsigned long *size) {
int i;
VP9_HEADER oh;
VP9_COMMON *const pc = &cpi->common;
vp9_writer header_bc, residual_bc;
MACROBLOCKD *const xd = &cpi->mb.e_mbd;
int extra_bytes_packed = 0;
unsigned char *cx_data = dest;
uint8_t *cx_data = dest;
oh.show_frame = (int) pc->show_frame;
oh.type = (int)pc->frame_type;
@@ -1989,87 +2059,7 @@ void vp9_pack_bitstream(VP9_COMP *cpi, unsigned char *dest,
active_section = 7;
#endif
// Signal whether or not Segmentation is enabled
vp9_write_bit(&header_bc, (xd->segmentation_enabled) ? 1 : 0);
// Indicate which features are enabled
if (xd->segmentation_enabled) {
// Indicate whether or not the segmentation map is being updated.
vp9_write_bit(&header_bc, (xd->update_mb_segmentation_map) ? 1 : 0);
#if CONFIG_IMPLICIT_SEGMENTATION
vp9_write_bit(&header_bc, (xd->allow_implicit_segment_update) ? 1 : 0);
#endif
// If it is, then indicate the method that will be used.
if (xd->update_mb_segmentation_map) {
// Select the coding strategy (temporal or spatial)
vp9_choose_segmap_coding_method(cpi);
// Send the tree probabilities used to decode unpredicted
// macro-block segments
for (i = 0; i < MB_SEG_TREE_PROBS; i++) {
const int prob = xd->mb_segment_tree_probs[i];
if (prob != 255) {
vp9_write_bit(&header_bc, 1);
vp9_write_prob(&header_bc, prob);
} else {
vp9_write_bit(&header_bc, 0);
}
}
// Write out the chosen coding method.
vp9_write_bit(&header_bc, (pc->temporal_update) ? 1 : 0);
if (pc->temporal_update) {
for (i = 0; i < PREDICTION_PROBS; i++) {
const int prob = pc->segment_pred_probs[i];
if (prob != 255) {
vp9_write_bit(&header_bc, 1);
vp9_write_prob(&header_bc, prob);
} else {
vp9_write_bit(&header_bc, 0);
}
}
}
}
vp9_write_bit(&header_bc, (xd->update_mb_segmentation_data) ? 1 : 0);
// segment_reference_frames(cpi);
if (xd->update_mb_segmentation_data) {
vp9_write_bit(&header_bc, (xd->mb_segment_abs_delta) ? 1 : 0);
// For each segments id...
for (i = 0; i < MAX_MB_SEGMENTS; i++) {
// For each segmentation codable feature...
for (j = 0; j < SEG_LVL_MAX; j++) {
const int8_t data = vp9_get_segdata(xd, i, j);
const int data_max = vp9_seg_feature_data_max(j);
// If the feature is enabled...
if (vp9_segfeature_active(xd, i, j)) {
vp9_write_bit(&header_bc, 1);
// Is the segment data signed..
if (vp9_is_segfeature_signed(j)) {
// Encode the relevant feature data
if (data < 0) {
vp9_encode_unsigned_max(&header_bc, -data, data_max);
vp9_write_bit(&header_bc, 1);
} else {
vp9_encode_unsigned_max(&header_bc, data, data_max);
vp9_write_bit(&header_bc, 0);
}
} else {
// Unsigned data element so no sign bit needed
vp9_encode_unsigned_max(&header_bc, data, data_max);
}
} else {
vp9_write_bit(&header_bc, 0);
}
}
}
}
}
encode_segmentation(cpi, &header_bc);
// Encode the common prediction model status flag probability updates for
// the reference frame

@@ -631,13 +631,11 @@ static void set_offsets(VP9_COMP *cpi,
/* segment ID */
if (xd->segmentation_enabled) {
if (xd->update_mb_segmentation_map) {
mbmi->segment_id = find_seg_id(cpi->segmentation_map, bsize,
mi_row, cm->mi_rows, mi_col, cm->mi_cols);
} else {
mbmi->segment_id = find_seg_id(cm->last_frame_seg_map, bsize,
mi_row, cm->mi_rows, mi_col, cm->mi_cols);
}
uint8_t *map = xd->update_mb_segmentation_map ? cpi->segmentation_map
: cm->last_frame_seg_map;
mbmi->segment_id = find_seg_id(map, bsize, mi_row,
cm->mi_rows, mi_col, cm->mi_cols);
assert(mbmi->segment_id <= (MAX_MB_SEGMENTS-1));
vp9_mb_init_quantizer(cpi, x);

@@ -419,10 +419,10 @@ static void separate_arf_mbs(VP9_COMP *cpi) {
cpi->static_mb_pct = 0;
cpi->seg0_cnt = ncnt[0];
vp9_enable_segmentation((VP9_PTR) cpi);
vp9_enable_segmentation((VP9_PTR)cpi);
} else {
cpi->static_mb_pct = 0;
vp9_disable_segmentation((VP9_PTR) cpi);
vp9_disable_segmentation((VP9_PTR)cpi);
}
// Free localy allocated storage

@@ -280,8 +280,7 @@ static void setup_features(VP9_COMP *cpi) {
MACROBLOCKD *xd = &cpi->mb.e_mbd;
// Set up default state for MB feature flags
xd->segmentation_enabled = 0; // Default segmentation disabled
xd->segmentation_enabled = 0;
xd->update_mb_segmentation_map = 0;
xd->update_mb_segmentation_data = 0;
@@ -383,7 +382,7 @@ static void configure_static_seg_features(VP9_COMP *cpi) {
xd->update_mb_segmentation_map = 0;
xd->update_mb_segmentation_data = 0;
#if CONFIG_IMPLICIT_SEGMENTATION
xd->allow_implicit_segment_update = 0;
xd->allow_implicit_segment_update = 0;
#endif
cpi->static_mb_pct = 0;
@@ -399,7 +398,7 @@ static void configure_static_seg_features(VP9_COMP *cpi) {
xd->update_mb_segmentation_map = 0;
xd->update_mb_segmentation_data = 0;
#if CONFIG_IMPLICIT_SEGMENTATION
xd->allow_implicit_segment_update = 0;
xd->allow_implicit_segment_update = 0;
#endif
cpi->static_mb_pct = 0;
@@ -428,9 +427,9 @@ static void configure_static_seg_features(VP9_COMP *cpi) {
xd->mb_segment_abs_delta = SEGMENT_DELTADATA;
}
}
// All other frames if segmentation has been enabled
else if (xd->segmentation_enabled) {
} else if (xd->segmentation_enabled) {
// All other frames if segmentation has been enabled
// First normal frame in a valid gf or alt ref group
if (cpi->common.frames_since_golden == 0) {
// Set up segment features for normal frames in an arf group
@@ -454,10 +453,10 @@ static void configure_static_seg_features(VP9_COMP *cpi) {
vp9_enable_segfeature(xd, 1, SEG_LVL_REF_FRAME);
vp9_enable_segfeature(xd, 1, SEG_LVL_SKIP);
}
}
// Disable segmentation and clear down features if alt ref
// is not active for this group
else {
} else {
// Disable segmentation and clear down features if alt ref
// is not active for this group
vp9_disable_segmentation((VP9_PTR)cpi);
vpx_memset(cpi->segmentation_map, 0, cm->mi_rows * cm->mi_cols);
@@ -467,12 +466,11 @@ static void configure_static_seg_features(VP9_COMP *cpi) {
vp9_clearall_segfeatures(xd);
}
}
} else if (cpi->is_src_frame_alt_ref) {
// Special case where we are coding over the top of a previous
// alt ref frame.
// Segment coding disabled for compred testing
// Special case where we are coding over the top of a previous
// alt ref frame.
// Segment coding disabled for compred testing
else if (cpi->is_src_frame_alt_ref) {
// Enable ref frame features for segment 0 as well
vp9_enable_segfeature(xd, 0, SEG_LVL_REF_FRAME);
vp9_enable_segfeature(xd, 1, SEG_LVL_REF_FRAME);
@@ -490,9 +488,9 @@ static void configure_static_seg_features(VP9_COMP *cpi) {
}
// Enable data udpate
xd->update_mb_segmentation_data = 1;
}
// All other frames.
else {
} else {
// All other frames.
// No updates.. leave things as they are.
xd->update_mb_segmentation_map = 0;
xd->update_mb_segmentation_data = 0;

@@ -4824,10 +4824,9 @@ void vp9_pick_mode_inter_macroblock(VP9_COMP *cpi, MACROBLOCK *x,
int64_t intra_error = 0;
unsigned char *segment_id = &mbmi->segment_id;
if (xd->segmentation_enabled)
x->encode_breakout = cpi->segment_encode_breakout[*segment_id];
else
x->encode_breakout = cpi->oxcf.encode_breakout;
x->encode_breakout = xd->segmentation_enabled ?
cpi->segment_encode_breakout[*segment_id] :
cpi->oxcf.encode_breakout;
// if (cpi->sf.RD)
// For now this codebase is limited to a single rd encode path

@@ -16,18 +16,15 @@
#include "vp9/common/vp9_tile_common.h"
void vp9_enable_segmentation(VP9_PTR ptr) {
VP9_COMP *cpi = (VP9_COMP *)(ptr);
VP9_COMP *cpi = (VP9_COMP *)ptr;
// Set the appropriate feature bit
cpi->mb.e_mbd.segmentation_enabled = 1;
cpi->mb.e_mbd.update_mb_segmentation_map = 1;
cpi->mb.e_mbd.update_mb_segmentation_data = 1;
}
void vp9_disable_segmentation(VP9_PTR ptr) {
VP9_COMP *cpi = (VP9_COMP *)(ptr);
// Clear the appropriate feature bit
VP9_COMP *cpi = (VP9_COMP *)ptr;
cpi->mb.e_mbd.segmentation_enabled = 0;
}
@@ -238,10 +235,8 @@ void vp9_choose_segmap_coding_method(VP9_COMP *cpi) {
// Set default state for the segment tree probabilities and the
// temporal coding probabilities
vpx_memset(xd->mb_segment_tree_probs, 255,
sizeof(xd->mb_segment_tree_probs));
vpx_memset(cm->segment_pred_probs, 255,
sizeof(cm->segment_pred_probs));
vpx_memset(xd->mb_segment_tree_probs, 255, sizeof(xd->mb_segment_tree_probs));
vpx_memset(cm->segment_pred_probs, 255, sizeof(cm->segment_pred_probs));
vpx_memset(no_pred_segcounts, 0, sizeof(no_pred_segcounts));
vpx_memset(t_unpred_seg_counts, 0, sizeof(t_unpred_seg_counts));
@@ -249,7 +244,6 @@ void vp9_choose_segmap_coding_method(VP9_COMP *cpi) {
// First of all generate stats regarding how well the last segment map
// predicts this one
for (tile_col = 0; tile_col < cm->tile_columns; tile_col++) {
vp9_get_tile_col_offsets(cm, tile_col);
mi_ptr = cm->mi + cm->cur_tile_mi_col_start;
@@ -279,27 +273,24 @@ void vp9_choose_segmap_coding_method(VP9_COMP *cpi) {
// Add in the cost of the signalling for each prediction context
for (i = 0; i < PREDICTION_PROBS; i++) {
t_nopred_prob[i] = get_binary_prob(temporal_predictor_count[i][0],
temporal_predictor_count[i][1]);
const int count0 = temporal_predictor_count[i][0];
const int count1 = temporal_predictor_count[i][1];
t_nopred_prob[i] = get_binary_prob(count0, count1);
// Add in the predictor signaling cost
t_pred_cost += (temporal_predictor_count[i][0] *
vp9_cost_zero(t_nopred_prob[i])) +
(temporal_predictor_count[i][1] *
vp9_cost_one(t_nopred_prob[i]));
t_pred_cost += count0 * vp9_cost_zero(t_nopred_prob[i]) +
count1 * vp9_cost_one(t_nopred_prob[i]);
}
}
// Now choose which coding method to use.
if (t_pred_cost < no_pred_cost) {
cm->temporal_update = 1;
vpx_memcpy(xd->mb_segment_tree_probs,
t_pred_tree, sizeof(t_pred_tree));
vpx_memcpy(&cm->segment_pred_probs,
t_nopred_prob, sizeof(t_nopred_prob));
vpx_memcpy(xd->mb_segment_tree_probs, t_pred_tree, sizeof(t_pred_tree));
vpx_memcpy(cm->segment_pred_probs, t_nopred_prob, sizeof(t_nopred_prob));
} else {
cm->temporal_update = 0;
vpx_memcpy(xd->mb_segment_tree_probs,
no_pred_tree, sizeof(no_pred_tree));
vpx_memcpy(xd->mb_segment_tree_probs, no_pred_tree, sizeof(no_pred_tree));
}
}