Removing 'new' probability calculation from convert_distribution().

We don't have to calculate the 'new' probability in convert_distribution():
it is enough to accumulate the 'new' branch counters, from which the
probability can be derived with get_binary_prob() whenever it is actually
needed. This lets us remove a number of unused temporary probability arrays
and reduce the number of get_binary_prob() calls.

Change-Id: I4e14eb7203d1ace61bbddefd6b9b6326be83ba63
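For reference, the consolidation this change converges on lives in the treecoder header (the excerpt below ending in the VP9_COMMON_VP9_TREECODER_H_ guard): merge_probs() now derives the 'new' probability from the two branch counters itself, so callers only ever hand over counters. A minimal self-contained sketch of that shape follows; clip_prob(), weighted_prob() and the merge_probs() body mirror the hunks below, while the MIN / ROUND_POWER_OF_TWO macros and the rounding inside get_binary_prob() are simplified stand-ins rather than the libvpx definitions.

#include <stdio.h>
#include <stdint.h>

typedef uint8_t vp9_prob;

#define MIN(a, b) ((a) < (b) ? (a) : (b))
#define ROUND_POWER_OF_TWO(value, n) (((value) + (1 << ((n) - 1))) >> (n))

static vp9_prob clip_prob(int p) {
  return (p > 255) ? 255u : (p < 1) ? 1u : p;
}

/* P(left branch), estimated from the two branch counters (simplified rounding). */
static vp9_prob get_binary_prob(unsigned int n0, unsigned int n1) {
  return (n0 + n1) ? clip_prob((int)((256u * n0) / (n0 + n1))) : 128;
}

static vp9_prob weighted_prob(int prob1, int prob2, int factor) {
  return ROUND_POWER_OF_TWO(prob1 * (256 - factor) + prob2 * factor, 8);
}

/* After this change merge_probs() computes the 'new' probability internally,
   so the branch counters are all a caller needs to pass. */
static vp9_prob merge_probs(vp9_prob pre_prob, const unsigned int ct[2],
                            unsigned int count_sat,
                            unsigned int max_update_factor) {
  const vp9_prob prob = get_binary_prob(ct[0], ct[1]);
  const unsigned int count = MIN(ct[0] + ct[1], count_sat);
  const unsigned int factor = max_update_factor * count / count_sat;
  return weighted_prob(pre_prob, prob, factor);
}

int main(void) {
  const unsigned int ct[2] = { 75, 25 };  /* made-up branch counters */
  printf("adapted prob: %d\n", merge_probs(128, ct, 20, 128));  /* prints 160 */
  return 0;
}

With this shape in place, the old merge_probs2() wrapper becomes the only merge_probs(), and the update_ct()/update_ct2() pair collapses into a single update_ct().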
Dmitry Kovalev 2013-11-01 15:09:43 -07:00
parent 340b2b076e
commit df19c6b64c
7 changed files with 81 additions and 131 deletions


@@ -322,9 +322,8 @@ static void adapt_coef_probs(VP9_COMMON *cm, TX_SIZE tx_size,
vp9_coeff_count_model *coef_counts = cm->counts.coef[tx_size];
unsigned int (*eob_branch_count)[REF_TYPES][COEF_BANDS][PREV_COEF_CONTEXTS] =
cm->counts.eob_branch[tx_size];
int t, i, j, k, l;
int i, j, k, l, m;
unsigned int branch_ct[UNCONSTRAINED_NODES][2];
vp9_prob coef_probs[UNCONSTRAINED_NODES];
for (i = 0; i < BLOCK_TYPES; ++i)
for (j = 0; j < REF_TYPES; ++j)
@@ -332,15 +331,14 @@ static void adapt_coef_probs(VP9_COMMON *cm, TX_SIZE tx_size,
for (l = 0; l < PREV_COEF_CONTEXTS; ++l) {
if (l >= 3 && k == 0)
continue;
vp9_tree_probs_from_distribution(vp9_coefmodel_tree, coef_probs,
branch_ct, coef_counts[i][j][k][l],
0);
vp9_tree_probs_from_distribution(vp9_coefmodel_tree, branch_ct,
coef_counts[i][j][k][l], 0);
branch_ct[0][1] = eob_branch_count[i][j][k][l] - branch_ct[0][0];
coef_probs[0] = get_binary_prob(branch_ct[0][0], branch_ct[0][1]);
for (t = 0; t < UNCONSTRAINED_NODES; ++t)
dst_coef_probs[i][j][k][l][t] = merge_probs(
pre_coef_probs[i][j][k][l][t], coef_probs[t],
branch_ct[t], count_sat, update_factor);
for (m = 0; m < UNCONSTRAINED_NODES; ++m)
dst_coef_probs[i][j][k][l][m] = merge_probs(
pre_coef_probs[i][j][k][l][m],
branch_ct[m],
count_sat, update_factor);
}
}
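One detail in the adapt_coef_probs() hunk above is worth spelling out: node 0 of the coefficient-model tree is the end-of-block branch, and its second counter is rebuilt from the separately tracked eob_branch count before merging. Previously the code also had to recompute coef_probs[0] with get_binary_prob() after that fix-up; now fixing the counter is enough, because merge_probs() derives the probability from it. A tiny sketch with made-up counter values (the derived probability uses simplified rounding):

#include <stdio.h>

int main(void) {
  /* branch_ct[0] as filled by vp9_tree_probs_from_distribution() from the
     token counts -- hypothetical values for illustration only. */
  unsigned int branch_ct0[2] = { 40, 160 };
  /* How often the end-of-block branch was actually coded, tracked separately
     in counts.eob_branch[] -- hypothetical value. */
  const unsigned int eob_branch_count = 300;
  unsigned int prob0;

  /* The fix-up kept by this change: the 'not end-of-block' counter is the
     number of visits minus the 'end-of-block' counter. */
  branch_ct0[1] = eob_branch_count - branch_ct0[0];

  /* Before this change the code then had to redo
       coef_probs[0] = get_binary_prob(branch_ct0[0], branch_ct0[1]);
     by hand; now the fixed counters go straight into merge_probs(), which
     derives the same value itself. */
  prob0 = 256u * branch_ct0[0] / (branch_ct0[0] + branch_ct0[1]);
  printf("node 0 counters {%u, %u}, derived prob ~%u\n",
         branch_ct0[0], branch_ct0[1], prob0);
  return 0;
}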


@@ -349,13 +349,8 @@ void vp9_entropy_mode_init() {
#define COUNT_SAT 20
#define MAX_UPDATE_FACTOR 128
static int update_ct(vp9_prob pre_prob, vp9_prob prob,
const unsigned int ct[2]) {
return merge_probs(pre_prob, prob, ct, COUNT_SAT, MAX_UPDATE_FACTOR);
}
static int update_ct2(vp9_prob pre_prob, const unsigned int ct[2]) {
return merge_probs2(pre_prob, ct, COUNT_SAT, MAX_UPDATE_FACTOR);
static int update_ct(vp9_prob pre_prob, const unsigned int ct[2]) {
return merge_probs(pre_prob, ct, COUNT_SAT, MAX_UPDATE_FACTOR);
}
static void update_mode_probs(int n_modes,
@@ -364,14 +359,13 @@ static void update_mode_probs(int n_modes,
const vp9_prob *pre_probs, vp9_prob *dst_probs,
unsigned int tok0_offset) {
#define MAX_PROBS 32
vp9_prob probs[MAX_PROBS];
unsigned int branch_ct[MAX_PROBS][2];
int t;
assert(n_modes - 1 < MAX_PROBS);
vp9_tree_probs_from_distribution(tree, probs, branch_ct, cnt, tok0_offset);
vp9_tree_probs_from_distribution(tree, branch_ct, cnt, tok0_offset);
for (t = 0; t < n_modes - 1; ++t)
dst_probs[t] = update_ct(pre_probs[t], probs[t], branch_ct[t]);
dst_probs[t] = update_ct(pre_probs[t], branch_ct[t]);
}
void vp9_adapt_mode_probs(VP9_COMMON *cm) {
@@ -381,17 +375,17 @@ void vp9_adapt_mode_probs(VP9_COMMON *cm) {
const FRAME_COUNTS *counts = &cm->counts;
for (i = 0; i < INTRA_INTER_CONTEXTS; i++)
fc->intra_inter_prob[i] = update_ct2(pre_fc->intra_inter_prob[i],
fc->intra_inter_prob[i] = update_ct(pre_fc->intra_inter_prob[i],
counts->intra_inter[i]);
for (i = 0; i < COMP_INTER_CONTEXTS; i++)
fc->comp_inter_prob[i] = update_ct2(pre_fc->comp_inter_prob[i],
fc->comp_inter_prob[i] = update_ct(pre_fc->comp_inter_prob[i],
counts->comp_inter[i]);
for (i = 0; i < REF_CONTEXTS; i++)
fc->comp_ref_prob[i] = update_ct2(pre_fc->comp_ref_prob[i],
fc->comp_ref_prob[i] = update_ct(pre_fc->comp_ref_prob[i],
counts->comp_ref[i]);
for (i = 0; i < REF_CONTEXTS; i++)
for (j = 0; j < 2; j++)
fc->single_ref_prob[i][j] = update_ct2(pre_fc->single_ref_prob[i][j],
fc->single_ref_prob[i][j] = update_ct(pre_fc->single_ref_prob[i][j],
counts->single_ref[i][j]);
for (i = 0; i < INTER_MODE_CONTEXTS; i++)
@@ -432,24 +426,23 @@ void vp9_adapt_mode_probs(VP9_COMMON *cm) {
for (i = 0; i < TX_SIZE_CONTEXTS; ++i) {
tx_counts_to_branch_counts_8x8(counts->tx.p8x8[i], branch_ct_8x8p);
for (j = 0; j < TX_SIZES - 3; ++j)
fc->tx_probs.p8x8[i][j] = update_ct2(pre_fc->tx_probs.p8x8[i][j],
fc->tx_probs.p8x8[i][j] = update_ct(pre_fc->tx_probs.p8x8[i][j],
branch_ct_8x8p[j]);
tx_counts_to_branch_counts_16x16(counts->tx.p16x16[i], branch_ct_16x16p);
for (j = 0; j < TX_SIZES - 2; ++j)
fc->tx_probs.p16x16[i][j] = update_ct2(pre_fc->tx_probs.p16x16[i][j],
fc->tx_probs.p16x16[i][j] = update_ct(pre_fc->tx_probs.p16x16[i][j],
branch_ct_16x16p[j]);
tx_counts_to_branch_counts_32x32(counts->tx.p32x32[i], branch_ct_32x32p);
for (j = 0; j < TX_SIZES - 1; ++j)
fc->tx_probs.p32x32[i][j] = update_ct2(pre_fc->tx_probs.p32x32[i][j],
fc->tx_probs.p32x32[i][j] = update_ct(pre_fc->tx_probs.p32x32[i][j],
branch_ct_32x32p[j]);
}
}
for (i = 0; i < MBSKIP_CONTEXTS; ++i)
fc->mbskip_probs[i] = update_ct2(pre_fc->mbskip_probs[i],
counts->mbskip[i]);
fc->mbskip_probs[i] = update_ct(pre_fc->mbskip_probs[i], counts->mbskip[i]);
}
static void set_default_lf_deltas(struct loopfilter *lf) {


@@ -191,7 +191,7 @@ void vp9_inc_mv(const MV *mv, nmv_context_counts *counts) {
}
static vp9_prob adapt_prob(vp9_prob prep, const unsigned int ct[2]) {
return merge_probs2(prep, ct, MV_COUNT_SAT, MV_MAX_UPDATE_FACTOR);
return merge_probs(prep, ct, MV_COUNT_SAT, MV_MAX_UPDATE_FACTOR);
}
static unsigned int adapt_probs(unsigned int i,


@@ -40,9 +40,7 @@ void vp9_tokens_from_tree_offset(struct vp9_token *p, vp9_tree t,
tree2tok(p - offset, t, 0, 0, 0);
}
static unsigned int convert_distribution(unsigned int i,
vp9_tree tree,
vp9_prob probs[],
static unsigned int convert_distribution(unsigned int i, vp9_tree tree,
unsigned int branch_ct[][2],
const unsigned int num_events[],
unsigned int tok0_offset) {
@@ -51,24 +49,25 @@ static unsigned int convert_distribution(unsigned int i,
if (tree[i] <= 0) {
left = num_events[-tree[i] - tok0_offset];
} else {
left = convert_distribution(tree[i], tree, probs, branch_ct,
num_events, tok0_offset);
left = convert_distribution(tree[i], tree, branch_ct, num_events,
tok0_offset);
}
if (tree[i + 1] <= 0)
right = num_events[-tree[i + 1] - tok0_offset];
else
right = convert_distribution(tree[i + 1], tree, probs, branch_ct,
num_events, tok0_offset);
right = convert_distribution(tree[i + 1], tree, branch_ct, num_events,
tok0_offset);
probs[i>>1] = get_binary_prob(left, right);
branch_ct[i>>1][0] = left;
branch_ct[i>>1][1] = right;
branch_ct[i >> 1][0] = left;
branch_ct[i >> 1][1] = right;
return left + right;
}
void vp9_tree_probs_from_distribution(vp9_tree tree, vp9_prob probs[/* n-1 */],
void vp9_tree_probs_from_distribution(vp9_tree tree,
unsigned int branch_ct[/* n-1 */][2],
const unsigned int num_events[/* n */],
unsigned int tok0_offset) {
convert_distribution(0, tree, probs, branch_ct, num_events, tok0_offset);
convert_distribution(0, tree, branch_ct, num_events, tok0_offset);
}
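Assembled from the hunk above: the tree walk now only fills branch_ct[][], and whoever still needs a probability derives it from the counters afterwards. A self-contained toy version follows; the recursive walk mirrors the new convert_distribution(), while the 3-leaf example tree, the bare int8_t index type and the on-demand probability at the end are illustrative stand-ins.

#include <stdio.h>
#include <stdint.h>

typedef int8_t vp9_tree_index;  /* entries <= 0 are leaves (negated token),
                                   entries > 0 index the child node */

/* After this change the walker accumulates only the per-node counters. */
static unsigned int convert_distribution(unsigned int i,
                                         const vp9_tree_index *tree,
                                         unsigned int branch_ct[][2],
                                         const unsigned int num_events[],
                                         unsigned int tok0_offset) {
  unsigned int left, right;

  if (tree[i] <= 0)
    left = num_events[-tree[i] - tok0_offset];
  else
    left = convert_distribution(tree[i], tree, branch_ct, num_events,
                                tok0_offset);

  if (tree[i + 1] <= 0)
    right = num_events[-tree[i + 1] - tok0_offset];
  else
    right = convert_distribution(tree[i + 1], tree, branch_ct, num_events,
                                 tok0_offset);

  branch_ct[i >> 1][0] = left;
  branch_ct[i >> 1][1] = right;
  return left + right;
}

int main(void) {
  /* Toy 3-leaf tree: node 0 = { token 0 | node 2 }, node 2 = { token 1 | token 2 }. */
  const vp9_tree_index tree[4] = { 0, 2, -1, -2 };
  const unsigned int num_events[3] = { 50, 30, 20 };  /* made-up token counts */
  unsigned int branch_ct[2][2];
  unsigned int n;

  convert_distribution(0, tree, branch_ct, num_events, 0);

  /* Probabilities are no longer produced here; derive one on demand instead
     (simplified rounding compared to get_binary_prob()). */
  for (n = 0; n < 2; ++n)
    printf("node %u: ct = {%u, %u}, prob(left) ~ %u\n", n,
           branch_ct[n][0], branch_ct[n][1],
           256u * branch_ct[n][0] / (branch_ct[n][0] + branch_ct[n][1]));
  return 0;
}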


@@ -50,11 +50,11 @@ void vp9_tokens_from_tree_offset(struct vp9_token*, vp9_tree, int offset);
probability updates. */
void vp9_tree_probs_from_distribution(vp9_tree tree,
vp9_prob probs[ /* n - 1 */ ],
unsigned int branch_ct[ /* n - 1 */ ][2],
const unsigned int num_events[ /* n */ ],
unsigned int tok0_offset);
static INLINE vp9_prob clip_prob(int p) {
return (p > 255) ? 255u : (p < 1) ? 1u : p;
}
@@ -81,22 +81,15 @@ static INLINE vp9_prob weighted_prob(int prob1, int prob2, int factor) {
return ROUND_POWER_OF_TWO(prob1 * (256 - factor) + prob2 * factor, 8);
}
static INLINE vp9_prob merge_probs(vp9_prob pre_prob, vp9_prob prob,
static INLINE vp9_prob merge_probs(vp9_prob pre_prob,
const unsigned int ct[2],
unsigned int count_sat,
unsigned int max_update_factor) {
const vp9_prob prob = get_binary_prob(ct[0], ct[1]);
const unsigned int count = MIN(ct[0] + ct[1], count_sat);
const unsigned int factor = max_update_factor * count / count_sat;
return weighted_prob(pre_prob, prob, factor);
}
static INLINE vp9_prob merge_probs2(vp9_prob pre_prob,
const unsigned int ct[2],
unsigned int count_sat,
unsigned int max_update_factor) {
return merge_probs(pre_prob, get_binary_prob(ct[0], ct[1]), ct, count_sat,
max_update_factor);
}
#endif // VP9_COMMON_VP9_TREECODER_H_


@@ -163,18 +163,13 @@ void vp9_encode_unsigned_max(struct vp9_write_bit_buffer *wb,
vp9_wb_write_literal(wb, data, get_unsigned_bits(max));
}
static void update_mode(
vp9_writer *w,
int n,
vp9_tree tree,
vp9_prob Pnew[/* n-1 */],
static void update_mode(vp9_writer *w, int n, vp9_tree tree,
vp9_prob Pcur[/* n-1 */],
unsigned int bct[/* n-1 */] [2],
const unsigned int num_events[/* n */]
) {
unsigned int bct[/* n-1 */][2],
const unsigned int num_events[/* n */]) {
int i = 0;
vp9_tree_probs_from_distribution(tree, Pnew, bct, num_events, 0);
vp9_tree_probs_from_distribution(tree, bct, num_events, 0);
n--;
for (i = 0; i < n; ++i)
@@ -185,11 +180,10 @@ static void update_mbintra_mode_probs(VP9_COMP* const cpi,
vp9_writer* const bc) {
VP9_COMMON *const cm = &cpi->common;
int j;
vp9_prob pnew[INTRA_MODES - 1];
unsigned int bct[INTRA_MODES - 1][2];
for (j = 0; j < BLOCK_SIZE_GROUPS; j++)
update_mode(bc, INTRA_MODES, vp9_intra_mode_tree, pnew,
update_mode(bc, INTRA_MODES, vp9_intra_mode_tree,
cm->fc.y_mode_prob[j], bct,
(unsigned int *)cpi->y_mode_count[j]);
}
@@ -231,43 +225,35 @@ static void write_intra_mode(vp9_writer *bc, int m, const vp9_prob *p) {
write_token(bc, vp9_intra_mode_tree, p, vp9_intra_mode_encodings + m);
}
static void update_switchable_interp_probs(VP9_COMP *const cpi,
vp9_writer* const bc) {
static void update_switchable_interp_probs(VP9_COMP *cpi, vp9_writer *w) {
VP9_COMMON *const cm = &cpi->common;
unsigned int branch_ct[SWITCHABLE_FILTER_CONTEXTS][SWITCHABLE_FILTERS - 1][2];
vp9_prob new_prob[SWITCHABLE_FILTER_CONTEXTS][SWITCHABLE_FILTERS - 1];
unsigned int branch_ct[SWITCHABLE_FILTERS - 1][2];
int i, j;
for (j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j) {
vp9_tree_probs_from_distribution(
vp9_switchable_interp_tree,
new_prob[j], branch_ct[j],
vp9_tree_probs_from_distribution(vp9_switchable_interp_tree, branch_ct,
cm->counts.switchable_interp[j], 0);
for (i = 0; i < SWITCHABLE_FILTERS - 1; ++i)
vp9_cond_prob_diff_update(w, &cm->fc.switchable_interp_prob[j][i],
branch_ct[i]);
}
for (j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j) {
for (i = 0; i < SWITCHABLE_FILTERS - 1; ++i) {
vp9_cond_prob_diff_update(bc, &cm->fc.switchable_interp_prob[j][i],
branch_ct[j][i]);
}
}
#ifdef MODE_STATS
if (!cpi->dummy_packing)
update_switchable_interp_stats(cm);
#endif
}
static void update_inter_mode_probs(VP9_COMMON *cm, vp9_writer* const bc) {
static void update_inter_mode_probs(VP9_COMMON *cm, vp9_writer *w) {
int i, j;
for (i = 0; i < INTER_MODE_CONTEXTS; ++i) {
unsigned int branch_ct[INTER_MODES - 1][2];
vp9_prob new_prob[INTER_MODES - 1];
vp9_tree_probs_from_distribution(vp9_inter_mode_tree,
new_prob, branch_ct,
vp9_tree_probs_from_distribution(vp9_inter_mode_tree, branch_ct,
cm->counts.inter_mode[i], NEARESTMV);
for (j = 0; j < INTER_MODES - 1; ++j)
vp9_cond_prob_diff_update(bc, &cm->fc.inter_mode_probs[i][j],
vp9_cond_prob_diff_update(w, &cm->fc.inter_mode_probs[i][j],
branch_ct[j]);
}
}
@@ -710,8 +696,7 @@ static void build_tree_distribution(VP9_COMP *cpi, TX_SIZE tx_size) {
unsigned int (*eob_branch_ct)[REF_TYPES][COEF_BANDS][PREV_COEF_CONTEXTS] =
cpi->common.counts.eob_branch[tx_size];
vp9_coeff_stats *coef_branch_ct = cpi->frame_branch_ct[tx_size];
vp9_prob full_probs[ENTROPY_NODES];
int i, j, k, l;
int i, j, k, l, m;
for (i = 0; i < BLOCK_TYPES; ++i) {
for (j = 0; j < REF_TYPES; ++j) {
@@ -720,16 +705,14 @@ static void build_tree_distribution(VP9_COMP *cpi, TX_SIZE tx_size) {
if (l >= 3 && k == 0)
continue;
vp9_tree_probs_from_distribution(vp9_coef_tree,
full_probs,
coef_branch_ct[i][j][k][l],
coef_counts[i][j][k][l], 0);
vpx_memcpy(coef_probs[i][j][k][l], full_probs,
sizeof(vp9_prob) * UNCONSTRAINED_NODES);
coef_branch_ct[i][j][k][l][0][1] = eob_branch_ct[i][j][k][l] -
coef_branch_ct[i][j][k][l][0][0];
coef_probs[i][j][k][l][0] =
get_binary_prob(coef_branch_ct[i][j][k][l][0][0],
coef_branch_ct[i][j][k][l][0][1]);
for (m = 0; m < UNCONSTRAINED_NODES; ++m)
coef_probs[i][j][k][l][m] = get_binary_prob(
coef_branch_ct[i][j][k][l][m][0],
coef_branch_ct[i][j][k][l][m][1]);
#ifdef ENTROPY_STATS
if (!cpi->dummy_packing) {
int t;
@@ -1467,10 +1450,8 @@ static size_t write_compressed_header(VP9_COMP *cpi, uint8_t *data) {
update_mbintra_mode_probs(cpi, &header_bc);
for (i = 0; i < PARTITION_CONTEXTS; ++i) {
vp9_prob pnew[PARTITION_TYPES - 1];
unsigned int bct[PARTITION_TYPES - 1][2];
update_mode(&header_bc, PARTITION_TYPES,
vp9_partition_tree, pnew,
update_mode(&header_bc, PARTITION_TYPES, vp9_partition_tree,
fc->partition_prob[cm->frame_type][i], bct,
(unsigned int *)cpi->partition_count[i]);
}
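The same idea shows up on the encoder side in build_tree_distribution() above: rather than computing full_probs[] for the whole coefficient tree and copying out the first UNCONSTRAINED_NODES entries, each model probability is now derived directly from its branch counters. A compact sketch of just that loop; the counter values are made up, the rounding is simplified, and UNCONSTRAINED_NODES = 3 is assumed here rather than taken from the headers.

#include <stdio.h>

#define UNCONSTRAINED_NODES 3  /* assumed value for this sketch */

int main(void) {
  /* Hypothetical branch counters for one (plane, ref, band, context) cell. */
  const unsigned int branch_ct[UNCONSTRAINED_NODES][2] = {
    { 120, 380 }, { 200, 180 }, { 90, 90 }
  };
  unsigned char coef_probs[UNCONSTRAINED_NODES];
  int m;

  /* Replaces the old "fill full_probs for every tree node, then vpx_memcpy()
     the first UNCONSTRAINED_NODES of them" sequence. */
  for (m = 0; m < UNCONSTRAINED_NODES; ++m)
    coef_probs[m] = (unsigned char)(256u * branch_ct[m][0] /
                                    (branch_ct[m][0] + branch_ct[m][1]));

  for (m = 0; m < UNCONSTRAINED_NODES; ++m)
    printf("coef_probs[%d] = %d\n", m, coef_probs[m]);
  return 0;
}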


@@ -124,8 +124,9 @@ static void build_nmv_component_cost_table(int *mvcost,
}
}
static int update_mv(vp9_writer *w, const unsigned int ct[2],
vp9_prob *cur_p, vp9_prob new_p, vp9_prob upd_p) {
static int update_mv(vp9_writer *w, const unsigned int ct[2], vp9_prob *cur_p,
vp9_prob upd_p) {
const vp9_prob new_p = get_binary_prob(ct[0], ct[1]);
vp9_prob mod_p = new_p | 1;
const int cur_b = cost_branch256(ct, *cur_p);
const int mod_b = cost_branch256(ct, mod_p);
@@ -143,7 +144,6 @@ static int update_mv(vp9_writer *w, const unsigned int ct[2],
static void counts_to_nmv_context(
nmv_context_counts *nmv_count,
nmv_context *prob,
int usehp,
unsigned int (*branch_ct_joint)[2],
unsigned int (*branch_ct_sign)[2],
@@ -156,29 +156,24 @@ static void counts_to_nmv_context(
unsigned int (*branch_ct_hp)[2]) {
int i, j, k;
vp9_tree_probs_from_distribution(vp9_mv_joint_tree,
prob->joints,
branch_ct_joint,
nmv_count->joints, 0);
for (i = 0; i < 2; ++i) {
const uint32_t s0 = nmv_count->comps[i].sign[0];
const uint32_t s1 = nmv_count->comps[i].sign[1];
prob->comps[i].sign = get_binary_prob(s0, s1);
branch_ct_sign[i][0] = s0;
branch_ct_sign[i][1] = s1;
vp9_tree_probs_from_distribution(vp9_mv_class_tree,
prob->comps[i].classes,
branch_ct_classes[i],
nmv_count->comps[i].classes, 0);
vp9_tree_probs_from_distribution(vp9_mv_class0_tree,
prob->comps[i].class0,
branch_ct_class0[i],
nmv_count->comps[i].class0, 0);
for (j = 0; j < MV_OFFSET_BITS; ++j) {
const uint32_t b0 = nmv_count->comps[i].bits[j][0];
const uint32_t b1 = nmv_count->comps[i].bits[j][1];
prob->comps[i].bits[j] = get_binary_prob(b0, b1);
branch_ct_bits[i][j][0] = b0;
branch_ct_bits[i][j][1] = b1;
}
@@ -186,12 +181,10 @@ static void counts_to_nmv_context(
for (i = 0; i < 2; ++i) {
for (k = 0; k < CLASS0_SIZE; ++k) {
vp9_tree_probs_from_distribution(vp9_mv_fp_tree,
prob->comps[i].class0_fp[k],
branch_ct_class0_fp[i][k],
nmv_count->comps[i].class0_fp[k], 0);
}
vp9_tree_probs_from_distribution(vp9_mv_fp_tree,
prob->comps[i].fp,
branch_ct_fp[i],
nmv_count->comps[i].fp, 0);
}
@@ -202,11 +195,9 @@ static void counts_to_nmv_context(
const uint32_t hp0 = nmv_count->comps[i].hp[0];
const uint32_t hp1 = nmv_count->comps[i].hp[1];
prob->comps[i].class0_hp = get_binary_prob(c0_hp0, c0_hp1);
branch_ct_class0_hp[i][0] = c0_hp0;
branch_ct_class0_hp[i][1] = c0_hp1;
prob->comps[i].hp = get_binary_prob(hp0, hp1);
branch_ct_hp[i][0] = hp0;
branch_ct_hp[i][1] = hp1;
}
@@ -215,7 +206,6 @@ static void counts_to_nmv_context(
void vp9_write_nmv_probs(VP9_COMP* const cpi, int usehp, vp9_writer* const bc) {
int i, j;
nmv_context prob;
unsigned int branch_ct_joint[MV_JOINTS - 1][2];
unsigned int branch_ct_sign[2][2];
unsigned int branch_ct_classes[2][MV_CLASSES - 1][2];
@@ -227,30 +217,28 @@ void vp9_write_nmv_probs(VP9_COMP* const cpi, int usehp, vp9_writer* const bc) {
unsigned int branch_ct_hp[2][2];
nmv_context *mvc = &cpi->common.fc.nmvc;
counts_to_nmv_context(&cpi->NMVcount, &prob, usehp,
counts_to_nmv_context(&cpi->NMVcount, usehp,
branch_ct_joint, branch_ct_sign, branch_ct_classes,
branch_ct_class0, branch_ct_bits,
branch_ct_class0_fp, branch_ct_fp,
branch_ct_class0_hp, branch_ct_hp);
for (j = 0; j < MV_JOINTS - 1; ++j)
update_mv(bc, branch_ct_joint[j], &mvc->joints[j], prob.joints[j],
NMV_UPDATE_PROB);
update_mv(bc, branch_ct_joint[j], &mvc->joints[j], NMV_UPDATE_PROB);
for (i = 0; i < 2; ++i) {
update_mv(bc, branch_ct_sign[i], &mvc->comps[i].sign,
prob.comps[i].sign, NMV_UPDATE_PROB);
update_mv(bc, branch_ct_sign[i], &mvc->comps[i].sign, NMV_UPDATE_PROB);
for (j = 0; j < MV_CLASSES - 1; ++j)
update_mv(bc, branch_ct_classes[i][j], &mvc->comps[i].classes[j],
prob.comps[i].classes[j], NMV_UPDATE_PROB);
NMV_UPDATE_PROB);
for (j = 0; j < CLASS0_SIZE - 1; ++j)
update_mv(bc, branch_ct_class0[i][j], &mvc->comps[i].class0[j],
prob.comps[i].class0[j], NMV_UPDATE_PROB);
NMV_UPDATE_PROB);
for (j = 0; j < MV_OFFSET_BITS; ++j)
update_mv(bc, branch_ct_bits[i][j], &mvc->comps[i].bits[j],
prob.comps[i].bits[j], NMV_UPDATE_PROB);
NMV_UPDATE_PROB);
}
for (i = 0; i < 2; ++i) {
@@ -258,21 +246,19 @@ void vp9_write_nmv_probs(VP9_COMP* const cpi, int usehp, vp9_writer* const bc) {
int k;
for (k = 0; k < 3; ++k)
update_mv(bc, branch_ct_class0_fp[i][j][k],
&mvc->comps[i].class0_fp[j][k],
prob.comps[i].class0_fp[j][k], NMV_UPDATE_PROB);
&mvc->comps[i].class0_fp[j][k], NMV_UPDATE_PROB);
}
for (j = 0; j < 3; ++j)
update_mv(bc, branch_ct_fp[i][j], &mvc->comps[i].fp[j],
prob.comps[i].fp[j], NMV_UPDATE_PROB);
update_mv(bc, branch_ct_fp[i][j], &mvc->comps[i].fp[j], NMV_UPDATE_PROB);
}
if (usehp) {
for (i = 0; i < 2; ++i) {
update_mv(bc, branch_ct_class0_hp[i], &mvc->comps[i].class0_hp,
prob.comps[i].class0_hp, NMV_UPDATE_PROB);
NMV_UPDATE_PROB);
update_mv(bc, branch_ct_hp[i], &mvc->comps[i].hp,
prob.comps[i].hp, NMV_UPDATE_PROB);
NMV_UPDATE_PROB);
}
}
}
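The motion-vector writer follows the same pattern: update_mv() now derives the candidate probability from the counters itself, so counts_to_nmv_context() no longer needs to fill a parallel nmv_context of 'new' probabilities. Below is a deliberately simplified stand-in for that decision -- the real update_mv() compares exact bit costs via cost_branch256() before signalling an update, whereas this sketch just uses a plain difference threshold.

#include <stdio.h>
#include <stdint.h>

typedef uint8_t vp9_prob;

/* Candidate probability from the two branch counters (simplified rounding). */
static vp9_prob prob_from_counts(const unsigned int ct[2]) {
  const unsigned int total = ct[0] + ct[1];
  unsigned int p;
  if (total == 0)
    return 128;
  p = 256u * ct[0] / total;
  return (vp9_prob)(p > 255 ? 255 : p < 1 ? 1 : p);
}

/* Stand-in for update_mv(): derive the candidate internally and decide
   whether updating is worthwhile. The fixed threshold is illustrative only;
   the low bit is forced, mirroring the mod_p = new_p | 1 step above. */
static int maybe_update_mv_prob(vp9_prob *cur_p, const unsigned int ct[2]) {
  const vp9_prob new_p = prob_from_counts(ct);
  const vp9_prob mod_p = new_p | 1;
  const int diff = (int)*cur_p - (int)mod_p;
  if (diff > 8 || diff < -8) {
    *cur_p = mod_p;
    return 1;  /* an update would be signalled and coded */
  }
  return 0;    /* keep the current probability */
}

int main(void) {
  vp9_prob joint_prob = 128;                      /* hypothetical current prob */
  const unsigned int joint_ct[2] = { 900, 100 };  /* made-up counters */
  const int updated = maybe_update_mv_prob(&joint_prob, joint_ct);
  printf("updated=%d, prob=%d\n", updated, joint_prob);
  return 0;
}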