I have uploaded this patch for your review. I have run extensive tests to make sure that removing the tables does not create any problems.

The test file is called test_lpc.c, and it requires a hack to standard iSAC. The test computes LPC coefficients, then encodes and decodes with both the old and the new (size-reduced) tables, and compares the results at every step. I ran the test over a large set of files, more than 51 hours of audio, and there were no errors.
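
Conceptually, the comparison boils down to checking that every intermediate result produced with the size-reduced tables matches the reference. The following is a minimal, self-contained sketch of that per-step comparison, not the actual test_lpc.c (which hooks directly into the iSAC encoder/decoder); here the two coefficient sets are simply passed in as arrays.

#include <math.h>
#include <stdio.h>

/* Return 0 if the two coefficient sets match within 'tol', -1 otherwise. */
static int CompareCoeffs(const double* ref, const double* test, int len,
                         double tol, const char* step) {
  int k;
  for (k = 0; k < len; ++k) {
    if (fabs(ref[k] - test[k]) > tol) {
      fprintf(stderr, "%s: mismatch at index %d (%f vs %f)\n",
              step, k, ref[k], test[k]);
      return -1;
    }
  }
  return 0;
}

int main(void) {
  /* Dummy data standing in for coefficients produced with the old and the
   * size-reduced tables; in the real test they come from iSAC itself. */
  const double with_old_tables[4] = {0.91, -0.35, 0.12, -0.04};
  const double with_new_tables[4] = {0.91, -0.35, 0.12, -0.04};
  return CompareCoeffs(with_old_tables, with_new_tables, 4, 0.0,
                       "decoded LPC") ? 1 : 0;
}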

I tried to avoid any reformatting so that the review is easier, but I know this can be a tricky CL. Hopefully, the test file helps you be more confident in the CL.

Thanks,... Turaj  

In this change list the LPC tables associated with models 1 & 2 are removed, and the necessary changes are made to other files.

The only model allowed is model number 0. Therefore, this CL breaks compatibility with iSAC versions released prior to 2.4.3. To avoid changing the bit-stream format, we still keep the model number in the bit-stream.
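
To make the decode-side behaviour concrete: the model index is still read from the bit-stream, but only the value 0 is accepted; any other value is rejected with the new ISAC_DISALLOWED_LPC_MODEL error added to settings.h in this CL. The snippet below is only a sketch of that check, not the actual decoder code.

#include <stdio.h>

#define ISAC_DISALLOWED_LPC_MODEL 6760  /* added to settings.h in this CL */

/* Accept only model 0; return a negative error code otherwise. */
int CheckLpcModel(int model_index) {
  return (model_index == 0) ? 0 : -ISAC_DISALLOWED_LPC_MODEL;
}

int main(void) {
  printf("model 0 -> %d, model 1 -> %d\n", CheckLpcModel(0), CheckLpcModel(1));
  return 0;
}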

entropy_coding.c is cleaned up; in particular, the LAR encoding path applied a KLT transform to the LPC gains, which is now removed.
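
For reference, the operation that disappears here is a two-sided KLT of the LPC log-gains. The sketch below illustrates that kind of transform, assuming a 2 x 6 gain matrix (LPC_GAIN_ORDER x SUBFRAMES) with 2 x 2 left and 6 x 6 right transforms, which is what the WebRtcIsac_kKltT1Gain[4] / WebRtcIsac_kKltT2Gain[36] table sizes suggest; it is an illustration only, not the removed entropy_coding.c code.

#define GAIN_ROWS 2
#define GAIN_COLS 6

/* out = t1 * in * t2, all matrices in row-major order. */
void KltTransformGains(const double t1[GAIN_ROWS * GAIN_ROWS],
                       const double t2[GAIN_COLS * GAIN_COLS],
                       const double in[GAIN_ROWS * GAIN_COLS],
                       double out[GAIN_ROWS * GAIN_COLS]) {
  double tmp[GAIN_ROWS * GAIN_COLS];
  int r, c, k;
  /* tmp = t1 * in */
  for (r = 0; r < GAIN_ROWS; ++r) {
    for (c = 0; c < GAIN_COLS; ++c) {
      tmp[r * GAIN_COLS + c] = 0.0;
      for (k = 0; k < GAIN_ROWS; ++k) {
        tmp[r * GAIN_COLS + c] += t1[r * GAIN_ROWS + k] * in[k * GAIN_COLS + c];
      }
    }
  }
  /* out = tmp * t2 */
  for (r = 0; r < GAIN_ROWS; ++r) {
    for (c = 0; c < GAIN_COLS; ++c) {
      out[r * GAIN_COLS + c] = 0.0;
      for (k = 0; k < GAIN_COLS; ++k) {
        out[r * GAIN_COLS + c] += tmp[r * GAIN_COLS + k] * t2[k * GAIN_COLS + c];
      }
    }
  }
}
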
Review URL: https://webrtc-codereview.appspot.com/548004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@2186 4adac7df-926f-26a2-2b94-8c16560cd09d
turaj@webrtc.org 2012-05-07 20:36:22 +00:00
parent d46fe7034b
commit fe4cfa7e5e
8 changed files with 1200 additions and 1779 deletions

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@ -42,7 +42,7 @@ WebRtcIsac_DecodeLb(
WebRtc_Word16* current_framesamples,
WebRtc_Word16 isRCUPayload)
{
int k, model;
int k;
int len, err;
WebRtc_Word16 bandwidthInd;
@ -116,7 +116,7 @@ WebRtcIsac_DecodeLb(
/* decode & dequantize FiltCoef */
err = WebRtcIsac_DecodeLpc(&ISACdecLB_obj->bitstr_obj,
lo_filt_coef,hi_filt_coef, &model);
lo_filt_coef,hi_filt_coef);
if (err < 0) { // error check
return err;
}
@ -166,14 +166,14 @@ WebRtcIsac_DecodeLb(
}
/* perceptual post-filtering (using normalized lattice filter) */
WebRtcIsac_NormLatticeFilterAr(ORDERLO,
ISACdecLB_obj->maskfiltstr_obj.PostStateLoF,
(ISACdecLB_obj->maskfiltstr_obj).PostStateLoG,
LPw_pf, lo_filt_coef, LP_dec_float);
WebRtcIsac_NormLatticeFilterAr(ORDERHI,
ISACdecLB_obj->maskfiltstr_obj.PostStateHiF,
(ISACdecLB_obj->maskfiltstr_obj).PostStateHiG,
HPw, hi_filt_coef, HP_dec_float);
WebRtcIsac_NormLatticeFilterAr(
ORDERLO, ISACdecLB_obj->maskfiltstr_obj.PostStateLoF,
(ISACdecLB_obj->maskfiltstr_obj).PostStateLoG, LPw_pf, lo_filt_coef,
LP_dec_float);
WebRtcIsac_NormLatticeFilterAr(
ORDERHI, ISACdecLB_obj->maskfiltstr_obj.PostStateHiF,
(ISACdecLB_obj->maskfiltstr_obj).PostStateHiG, HPw, hi_filt_coef,
HP_dec_float);
/* recombine the 2 bands */
WebRtcIsac_FilterAndCombineFloat(LP_dec_float, HP_dec_float,
@ -240,13 +240,13 @@ WebRtcIsac_DecodeUb16(
&ISACdecUB_obj->fftstr_obj);
/* perceptual post-filtering (using normalized lattice filter) */
WebRtcIsac_NormLatticeFilterAr(UB_LPC_ORDER,
ISACdecUB_obj->maskfiltstr_obj.PostStateLoF,
WebRtcIsac_NormLatticeFilterAr(
UB_LPC_ORDER, ISACdecUB_obj->maskfiltstr_obj.PostStateLoF,
(ISACdecUB_obj->maskfiltstr_obj).PostStateLoG, halfFrameFirst,
&percepFilterParam[(UB_LPC_ORDER+1)], signal_out);
WebRtcIsac_NormLatticeFilterAr(UB_LPC_ORDER,
ISACdecUB_obj->maskfiltstr_obj.PostStateLoF,
WebRtcIsac_NormLatticeFilterAr(
UB_LPC_ORDER, ISACdecUB_obj->maskfiltstr_obj.PostStateLoF,
(ISACdecUB_obj->maskfiltstr_obj).PostStateLoG, halfFrameSecond,
&percepFilterParam[(UB_LPC_ORDER + 1) * SUBFRAMES + (UB_LPC_ORDER+1)],
&signal_out[FRAMESAMPLES_HALF]);
@ -314,8 +314,8 @@ WebRtcIsac_DecodeUb12(
/* perceptual post-filtering (using normalized lattice filter) */
WebRtcIsac_NormLatticeFilterAr(UB_LPC_ORDER,
ISACdecUB_obj->maskfiltstr_obj.PostStateLoF,
(ISACdecUB_obj->maskfiltstr_obj).PostStateLoG, LPw,
percepFilterParam, LP_dec_float);
(ISACdecUB_obj->maskfiltstr_obj).PostStateLoG,
LPw, percepFilterParam, LP_dec_float);
/* Zerro for upper-band */
memset(HP_dec_float, 0, sizeof(float) * (FRAMESAMPLES_HALF));

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@ -134,12 +134,10 @@ WebRtcIsac_RateAllocation(
if(idx < 6)
{
*rateLBBitPerSec += (WebRtc_Word16)(idxErr *
(kLowerBandBitRate12[idx + 1] -
kLowerBandBitRate12[idx]));
*rateUBBitPerSec += (WebRtc_Word16)(idxErr *
(kUpperBandBitRate12[idx + 1] -
kUpperBandBitRate12[idx]));
*rateLBBitPerSec += (WebRtc_Word16)(
idxErr *(kLowerBandBitRate12[idx + 1] - kLowerBandBitRate12[idx]));
*rateUBBitPerSec += (WebRtc_Word16)(
idxErr *(kUpperBandBitRate12[idx + 1] - kUpperBandBitRate12[idx]));
}
*bandwidthKHz = isac12kHz;
@ -222,9 +220,6 @@ WebRtcIsac_EncodeLb(
int frame_mode; /* 0 for 30ms, 1 for 60ms */
int processed_samples, status = 0;
double bits_gains;
int bmodel;
transcode_obj transcodingParam;
double bytesLeftSpecCoding;
WebRtc_UWord16 payloadLimitBytes;
@ -246,8 +241,7 @@ WebRtcIsac_EncodeLb(
/* fill the buffer with 10ms input data */
for (k = 0; k < FRAMESAMPLES_10ms; k++) {
ISACencLB_obj->data_buffer_float[k + ISACencLB_obj->buffer_index] =
in[k];
ISACencLB_obj->data_buffer_float[k + ISACencLB_obj->buffer_index] = in[k];
}
/* if buffersize is not equal to current framesize then increase index
@ -277,9 +271,8 @@ WebRtcIsac_EncodeLb(
if((codingMode == 0) && (frame_mode == 0) &&
(ISACencLB_obj->enforceFrameSize == 0)) {
ISACencLB_obj->new_framelength =
WebRtcIsac_GetNewFrameLength(ISACencLB_obj->bottleneck,
ISACencLB_obj->current_framesamples);
ISACencLB_obj->new_framelength = WebRtcIsac_GetNewFrameLength(
ISACencLB_obj->bottleneck, ISACencLB_obj->current_framesamples);
}
ISACencLB_obj->s2nr = WebRtcIsac_GetSnr(
@ -304,11 +297,13 @@ WebRtcIsac_EncodeLb(
/* split signal in two bands */
WebRtcIsac_SplitAndFilterFloat(ISACencLB_obj->data_buffer_float, LP, HP,
LP_lookahead, HP_lookahead, &ISACencLB_obj->prefiltbankstr_obj );
LP_lookahead, HP_lookahead,
&ISACencLB_obj->prefiltbankstr_obj );
/* estimate pitch parameters and pitch-filter lookahead signal */
WebRtcIsac_PitchAnalysis(LP_lookahead, LP_lookahead_pf,
&ISACencLB_obj->pitchanalysisstr_obj, PitchLags, PitchGains);
&ISACencLB_obj->pitchanalysisstr_obj, PitchLags,
PitchGains);
/* encode in FIX Q12 */
@ -329,7 +324,8 @@ WebRtcIsac_EncodeLb(
WebRtcIsac_EncodePitchGain(PitchGains_Q12, &ISACencLB_obj->bitstr_obj,
&ISACencLB_obj->SaveEnc_obj);
WebRtcIsac_EncodePitchLag(PitchLags, PitchGains_Q12,
&ISACencLB_obj->bitstr_obj, &ISACencLB_obj->SaveEnc_obj);
&ISACencLB_obj->bitstr_obj,
&ISACencLB_obj->SaveEnc_obj);
AvgPitchGain_Q12 = (PitchGains_Q12[0] + PitchGains_Q12[1] +
PitchGains_Q12[2] + PitchGains_Q12[3]) >> 2;
@ -340,8 +336,8 @@ WebRtcIsac_EncodeLb(
PitchGains_Q12, lofilt_coef, hifilt_coef);
/* code LPC model and shape - gains not quantized yet */
WebRtcIsac_EncodeLpcLb(lofilt_coef, hifilt_coef, &bmodel, &bits_gains,
&ISACencLB_obj->bitstr_obj, &ISACencLB_obj->SaveEnc_obj);
WebRtcIsac_EncodeLpcLb(lofilt_coef, hifilt_coef, &ISACencLB_obj->bitstr_obj,
&ISACencLB_obj->SaveEnc_obj);
/* convert PitchGains back to FLOAT for pitchfilter_pre */
for (k = 0; k < 4; k++) {
@ -352,12 +348,14 @@ WebRtcIsac_EncodeLb(
transcodingParam.W_upper = ISACencLB_obj->bitstr_obj.W_upper;
transcodingParam.stream_index = ISACencLB_obj->bitstr_obj.stream_index;
transcodingParam.streamval = ISACencLB_obj->bitstr_obj.streamval;
transcodingParam.stream[0] = ISACencLB_obj->bitstr_obj.stream[
ISACencLB_obj->bitstr_obj.stream_index - 2];
transcodingParam.stream[1] = ISACencLB_obj->bitstr_obj.stream[
ISACencLB_obj->bitstr_obj.stream_index - 1];
transcodingParam.stream[2] = ISACencLB_obj->bitstr_obj.stream[
ISACencLB_obj->bitstr_obj.stream_index];
transcodingParam.stream[0] =
ISACencLB_obj->bitstr_obj.stream[ISACencLB_obj->bitstr_obj.stream_index -
2];
transcodingParam.stream[1] =
ISACencLB_obj->bitstr_obj.stream[ISACencLB_obj->bitstr_obj.stream_index -
1];
transcodingParam.stream[2] =
ISACencLB_obj->bitstr_obj.stream[ISACencLB_obj->bitstr_obj.stream_index];
/* Store LPC Gains before encoding them */
for(k = 0; k < SUBFRAMES; k++) {
@ -366,8 +364,9 @@ WebRtcIsac_EncodeLb(
}
/* Code gains */
WebRtcIsac_EncodeLpcGainLb(lofilt_coef, hifilt_coef, bmodel,
&ISACencLB_obj->bitstr_obj, &ISACencLB_obj->SaveEnc_obj);
WebRtcIsac_EncodeLpcGainLb(lofilt_coef, hifilt_coef,
&ISACencLB_obj->bitstr_obj,
&ISACencLB_obj->SaveEnc_obj);
/* Get the correct value for the payload limit and calculate the
number of bytes left for coding the spectrum.*/
@ -391,11 +390,13 @@ WebRtcIsac_EncodeLb(
/* low-band filtering */
WebRtcIsac_NormLatticeFilterMa(ORDERLO,
ISACencLB_obj->maskfiltstr_obj.PreStateLoF,
ISACencLB_obj->maskfiltstr_obj.PreStateLoG, LP, lofilt_coef, LPw);
ISACencLB_obj->maskfiltstr_obj.PreStateLoG,
LP, lofilt_coef, LPw);
/* high-band filtering */
WebRtcIsac_NormLatticeFilterMa(ORDERHI,
ISACencLB_obj->maskfiltstr_obj.PreStateHiF,
ISACencLB_obj->maskfiltstr_obj.PreStateHiG, HP, hifilt_coef, HPw);
ISACencLB_obj->maskfiltstr_obj.PreStateHiG,
HP, hifilt_coef, HPw);
/* pitch filter */
@ -405,16 +406,15 @@ WebRtcIsac_EncodeLb(
/* transform */
WebRtcIsac_Time2Spec(LPw_pf, HPw, fre, fim, &ISACencLB_obj->fftstr_obj);
/* Save data for multiple packets memory */
for (k = 0; k < FRAMESAMPLES_HALF; k++) {
ISACencLB_obj->SaveEnc_obj.fre[k +
ISACencLB_obj->SaveEnc_obj.startIdx*FRAMESAMPLES_HALF] = fre[k];
ISACencLB_obj->SaveEnc_obj.fim[k +
ISACencLB_obj->SaveEnc_obj.startIdx*FRAMESAMPLES_HALF] = fim[k];
ISACencLB_obj->SaveEnc_obj.fre[k + ISACencLB_obj->SaveEnc_obj.startIdx *
FRAMESAMPLES_HALF] = fre[k];
ISACencLB_obj->SaveEnc_obj.fim[k + ISACencLB_obj->SaveEnc_obj.startIdx *
FRAMESAMPLES_HALF] = fim[k];
}
ISACencLB_obj->SaveEnc_obj.AvgPitchGain[
ISACencLB_obj->SaveEnc_obj.startIdx] = AvgPitchGain_Q12;
ISACencLB_obj->SaveEnc_obj.AvgPitchGain[ISACencLB_obj->SaveEnc_obj.startIdx] =
AvgPitchGain_Q12;
/* quantization and lossless coding */
err = WebRtcIsac_EncodeSpecLb(fre, fim, &ISACencLB_obj->bitstr_obj,
@ -476,10 +476,8 @@ WebRtcIsac_EncodeLb(
transcodingParam.loFiltGain[k] * transcodeScale;
hifilt_coef[(LPC_HIBAND_ORDER+1) * k] =
transcodingParam.hiFiltGain[k] * transcodeScale;
transcodingParam.loFiltGain[k] =
lofilt_coef[(LPC_LOBAND_ORDER+1) * k];
transcodingParam.hiFiltGain[k] =
hifilt_coef[(LPC_HIBAND_ORDER+1) * k];
transcodingParam.loFiltGain[k] = lofilt_coef[(LPC_LOBAND_ORDER+1) * k];
transcodingParam.hiFiltGain[k] = hifilt_coef[(LPC_HIBAND_ORDER+1) * k];
}
/* Scale DFT coefficients */
@ -490,12 +488,10 @@ WebRtcIsac_EncodeLb(
/* Save data for multiple packets memory */
for (k = 0; k < FRAMESAMPLES_HALF; k++) {
ISACencLB_obj->SaveEnc_obj.fre[k +
ISACencLB_obj->SaveEnc_obj.startIdx * FRAMESAMPLES_HALF] =
fre[k];
ISACencLB_obj->SaveEnc_obj.fim[k +
ISACencLB_obj->SaveEnc_obj.startIdx * FRAMESAMPLES_HALF] =
fim[k];
ISACencLB_obj->SaveEnc_obj.fre[k + ISACencLB_obj->SaveEnc_obj.startIdx *
FRAMESAMPLES_HALF] = fre[k];
ISACencLB_obj->SaveEnc_obj.fim[k + ISACencLB_obj->SaveEnc_obj.startIdx *
FRAMESAMPLES_HALF] = fim[k];
}
/* Re-store the state of arithmetic coder before coding LPC gains */
@ -510,8 +506,9 @@ WebRtcIsac_EncodeLb(
transcodingParam.stream[2];
/* Code gains */
WebRtcIsac_EncodeLpcGainLb(lofilt_coef, hifilt_coef, bmodel,
&ISACencLB_obj->bitstr_obj, &ISACencLB_obj->SaveEnc_obj);
WebRtcIsac_EncodeLpcGainLb(lofilt_coef, hifilt_coef,
&ISACencLB_obj->bitstr_obj,
&ISACencLB_obj->SaveEnc_obj);
/* Update the number of bytes left for encoding the spectrum */
bytesLeftSpecCoding = payloadLimitBytes -
@ -626,8 +623,7 @@ WebRtcIsac_EncodeUb16(
/* To be used for Redundant Coding */
WebRtcIsac_EncodeJitterInfo(jitterInfo, &ISACencUB_obj->bitstr_obj);
status = WebRtcIsac_EncodeBandwidth(isac16kHz,
&ISACencUB_obj->bitstr_obj);
status = WebRtcIsac_EncodeBandwidth(isac16kHz, &ISACencUB_obj->bitstr_obj);
if (status < 0) {
return status;
}
@ -651,8 +647,8 @@ WebRtcIsac_EncodeUb16(
/* code LPC model and shape - gains not quantized yet */
WebRtcIsac_EncodeLpcUB(lpcVecs, &ISACencUB_obj->bitstr_obj,
percepFilterParams, isac16kHz, &ISACencUB_obj->SaveEnc_obj);
percepFilterParams, isac16kHz,
&ISACencUB_obj->SaveEnc_obj);
// the first set of lpc parameters are from the last sub-frame of
// the previous frame. so we don't care about them
@ -663,12 +659,14 @@ WebRtcIsac_EncodeUb16(
transcodingParam.stream_index = ISACencUB_obj->bitstr_obj.stream_index;
transcodingParam.W_upper = ISACencUB_obj->bitstr_obj.W_upper;
transcodingParam.streamval = ISACencUB_obj->bitstr_obj.streamval;
transcodingParam.stream[0] = ISACencUB_obj->bitstr_obj.stream[
ISACencUB_obj->bitstr_obj.stream_index - 2];
transcodingParam.stream[1] = ISACencUB_obj->bitstr_obj.stream[
ISACencUB_obj->bitstr_obj.stream_index - 1];
transcodingParam.stream[2] = ISACencUB_obj->bitstr_obj.stream[
ISACencUB_obj->bitstr_obj.stream_index];
transcodingParam.stream[0] =
ISACencUB_obj->bitstr_obj.stream[ISACencUB_obj->bitstr_obj.stream_index -
2];
transcodingParam.stream[1] =
ISACencUB_obj->bitstr_obj.stream[ISACencUB_obj->bitstr_obj.stream_index -
1];
transcodingParam.stream[2] =
ISACencUB_obj->bitstr_obj.stream[ISACencUB_obj->bitstr_obj.stream_index];
/* Store LPC Gains before encoding them */
for(k = 0; k < SUBFRAMES; k++) {
@ -677,11 +675,13 @@ WebRtcIsac_EncodeUb16(
}
// Store the gains for multiple encoding
memcpy(ISACencUB_obj->SaveEnc_obj.lpcGain, lpcGains, (SUBFRAMES << 1) * sizeof(double));
memcpy(ISACencUB_obj->SaveEnc_obj.lpcGain, lpcGains,
(SUBFRAMES << 1) * sizeof(double));
WebRtcIsac_EncodeLpcGainUb(lpcGains, &ISACencUB_obj->bitstr_obj,
ISACencUB_obj->SaveEnc_obj.lpcGainIndex);
WebRtcIsac_EncodeLpcGainUb(&lpcGains[SUBFRAMES], &ISACencUB_obj->bitstr_obj,
WebRtcIsac_EncodeLpcGainUb(
&lpcGains[SUBFRAMES], &ISACencUB_obj->bitstr_obj,
&ISACencUB_obj->SaveEnc_obj.lpcGainIndex[SUBFRAMES]);
/* Get the correct value for the payload limit and calculate the number of
@ -707,22 +707,21 @@ WebRtcIsac_EncodeUb16(
&LP_lookahead[0]);
/* Second half-frame filtering */
WebRtcIsac_NormLatticeFilterMa(UB_LPC_ORDER,
ISACencUB_obj->maskfiltstr_obj.PreStateLoF,
WebRtcIsac_NormLatticeFilterMa(
UB_LPC_ORDER, ISACencUB_obj->maskfiltstr_obj.PreStateLoF,
ISACencUB_obj->maskfiltstr_obj.PreStateLoG,
&ISACencUB_obj->data_buffer_float[FRAMESAMPLES_HALF],
&percepFilterParams[(UB_LPC_ORDER + 1) + SUBFRAMES *
(UB_LPC_ORDER + 1)], &LP_lookahead[FRAMESAMPLES_HALF]);
&percepFilterParams[(UB_LPC_ORDER + 1) + SUBFRAMES * (UB_LPC_ORDER + 1)],
&LP_lookahead[FRAMESAMPLES_HALF]);
WebRtcIsac_Time2Spec(&LP_lookahead[0], &LP_lookahead[FRAMESAMPLES_HALF],
fre, fim, &ISACencUB_obj->fftstr_obj);
//Store FFT coefficients for multiple encoding
memcpy(&ISACencUB_obj->SaveEnc_obj.realFFT, fre,
FRAMESAMPLES_HALF * sizeof(WebRtc_Word16));
memcpy(&ISACencUB_obj->SaveEnc_obj.imagFFT, fim,
FRAMESAMPLES_HALF * sizeof(WebRtc_Word16));
memcpy(&ISACencUB_obj->SaveEnc_obj.realFFT, fre, FRAMESAMPLES_HALF *
sizeof(WebRtc_Word16));
memcpy(&ISACencUB_obj->SaveEnc_obj.imagFFT, fim, FRAMESAMPLES_HALF *
sizeof(WebRtc_Word16));
// Prepare the audio buffer for the next packet
// move the last 3 ms to the beginning of the buffer
@ -734,8 +733,8 @@ WebRtcIsac_EncodeUb16(
ISACencUB_obj->buffer_index = LB_TOTAL_DELAY_SAMPLES;
// Save the bit-stream object at this point for FEC.
memcpy(&ISACencUB_obj->SaveEnc_obj.bitStreamObj,
&ISACencUB_obj->bitstr_obj, sizeof(Bitstr));
memcpy(&ISACencUB_obj->SaveEnc_obj.bitStreamObj, &ISACencUB_obj->bitstr_obj,
sizeof(Bitstr));
/* quantization and lossless coding */
err = WebRtcIsac_EncodeSpecUB16(fre, fim, &ISACencUB_obj->bitstr_obj);
@ -782,26 +781,20 @@ WebRtcIsac_EncodeUb16(
}
//Store FFT coefficients for multiple encoding
memcpy(&ISACencUB_obj->SaveEnc_obj.realFFT, fre,
FRAMESAMPLES_HALF * sizeof(WebRtc_Word16));
memcpy(&ISACencUB_obj->SaveEnc_obj.imagFFT, fim,
FRAMESAMPLES_HALF * sizeof(WebRtc_Word16));
memcpy(&ISACencUB_obj->SaveEnc_obj.realFFT, fre, FRAMESAMPLES_HALF *
sizeof(WebRtc_Word16));
memcpy(&ISACencUB_obj->SaveEnc_obj.imagFFT, fim, FRAMESAMPLES_HALF *
sizeof(WebRtc_Word16));
/* Store the state of arithmetic coder before coding LPC gains */
ISACencUB_obj->bitstr_obj.W_upper = transcodingParam.W_upper;
ISACencUB_obj->bitstr_obj.stream_index = transcodingParam.stream_index;
ISACencUB_obj->bitstr_obj.streamval = transcodingParam.streamval;
ISACencUB_obj->bitstr_obj.stream[transcodingParam.stream_index - 2] =
transcodingParam.stream[0];
ISACencUB_obj->bitstr_obj.stream[transcodingParam.stream_index - 1] =
transcodingParam.stream[1];
ISACencUB_obj->bitstr_obj.stream[transcodingParam.stream_index] =
transcodingParam.stream[2];
@ -812,8 +805,8 @@ WebRtcIsac_EncodeUb16(
WebRtcIsac_EncodeLpcGainUb(transcodingParam.loFiltGain,
&ISACencUB_obj->bitstr_obj,
ISACencUB_obj->SaveEnc_obj.lpcGainIndex);
WebRtcIsac_EncodeLpcGainUb(transcodingParam.hiFiltGain,
&ISACencUB_obj->bitstr_obj,
WebRtcIsac_EncodeLpcGainUb(
transcodingParam.hiFiltGain, &ISACencUB_obj->bitstr_obj,
&ISACencUB_obj->SaveEnc_obj.lpcGainIndex[SUBFRAMES]);
/* Update the number of bytes left for encoding the spectrum */
@ -880,8 +873,7 @@ WebRtcIsac_EncodeUb12(
/* fill the buffer with 10ms input data */
for (k=0; k<FRAMESAMPLES_10ms; k++) {
ISACencUB_obj->data_buffer_float[k + ISACencUB_obj->buffer_index] =
in[k];
ISACencUB_obj->data_buffer_float[k + ISACencUB_obj->buffer_index] = in[k];
}
/* if buffer-size is not equal to current frame-size then increase the
@ -909,19 +901,17 @@ WebRtcIsac_EncodeUb12(
/* To be used for Redundant Coding */
WebRtcIsac_EncodeJitterInfo(jitterInfo, &ISACencUB_obj->bitstr_obj);
status = WebRtcIsac_EncodeBandwidth(isac12kHz,
&ISACencUB_obj->bitstr_obj);
status = WebRtcIsac_EncodeBandwidth(isac12kHz, &ISACencUB_obj->bitstr_obj);
if (status < 0) {
return status;
}
s2nr = WebRtcIsac_GetSnr(ISACencUB_obj->bottleneck,
FRAMESAMPLES);
s2nr = WebRtcIsac_GetSnr(ISACencUB_obj->bottleneck, FRAMESAMPLES);
/* split signal in two bands */
WebRtcIsac_SplitAndFilterFloat(ISACencUB_obj->data_buffer_float, HP, LP,
HP_lookahead, LP_lookahead, &ISACencUB_obj->prefiltbankstr_obj);
HP_lookahead, LP_lookahead,
&ISACencUB_obj->prefiltbankstr_obj);
/* find coefficients for perceptual pre-filters */
WebRtcIsac_GetLpcCoefUb(LP_lookahead, &ISACencUB_obj->maskfiltstr_obj,
@ -929,26 +919,24 @@ WebRtcIsac_EncodeUb12(
/* code LPC model and shape - gains not quantized yet */
WebRtcIsac_EncodeLpcUB(lpcVecs, &ISACencUB_obj->bitstr_obj,
percepFilterParams, isac12kHz, &ISACencUB_obj->SaveEnc_obj);
percepFilterParams, isac12kHz,
&ISACencUB_obj->SaveEnc_obj);
WebRtcIsac_GetLpcGain(s2nr, percepFilterParams, SUBFRAMES, lpcGains,
corr, varscale);
WebRtcIsac_GetLpcGain(s2nr, percepFilterParams, SUBFRAMES, lpcGains, corr,
varscale);
/* Store the state of arithmetic coder before coding LPC gains */
transcodingParam.W_upper = ISACencUB_obj->bitstr_obj.W_upper;
transcodingParam.stream_index = ISACencUB_obj->bitstr_obj.stream_index;
transcodingParam.streamval = ISACencUB_obj->bitstr_obj.streamval;
transcodingParam.stream[0] = ISACencUB_obj->bitstr_obj.stream[
ISACencUB_obj->bitstr_obj.stream_index - 2];
transcodingParam.stream[1] = ISACencUB_obj->bitstr_obj.stream[
ISACencUB_obj->bitstr_obj.stream_index - 1];
transcodingParam.stream[2] = ISACencUB_obj->bitstr_obj.stream[
ISACencUB_obj->bitstr_obj.stream_index];
transcodingParam.stream[0] =
ISACencUB_obj->bitstr_obj.stream[ISACencUB_obj->bitstr_obj.stream_index -
2];
transcodingParam.stream[1] =
ISACencUB_obj->bitstr_obj.stream[ISACencUB_obj->bitstr_obj.stream_index -
1];
transcodingParam.stream[2] =
ISACencUB_obj->bitstr_obj.stream[ISACencUB_obj->bitstr_obj.stream_index];
/* Store LPC Gains before encoding them */
for(k = 0; k < SUBFRAMES; k++) {
@ -970,8 +958,8 @@ WebRtcIsac_EncodeUb12(
/* low-band filtering */
WebRtcIsac_NormLatticeFilterMa(UB_LPC_ORDER,
ISACencUB_obj->maskfiltstr_obj.PreStateLoF,
ISACencUB_obj->maskfiltstr_obj.PreStateLoG, LP, percepFilterParams,
LPw);
ISACencUB_obj->maskfiltstr_obj.PreStateLoG, LP,
percepFilterParams, LPw);
/* Get the correct value for the payload limit and calculate the number
of bytes left for coding the spectrum. It is a 30ms frame Subract 3
@ -987,12 +975,12 @@ WebRtcIsac_EncodeUb12(
WebRtcIsac_Time2Spec(LPw, HPw, fre, fim, &ISACencUB_obj->fftstr_obj);
//Store real FFT coefficients for multiple encoding
memcpy(&ISACencUB_obj->SaveEnc_obj.realFFT, fre,
FRAMESAMPLES_HALF * sizeof(WebRtc_Word16));
memcpy(&ISACencUB_obj->SaveEnc_obj.realFFT, fre, FRAMESAMPLES_HALF *
sizeof(WebRtc_Word16));
//Store imaginary FFT coefficients for multiple encoding
memcpy(&ISACencUB_obj->SaveEnc_obj.imagFFT, fim,
FRAMESAMPLES_HALF * sizeof(WebRtc_Word16));
memcpy(&ISACencUB_obj->SaveEnc_obj.imagFFT, fim, FRAMESAMPLES_HALF *
sizeof(WebRtc_Word16));
// Save the bit-stream object at this point for FEC.
memcpy(&ISACencUB_obj->SaveEnc_obj.bitStreamObj,
@ -1043,27 +1031,22 @@ WebRtcIsac_EncodeUb12(
}
//Store real FFT coefficients for multiple encoding
memcpy(&ISACencUB_obj->SaveEnc_obj.realFFT, fre,
FRAMESAMPLES_HALF * sizeof(WebRtc_Word16));
memcpy(&ISACencUB_obj->SaveEnc_obj.realFFT, fre, FRAMESAMPLES_HALF *
sizeof(WebRtc_Word16));
//Store imaginary FFT coefficients for multiple encoding
memcpy(&ISACencUB_obj->SaveEnc_obj.imagFFT, fim,
FRAMESAMPLES_HALF * sizeof(WebRtc_Word16));
memcpy(&ISACencUB_obj->SaveEnc_obj.imagFFT, fim, FRAMESAMPLES_HALF *
sizeof(WebRtc_Word16));
/* Re-store the state of arithmetic coder before coding LPC gains */
ISACencUB_obj->bitstr_obj.W_upper = transcodingParam.W_upper;
ISACencUB_obj->bitstr_obj.stream_index = transcodingParam.stream_index;
ISACencUB_obj->bitstr_obj.streamval = transcodingParam.streamval;
ISACencUB_obj->bitstr_obj.stream[transcodingParam.stream_index - 2] =
transcodingParam.stream[0];
ISACencUB_obj->bitstr_obj.stream[transcodingParam.stream_index - 1] =
transcodingParam.stream[1];
ISACencUB_obj->bitstr_obj.stream[transcodingParam.stream_index] =
transcodingParam.stream[2];
@ -1163,10 +1146,8 @@ int WebRtcIsac_EncodeStoredDataLb(
for (ii = 0;
ii < (FRAMESAMPLES_HALF * (1 + ISACSavedEnc_obj->startIdx));
ii++) {
tmp_fre[ii] = (WebRtc_Word16)((scale) *
(float)ISACSavedEnc_obj->fre[ii]) ;
tmp_fim[ii] = (WebRtc_Word16)((scale) *
(float)ISACSavedEnc_obj->fim[ii]) ;
tmp_fre[ii] = (WebRtc_Word16)((scale) * (float)ISACSavedEnc_obj->fre[ii]);
tmp_fim[ii] = (WebRtc_Word16)((scale) * (float)ISACSavedEnc_obj->fim[ii]);
}
} else {
for (ii = 0;
@ -1190,7 +1171,8 @@ int WebRtcIsac_EncodeStoredDataLb(
/* encode pitch gains */
*WebRtcIsac_kQPitchGainCdf_ptr = WebRtcIsac_kQPitchGainCdf;
WebRtcIsac_EncHistMulti(ISACBitStr_obj,
&ISACSavedEnc_obj->pitchGain_index[ii], WebRtcIsac_kQPitchGainCdf_ptr, 1);
&ISACSavedEnc_obj->pitchGain_index[ii],
WebRtcIsac_kQPitchGainCdf_ptr, 1);
/* entropy coding of quantization pitch lags */
/* voicing classificiation */
@ -1202,36 +1184,40 @@ int WebRtcIsac_EncodeStoredDataLb(
cdf = WebRtcIsac_kQPitchLagCdfPtrHi;
}
WebRtcIsac_EncHistMulti(ISACBitStr_obj,
&ISACSavedEnc_obj->pitchIndex[PITCH_SUBFRAMES*ii], cdf,
PITCH_SUBFRAMES);
&ISACSavedEnc_obj->pitchIndex[PITCH_SUBFRAMES*ii],
cdf, PITCH_SUBFRAMES);
/* LPC */
{
/* Only one model exists. The entropy coding is done only for backward
* compatibility.
*/
const int kModel = 0;
/* entropy coding of model number */
WebRtcIsac_EncHistMulti(ISACBitStr_obj,
&ISACSavedEnc_obj->LPCmodel[ii], WebRtcIsac_kQKltModelCdfPtr, 1);
&kModel, WebRtcIsac_kQKltModelCdfPtr, 1);
}
/* entropy coding of quantization indices - LPC shape only */
WebRtcIsac_EncHistMulti(ISACBitStr_obj,
&ISACSavedEnc_obj->LPCindex_s[KLT_ORDER_SHAPE*ii],
WebRtcIsac_kQKltCdfPtrShape[ISACSavedEnc_obj->LPCmodel[ii]],
WebRtcIsac_kQKltCdfPtrShape,
KLT_ORDER_SHAPE);
/* If transcoding, get new LPC gain indices */
if (scale < 1.0) {
WebRtcIsac_TranscodeLPCCoef(&tmpLPCcoeffs_lo[(ORDERLO+1) *
SUBFRAMES*ii], &tmpLPCcoeffs_hi[(ORDERHI+1)*SUBFRAMES*ii],
ISACSavedEnc_obj->LPCmodel[ii],
WebRtcIsac_TranscodeLPCCoef(&tmpLPCcoeffs_lo[(ORDERLO+1) * SUBFRAMES*ii],
&tmpLPCcoeffs_hi[(ORDERHI+1)*SUBFRAMES*ii],
&tmpLPCindex_g[KLT_ORDER_GAIN * ii]);
}
/* entropy coding of quantization indices - LPC gain */
WebRtcIsac_EncHistMulti(ISACBitStr_obj,
&tmpLPCindex_g[KLT_ORDER_GAIN*ii], WebRtcIsac_kQKltCdfPtrGain[
ISACSavedEnc_obj->LPCmodel[ii]], KLT_ORDER_GAIN);
WebRtcIsac_EncHistMulti(ISACBitStr_obj, &tmpLPCindex_g[KLT_ORDER_GAIN*ii],
WebRtcIsac_kQKltCdfPtrGain, KLT_ORDER_GAIN);
/* quantization and lossless coding */
status = WebRtcIsac_EncodeSpecLb(&tmp_fre[ii*FRAMESAMPLES_HALF],
&tmp_fim[ii*FRAMESAMPLES_HALF], ISACBitStr_obj,
&tmp_fim[ii*FRAMESAMPLES_HALF],
ISACBitStr_obj,
ISACSavedEnc_obj->AvgPitchGain[ii]);
if (status < 0) {
return status;
@ -1273,8 +1259,8 @@ int WebRtcIsac_EncodeStoredDataUb12(
// Encode LPC-shape
WebRtcIsac_EncHistMulti(bitStream, ISACSavedEnc_obj->indexLPCShape,
WebRtcIsac_kLpcShapeCdfMatUb12, UB_LPC_ORDER * UB_LPC_VEC_PER_FRAME);
WebRtcIsac_kLpcShapeCdfMatUb12,
UB_LPC_ORDER * UB_LPC_VEC_PER_FRAME);
// we only consider scales between zero and one.
if((scale <= 0.0) || (scale > 1.0))
@ -1302,8 +1288,10 @@ int WebRtcIsac_EncodeStoredDataUb12(
WebRtcIsac_StoreLpcGainUb(lpcGain, bitStream);
for(n = 0; n < FRAMESAMPLES_HALF; n++)
{
realFFT[n] = (WebRtc_Word16)(scale * (float)ISACSavedEnc_obj->realFFT[n] + 0.5f);
imagFFT[n] = (WebRtc_Word16)(scale * (float)ISACSavedEnc_obj->imagFFT[n] + 0.5f);
realFFT[n] = (WebRtc_Word16)(scale * (float)ISACSavedEnc_obj->realFFT[n] +
0.5f);
imagFFT[n] = (WebRtc_Word16)(scale * (float)ISACSavedEnc_obj->imagFFT[n] +
0.5f);
}
// store FFT coefficients
err = WebRtcIsac_EncodeSpecUB12(realFFT, imagFFT, bitStream);
@ -1347,7 +1335,8 @@ WebRtcIsac_EncodeStoredDataUb16(
}
WebRtcIsac_EncHistMulti(bitStream, ISACSavedEnc_obj->indexLPCShape,
WebRtcIsac_kLpcShapeCdfMatUb16, UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME);
WebRtcIsac_kLpcShapeCdfMatUb16,
UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME);
// we only consider scales between zero and one.
if((scale <= 0.0) || (scale > 1.0))
@ -1360,12 +1349,12 @@ WebRtcIsac_EncodeStoredDataUb16(
// store gains
WebRtcIsac_EncHistMulti(bitStream, ISACSavedEnc_obj->lpcGainIndex,
WebRtcIsac_kLpcGainCdfMat, UB_LPC_GAIN_DIM);
WebRtcIsac_EncHistMulti(bitStream, &ISACSavedEnc_obj->lpcGainIndex[SUBFRAMES],
WebRtcIsac_EncHistMulti(bitStream,
&ISACSavedEnc_obj->lpcGainIndex[SUBFRAMES],
WebRtcIsac_kLpcGainCdfMat, UB_LPC_GAIN_DIM);
// store FFT coefficients
err = WebRtcIsac_EncodeSpecUB16(ISACSavedEnc_obj->realFFT,
ISACSavedEnc_obj->imagFFT, bitStream);
}
else
{
@ -1381,8 +1370,10 @@ WebRtcIsac_EncodeStoredDataUb16(
/* scale FFT coefficients */
for(n = 0; n < FRAMESAMPLES_HALF; n++)
{
realFFT[n] = (WebRtc_Word16)(scale * (float)ISACSavedEnc_obj->realFFT[n] + 0.5f);
imagFFT[n] = (WebRtc_Word16)(scale * (float)ISACSavedEnc_obj->imagFFT[n] + 0.5f);
realFFT[n] = (WebRtc_Word16)(scale * (float)ISACSavedEnc_obj->realFFT[n] +
0.5f);
imagFFT[n] = (WebRtc_Word16)(scale * (float)ISACSavedEnc_obj->imagFFT[n] +
0.5f);
}
// store FFT coefficients
err = WebRtcIsac_EncodeSpecUB16(realFFT, imagFFT, bitStream);

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@ -143,18 +143,22 @@ int WebRtcIsac_EncodeSpecUB12(
/* decode & dequantize LPC Coef */
int WebRtcIsac_DecodeLpcCoef(Bitstr *streamdata, double *LPCCoef, int *outmodel);
int WebRtcIsac_DecodeLpcCoef(Bitstr *streamdata, double *LPCCoef);
int WebRtcIsac_DecodeLpcCoefUB(
Bitstr* streamdata,
double* lpcVecs,
double* percepFilterGains,
WebRtc_Word16 bandwidth);
int WebRtcIsac_DecodeLpc(Bitstr *streamdata, double *LPCCoef_lo, double *LPCCoef_hi, int *outmodel);
int WebRtcIsac_DecodeLpc(Bitstr *streamdata, double *LPCCoef_lo,
double *LPCCoef_hi);
/* quantize & code LPC Coef */
void WebRtcIsac_EncodeLpcLb(double *LPCCoef_lo, double *LPCCoef_hi, int *model, double *size, Bitstr *streamdata, ISAC_SaveEncData_t* encData);
void WebRtcIsac_EncodeLpcGainLb(double *LPCCoef_lo, double *LPCCoef_hi, int model, Bitstr *streamdata, ISAC_SaveEncData_t* encData);
void WebRtcIsac_EncodeLpcLb(double *LPCCoef_lo, double *LPCCoef_hi,
Bitstr *streamdata, ISAC_SaveEncData_t* encData);
void WebRtcIsac_EncodeLpcGainLb(double *LPCCoef_lo, double *LPCCoef_hi,
Bitstr *streamdata,
ISAC_SaveEncData_t* encData);
/******************************************************************************
* WebRtcIsac_EncodeLpcUB()
@ -241,12 +245,17 @@ int WebRtcIsac_DecodeGain2(Bitstr *streamdata, WebRtc_Word32 *Gain2);
/* quantize & code squared Gain (input is squared gain) */
int WebRtcIsac_EncodeGain2(WebRtc_Word32 *gain2, Bitstr *streamdata);
void WebRtcIsac_EncodePitchGain(WebRtc_Word16* PitchGains_Q12, Bitstr* streamdata, ISAC_SaveEncData_t* encData);
void WebRtcIsac_EncodePitchGain(WebRtc_Word16* PitchGains_Q12,
Bitstr* streamdata,
ISAC_SaveEncData_t* encData);
void WebRtcIsac_EncodePitchLag(double* PitchLags, WebRtc_Word16* PitchGain_Q12, Bitstr* streamdata, ISAC_SaveEncData_t* encData);
void WebRtcIsac_EncodePitchLag(double* PitchLags, WebRtc_Word16* PitchGain_Q12,
Bitstr* streamdata, ISAC_SaveEncData_t* encData);
int WebRtcIsac_DecodePitchGain(Bitstr *streamdata, WebRtc_Word16 *PitchGain_Q12);
int WebRtcIsac_DecodePitchLag(Bitstr *streamdata, WebRtc_Word16 *PitchGain_Q12, double *PitchLag);
int WebRtcIsac_DecodePitchGain(Bitstr *streamdata,
WebRtc_Word16 *PitchGain_Q12);
int WebRtcIsac_DecodePitchLag(Bitstr *streamdata, WebRtc_Word16 *PitchGain_Q12,
double *PitchLag);
int WebRtcIsac_DecodeFrameLen(Bitstr *streamdata, WebRtc_Word16 *framelength);
int WebRtcIsac_EncodeFrameLen(WebRtc_Word16 framelength, Bitstr *streamdata);
@ -259,7 +268,7 @@ void WebRtcIsac_Poly2Rc(double *a, int N, double *RC);
/* step-up */
void WebRtcIsac_Rc2Poly(double *RC, int N, double *a);
void WebRtcIsac_TranscodeLPCCoef(double *LPCCoef_lo, double *LPCCoef_hi, int model,
void WebRtcIsac_TranscodeLPCCoef(double *LPCCoef_lo, double *LPCCoef_hi,
int *index_g);

File diff suppressed because it is too large

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@ -37,10 +37,6 @@
#define KLT_ORDER_GAIN (LPC_GAIN_ORDER * SUBFRAMES)
#define KLT_ORDER_SHAPE (LPC_SHAPE_ORDER * SUBFRAMES)
/* indices of KLT coefficients used */
extern const WebRtc_UWord16 WebRtcIsac_kQKltSelIndGain[12];
extern const WebRtc_UWord16 WebRtcIsac_kQKltSelIndShape[108];
/* cdf array for model indicator */
extern const WebRtc_UWord16 WebRtcIsac_kQKltModelCdf[KLT_NUM_MODELS+1];
@ -62,53 +58,43 @@ extern const WebRtc_UWord16 WebRtcIsac_kQKltMaxIndGain[12];
extern const WebRtc_UWord16 WebRtcIsac_kQKltMaxIndShape[108];
/* index offset */
extern const WebRtc_UWord16 WebRtcIsac_kQKltOffsetGain[KLT_NUM_MODELS][12];
extern const WebRtc_UWord16 WebRtcIsac_kQKltOffsetGain[12];
extern const WebRtc_UWord16 WebRtcIsac_kQKltOffsetShape[KLT_NUM_MODELS][108];
extern const WebRtc_UWord16 WebRtcIsac_kQKltOffsetShape[108];
/* initial cdf index for KLT coefficients */
extern const WebRtc_UWord16 WebRtcIsac_kQKltInitIndexGain[KLT_NUM_MODELS][12];
extern const WebRtc_UWord16 WebRtcIsac_kQKltInitIndexGain[12];
extern const WebRtc_UWord16 WebRtcIsac_kQKltInitIndexShape[KLT_NUM_MODELS][108];
/* offsets for quantizer representation levels */
extern const WebRtc_UWord16 WebRtcIsac_kQKltOfLevelsGain[3];
extern const WebRtc_UWord16 WebRtcIsac_kQKltOfLevelsShape[3];
extern const WebRtc_UWord16 WebRtcIsac_kQKltInitIndexShape[108];
/* quantizer representation levels */
extern const double WebRtcIsac_kQKltLevelsGain[1176];
extern const double WebRtcIsac_kQKltLevelsGain[392];
extern const double WebRtcIsac_kQKltLevelsShape[1735];
extern const double WebRtcIsac_kQKltLevelsShape[578];
/* cdf tables for quantizer indices */
extern const WebRtc_UWord16 WebRtcIsac_kQKltCdfGain[1212];
extern const WebRtc_UWord16 WebRtcIsac_kQKltCdfGain[404];
extern const WebRtc_UWord16 WebRtcIsac_kQKltCdfShape[2059];
extern const WebRtc_UWord16 WebRtcIsac_kQKltCdfShape[686];
/* pointers to cdf tables for quantizer indices */
extern const WebRtc_UWord16 *WebRtcIsac_kQKltCdfPtrGain[KLT_NUM_MODELS][12];
extern const WebRtc_UWord16 *WebRtcIsac_kQKltCdfPtrGain[12];
extern const WebRtc_UWord16 *WebRtcIsac_kQKltCdfPtrShape[KLT_NUM_MODELS][108];
/* code length for all coefficients using different models */
extern const double WebRtcIsac_kQKltCodeLenGain[392];
extern const double WebRtcIsac_kQKltCodeLenShape[578];
extern const WebRtc_UWord16 *WebRtcIsac_kQKltCdfPtrShape[108];
/* left KLT transforms */
extern const double WebRtcIsac_kKltT1Gain[KLT_NUM_MODELS][4];
extern const double WebRtcIsac_kKltT1Gain[4];
extern const double WebRtcIsac_kKltT1Shape[KLT_NUM_MODELS][324];
extern const double WebRtcIsac_kKltT1Shape[324];
/* right KLT transforms */
extern const double WebRtcIsac_kKltT2Gain[KLT_NUM_MODELS][36];
extern const double WebRtcIsac_kKltT2Gain[36];
extern const double WebRtcIsac_kKltT2Shape[KLT_NUM_MODELS][36];
extern const double WebRtcIsac_kKltT2Shape[36];
/* means of log gains and LAR coefficients */
extern const double WebRtcIsac_kLpcMeansGain[KLT_NUM_MODELS][12];
extern const double WebRtcIsac_kLpcMeansGain[12];
extern const double WebRtcIsac_kLpcMeansShape[KLT_NUM_MODELS][108];
extern const double WebRtcIsac_kLpcMeansShape[108];
#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_TABLES_H_ */

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@ -30,15 +30,16 @@
/* do not modify the following; this will have to be modified if we have a 20ms framesize option */
/*************************************************************************************************/
/* do not modify the following; this will have to be modified if we
* have a 20ms framesize option */
/**********************************************************************/
/* miliseconds */
#define FRAMESIZE 30
/* number of samples per frame processed in the encoder, 480 */
#define FRAMESAMPLES 480 /* ((FRAMESIZE*FS)/1000) */
#define FRAMESAMPLES_HALF 240
#define FRAMESAMPLES_QUARTER 120
/*************************************************************************************************/
/**********************************************************************/
@ -56,7 +57,8 @@
#define UPDATE 80
/* length of half a subframe (low/high band) */
#define HALF_SUBFRAMELEN (UPDATE/2)
/* samples of look ahead (in a half-band, so actually half the samples of look ahead @ FS) */
/* samples of look ahead (in a half-band, so actually
* half the samples of look ahead @ FS) */
#define QLOOKAHEAD 24 /* 3 ms */
/* order of AR model in spectral entropy coder */
#define AR_ORDER 6
@ -95,9 +97,10 @@ enum ISACBand{isacLowerBand = 0, isacUpperBand = 1};
/* array size for byte stream in number of bytes. */
#define STREAM_SIZE_MAX 600 /* The old maximum size still needed for the decoding */
#define STREAM_SIZE_MAX_30 200 /* 200 bytes = 53.4 kbit/s @ 30 ms.framelength */
#define STREAM_SIZE_MAX_60 400 /* 400 bytes = 53.4 kbit/s @ 60 ms.framelength */
/* The old maximum size still needed for the decoding */
#define STREAM_SIZE_MAX 600
#define STREAM_SIZE_MAX_30 200 /* 200 bytes=53.4 kbps @ 30 ms.framelength */
#define STREAM_SIZE_MAX_60 400 /* 400 bytes=53.4 kbps @ 60 ms.framelength */
/* storage size for bit counts */
#define BIT_COUNTER_SIZE 30
@ -129,7 +132,8 @@ enum ISACBand{isacLowerBand = 0, isacUpperBand = 1};
#define PITCH_WLPCASYM 0.3 /* asymmetry parameter */
#define PITCH_WLPCBUFLEN PITCH_WLPCWINLEN
/* For pitch filter */
#define PITCH_BUFFSIZE (PITCH_MAX_LAG + 50) /* Extra 50 for fraction and LP filters */
/* Extra 50 for fraction and LP filters */
#define PITCH_BUFFSIZE (PITCH_MAX_LAG + 50)
#define PITCH_INTBUFFSIZE (PITCH_FRAME_LEN+PITCH_BUFFSIZE)
/* Max rel. step for interpolation */
#define PITCH_UPSTEP 1.5
@ -148,7 +152,8 @@ enum ISACBand{isacLowerBand = 0, isacUpperBand = 1};
#define HPORDER 2
/* some mathematical constants */
#define LOG2EXP 1.44269504088896 /* log2(exp) */
/* log2(exp) */
#define LOG2EXP 1.44269504088896
#define PI 3.14159265358979
/* Maximum number of iterations allowed to limit payload size */
@ -193,6 +198,7 @@ enum ISACBand{isacLowerBand = 0, isacUpperBand = 1};
#define ISAC_LENGTH_MISMATCH 6730
#define ISAC_RANGE_ERROR_DECODE_BANDWITH 6740
#define ISAC_DISALLOWED_BANDWIDTH_MODE_DECODER 6750
#define ISAC_DISALLOWED_LPC_MODEL 6760
/* 6800 Call setup formats */
#define ISAC_INCOMPATIBLE_FORMATS 6810

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@ -278,7 +278,6 @@ typedef struct {
int pitchIndex[PITCH_SUBFRAMES*2];
/* LPC */
int LPCmodel[2];
int LPCindex_s[108*2]; /* KLT_ORDER_SHAPE = 108 */
int LPCindex_g[12*2]; /* KLT_ORDER_GAIN = 12 */
double LPCcoeffs_lo[(ORDERLO+1)*SUBFRAMES*2];