Upconvert various types to int.
Per comments from HL/kwiberg on https://webrtc-codereview.appspot.com/42569004,
when there is existing usage of mixed types (int16_t, int, etc.), we'd prefer to
standardize on larger types like int and phase out use of int16_t. Specifically,
"Using int16 just because we're sure all reasonable values will fit in 16 bits
isn't usually meaningful in C."

This converts some existing uses of int16_t (and, in a few cases, other types
such as uint16_t) to int (or, in a few places, int32_t). Other locations will be
converted to size_t in a separate change.

BUG=none
R=andrew@webrtc.org, kwiberg@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/54629004

Cr-Commit-Position: refs/heads/master@{#9405}
Parent: 54b0ca553f
Commit: 83ad33a8ae
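The rationale above rests on C's integer promotion rules: int16_t operands are promoted to int in almost every arithmetic expression anyway, so narrow parameter and local types mostly add truncation hazards at call and assignment boundaries rather than safety. The sketch below is not part of this commit and uses hypothetical function names; it is a minimal, self-contained illustration of the kind of silent narrowing that the wider int signatures avoid.

```c
#include <stdint.h>
#include <stdio.h>

/* Hypothetical example: a millisecond counter similar in spirit to the CNG
 * code touched by this change, written once with int16_t parameters and
 * once with plain int. */
static int16_t elapsed_ms_narrow(int16_t samples, int16_t sample_rate_hz) {
  /* Both operands are promoted to int before the multiply/divide, so the
   * narrow types buy nothing here; they only force a truncating cast. */
  return (int16_t)((1000 * samples) / sample_rate_hz);
}

static int elapsed_ms_wide(int samples, int sample_rate_hz) {
  return (1000 * samples) / sample_rate_hz;
}

int main(void) {
  /* 48000 does not fit in int16_t, so the conversion at the call boundary
   * is implementation-defined (it typically wraps), and the narrow version
   * computes garbage while the int version gives 10 ms. */
  printf("narrow: %d\n", elapsed_ms_narrow((int16_t)480, (int16_t)48000));
  printf("wide:   %d\n", elapsed_ms_wide(480, 48000));
  return 0;
}
```

The diff below applies the same idea across the signal-processing, CNG, G.722, iLBC, and iSAC code: lengths, shift counts, frame sizes, and return values move from int16_t to int, with explicit (int16_t) casts kept only where a narrow struct field or legacy API still requires one.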
@@ -16,8 +16,8 @@ void WebRtcSpl_CrossCorrelationC(int32_t* cross_correlation,
 const int16_t* seq2,
 int16_t dim_seq,
 int16_t dim_cross_correlation,
-int16_t right_shifts,
-int16_t step_seq2) {
+int right_shifts,
+int step_seq2) {
 int i = 0, j = 0;

 for (i = 0; i < dim_cross_correlation; i++) {
@@ -15,8 +15,8 @@ void WebRtcSpl_CrossCorrelation_mips(int32_t* cross_correlation,
 const int16_t* seq2,
 int16_t dim_seq,
 int16_t dim_cross_correlation,
-int16_t right_shifts,
-int16_t step_seq2) {
+int right_shifts,
+int step_seq2) {

 int32_t t0 = 0, t1 = 0, t2 = 0, t3 = 0, sum = 0;
 int16_t *pseq2 = NULL;
@@ -74,8 +74,8 @@ void WebRtcSpl_CrossCorrelationNeon(int32_t* cross_correlation,
 const int16_t* seq2,
 int16_t dim_seq,
 int16_t dim_cross_correlation,
-int16_t right_shifts,
-int16_t step_seq2) {
+int right_shifts,
+int step_seq2) {
 int i = 0;

 for (i = 0; i < dim_cross_correlation; i++) {
@@ -542,24 +542,24 @@ typedef void (*CrossCorrelation)(int32_t* cross_correlation,
 const int16_t* seq2,
 int16_t dim_seq,
 int16_t dim_cross_correlation,
-int16_t right_shifts,
-int16_t step_seq2);
+int right_shifts,
+int step_seq2);
 extern CrossCorrelation WebRtcSpl_CrossCorrelation;
 void WebRtcSpl_CrossCorrelationC(int32_t* cross_correlation,
 const int16_t* seq1,
 const int16_t* seq2,
 int16_t dim_seq,
 int16_t dim_cross_correlation,
-int16_t right_shifts,
-int16_t step_seq2);
+int right_shifts,
+int step_seq2);
 #if (defined WEBRTC_DETECT_NEON) || (defined WEBRTC_HAS_NEON)
 void WebRtcSpl_CrossCorrelationNeon(int32_t* cross_correlation,
 const int16_t* seq1,
 const int16_t* seq2,
 int16_t dim_seq,
 int16_t dim_cross_correlation,
-int16_t right_shifts,
-int16_t step_seq2);
+int right_shifts,
+int step_seq2);
 #endif
 #if defined(MIPS32_LE)
 void WebRtcSpl_CrossCorrelation_mips(int32_t* cross_correlation,
@@ -567,8 +567,8 @@ void WebRtcSpl_CrossCorrelation_mips(int32_t* cross_correlation,
 const int16_t* seq2,
 int16_t dim_seq,
 int16_t dim_cross_correlation,
-int16_t right_shifts,
-int16_t step_seq2);
+int right_shifts,
+int step_seq2);
 #endif

 // Creates (the first half of) a Hanning window. Size must be at least 1 and
@@ -68,8 +68,8 @@ int16_t WebRtcCng_CreateDec(CNG_dec_inst** cng_inst);
 * -1 - Error
 */

-int16_t WebRtcCng_InitEnc(CNG_enc_inst* cng_inst, uint16_t fs, int16_t interval,
+int WebRtcCng_InitEnc(CNG_enc_inst* cng_inst, int fs, int16_t interval,
 int16_t quality);
 int16_t WebRtcCng_InitDec(CNG_dec_inst* cng_inst);

 /****************************************************************************
 * WebRtcCng_UpdateSid(...)
@@ -103,9 +103,9 @@ int16_t WebRtcCng_FreeDec(CNG_dec_inst* cng_inst);
 * Return value : 0 - Ok
 * -1 - Error
 */
-int16_t WebRtcCng_Encode(CNG_enc_inst* cng_inst, int16_t* speech,
+int WebRtcCng_Encode(CNG_enc_inst* cng_inst, int16_t* speech,
 int16_t nrOfSamples, uint8_t* SIDdata,
 int16_t* bytesOut, int16_t forceSID);

 /****************************************************************************
 * WebRtcCng_UpdateSid(...)
@@ -36,7 +36,7 @@ typedef struct WebRtcCngDecoder_ {

 typedef struct WebRtcCngEncoder_ {
 int16_t enc_nrOfCoefs;
-uint16_t enc_sampfreq;
+int enc_sampfreq;
 int16_t enc_interval;
 int16_t enc_msSinceSID;
 int32_t enc_Energy;
@@ -142,8 +142,8 @@ int16_t WebRtcCng_CreateDec(CNG_dec_inst** cng_inst) {
 * Return value : 0 - Ok
 * -1 - Error
 */
-int16_t WebRtcCng_InitEnc(CNG_enc_inst* cng_inst, uint16_t fs, int16_t interval,
+int WebRtcCng_InitEnc(CNG_enc_inst* cng_inst, int fs, int16_t interval,
 int16_t quality) {
 int i;
 WebRtcCngEncoder* inst = (WebRtcCngEncoder*) cng_inst;
 memset(inst, 0, sizeof(WebRtcCngEncoder));
@@ -227,9 +227,9 @@ int16_t WebRtcCng_FreeDec(CNG_dec_inst* cng_inst) {
 * Return value : 0 - Ok
 * -1 - Error
 */
-int16_t WebRtcCng_Encode(CNG_enc_inst* cng_inst, int16_t* speech,
+int WebRtcCng_Encode(CNG_enc_inst* cng_inst, int16_t* speech,
 int16_t nrOfSamples, uint8_t* SIDdata,
 int16_t* bytesOut, int16_t forceSID) {
 WebRtcCngEncoder* inst = (WebRtcCngEncoder*) cng_inst;

 int16_t arCoefs[WEBRTC_CNG_MAX_LPC_ORDER + 1];
@@ -388,10 +388,12 @@ int16_t WebRtcCng_Encode(CNG_enc_inst* cng_inst, int16_t* speech,
 inst->enc_msSinceSID = 0;
 *bytesOut = inst->enc_nrOfCoefs + 1;

-inst->enc_msSinceSID += (1000 * nrOfSamples) / inst->enc_sampfreq;
+inst->enc_msSinceSID +=
+    (int16_t)((1000 * nrOfSamples) / inst->enc_sampfreq);
 return inst->enc_nrOfCoefs + 1;
 } else {
-inst->enc_msSinceSID += (1000 * nrOfSamples) / inst->enc_sampfreq;
+inst->enc_msSinceSID +=
+    (int16_t)((1000 * nrOfSamples) / inst->enc_sampfreq);
 *bytesOut = 0;
 return 0;
 }
@@ -39,7 +39,7 @@ int16_t WebRtcG722_EncoderInit(G722EncInst *G722enc_inst)
 }
 }

-int16_t WebRtcG722_FreeEncoder(G722EncInst *G722enc_inst)
+int WebRtcG722_FreeEncoder(G722EncInst *G722enc_inst)
 {
 // Free encoder memory
 return WebRtc_g722_encode_release((G722EncoderState*) G722enc_inst);
@@ -79,7 +79,7 @@ int16_t WebRtcG722_DecoderInit(G722DecInst *G722dec_inst)
 }
 }

-int16_t WebRtcG722_FreeDecoder(G722DecInst *G722dec_inst)
+int WebRtcG722_FreeDecoder(G722DecInst *G722dec_inst)
 {
 // Free encoder memory
 return WebRtc_g722_decode_release((G722DecoderState*) G722dec_inst);
@@ -73,7 +73,7 @@ int16_t WebRtcG722_EncoderInit(G722EncInst *G722enc_inst);
 * Return value : 0 - Ok
 * -1 - Error
 */
-int16_t WebRtcG722_FreeEncoder(G722EncInst *G722enc_inst);
+int WebRtcG722_FreeEncoder(G722EncInst *G722enc_inst);



@@ -142,7 +142,7 @@ int16_t WebRtcG722_DecoderInit(G722DecInst *G722dec_inst);
 * -1 - Error
 */

-int16_t WebRtcG722_FreeDecoder(G722DecInst *G722dec_inst);
+int WebRtcG722_FreeDecoder(G722DecInst *G722dec_inst);


 /****************************************************************************
@@ -31,7 +31,7 @@ void WebRtcIlbcfix_AugmentedCbCorr(
 int16_t low, /* (i) Lag to start from (typically
 20) */
 int16_t high, /* (i) Lag to end at (typically 39) */
-int16_t scale) /* (i) Scale factor to use for
+int scale) /* (i) Scale factor to use for
 the crossDot */
 {
 int lagcount;
@@ -36,7 +36,6 @@ void WebRtcIlbcfix_AugmentedCbCorr(
 int16_t low, /* (i) Lag to start from (typically
 20) */
 int16_t high, /* (i) Lag to end at (typically 39 */
-int16_t scale); /* (i) Scale factor to use for
-the crossDot */
+int scale); /* (i) Scale factor to use for the crossDot */

 #endif
@@ -34,7 +34,7 @@ void WebRtcIlbcfix_CbMemEnergy(
 int16_t lTarget, /* (i) Length of the target vector */
 int16_t *energyW16, /* (o) Energy in the CB vectors */
 int16_t *energyShifts, /* (o) Shift value of the energy */
-int16_t scale, /* (i) The scaling of all energy values */
+int scale, /* (i) The scaling of all energy values */
 int16_t base_size /* (i) Index to where the energy values should be stored */
 ) {
 int16_t *ppi, *ppo, *pp;
@@ -27,7 +27,7 @@ void WebRtcIlbcfix_CbMemEnergy(
 int16_t lTarget, /* (i) Length of the target vector */
 int16_t *energyW16, /* (o) Energy in the CB vectors */
 int16_t *energyShifts, /* (o) Shift value of the energy */
-int16_t scale, /* (i) The scaling of all energy values */
+int scale, /* (i) The scaling of all energy values */
 int16_t base_size /* (i) Index to where the energy values should be stored */
 );

@@ -22,7 +22,7 @@
 void WebRtcIlbcfix_CbMemEnergyAugmentation(
 int16_t *interpSamples, /* (i) The interpolated samples */
 int16_t *CBmem, /* (i) The CB memory */
-int16_t scale, /* (i) The scaling of all energy values */
+int scale, /* (i) The scaling of all energy values */
 int16_t base_size, /* (i) Index to where the energy values should be stored */
 int16_t *energyW16, /* (o) Energy in the CB vectors */
 int16_t *energyShifts /* (o) Shift value of the energy */
@@ -22,7 +22,7 @@
 void WebRtcIlbcfix_CbMemEnergyAugmentation(
 int16_t *interpSamples, /* (i) The interpolated samples */
 int16_t *CBmem, /* (i) The CB memory */
-int16_t scale, /* (i) The scaling of all energy values */
+int scale, /* (i) The scaling of all energy values */
 int16_t base_size, /* (i) Index to where the energy values should be stored */
 int16_t *energyW16, /* (o) Energy in the CB vectors */
 int16_t *energyShifts /* (o) Shift value of the energy */
@@ -28,7 +28,7 @@ void WebRtcIlbcfix_CbMemEnergyCalc(
 int16_t *ppo, /* (i) input pointer 2 */
 int16_t *energyW16, /* (o) Energy in the CB vectors */
 int16_t *energyShifts, /* (o) Shift value of the energy */
-int16_t scale, /* (i) The scaling of all energy values */
+int scale, /* (i) The scaling of all energy values */
 int16_t base_size /* (i) Index to where the energy values should be stored */
 )
 {
@@ -26,7 +26,7 @@ void WebRtcIlbcfix_CbMemEnergyCalc(
 int16_t *ppo, /* (i) input pointer 2 */
 int16_t *energyW16, /* (o) Energy in the CB vectors */
 int16_t *energyShifts, /* (o) Shift value of the energy */
-int16_t scale, /* (i) The scaling of all energy values */
+int scale, /* (i) The scaling of all energy values */
 int16_t base_size /* (i) Index to where the energy values should be stored */
 );

@@ -46,7 +46,9 @@ void WebRtcIlbcfix_CbSearch(
 int16_t block /* (i) the subblock number */
 ) {
 int16_t i, j, stage, range;
-int16_t *pp, scale, tmp;
+int16_t *pp;
+int16_t tmp;
+int scale;
 int16_t bits, temp1, temp2;
 int16_t base_size;
 int32_t codedEner, targetEner;
@@ -121,8 +121,8 @@ int WebRtcIlbcfix_EnhancerInterface( /* (o) Estimated lag in end of in[] */
 shifts = WEBRTC_SPL_MAX(0, shifts);

 /* compute cross correlation */
-WebRtcSpl_CrossCorrelation(corr32, target, regressor,
-ENH_BLOCKL_HALF, 50, (int16_t)shifts, -1);
+WebRtcSpl_CrossCorrelation(corr32, target, regressor, ENH_BLOCKL_HALF, 50,
+shifts, -1);

 /* Find 3 highest correlations that should be compared for the
 highest (corr*corr)/ener */
@@ -207,8 +207,8 @@ int WebRtcIlbcfix_EnhancerInterface( /* (o) Estimated lag in end of in[] */
 shifts=0;

 /* compute cross correlation */
-WebRtcSpl_CrossCorrelation(corr32, target, regressor,
-plc_blockl, 3, (int16_t)shifts, 1);
+WebRtcSpl_CrossCorrelation(corr32, target, regressor, plc_blockl, 3, shifts,
+1);

 /* find lag */
 lag=WebRtcSpl_MaxIndexW32(corr32, 3);
@@ -88,10 +88,10 @@ int16_t WebRtcIlbcfix_EncoderInit(IlbcEncoderInstance* iLBCenc_inst,
 }
 }

-int16_t WebRtcIlbcfix_Encode(IlbcEncoderInstance* iLBCenc_inst,
+int WebRtcIlbcfix_Encode(IlbcEncoderInstance* iLBCenc_inst,
 const int16_t* speechIn,
 int16_t len,
 uint8_t* encoded) {
 int16_t pos = 0;
 int16_t encpos = 0;

@@ -141,11 +141,11 @@ int16_t WebRtcIlbcfix_Decoderinit30Ms(IlbcDecoderInstance *iLBCdec_inst) {
 }


-int16_t WebRtcIlbcfix_Decode(IlbcDecoderInstance* iLBCdec_inst,
+int WebRtcIlbcfix_Decode(IlbcDecoderInstance* iLBCdec_inst,
 const uint8_t* encoded,
 int16_t len,
 int16_t* decoded,
 int16_t* speechType)
 {
 int i=0;
 /* Allow for automatic switching between the frame sizes
@@ -194,11 +194,11 @@ int16_t WebRtcIlbcfix_Decode(IlbcDecoderInstance* iLBCdec_inst,
 return(i*((IlbcDecoder*)iLBCdec_inst)->blockl);
 }

-int16_t WebRtcIlbcfix_Decode20Ms(IlbcDecoderInstance* iLBCdec_inst,
+int WebRtcIlbcfix_Decode20Ms(IlbcDecoderInstance* iLBCdec_inst,
 const uint8_t* encoded,
 int16_t len,
 int16_t* decoded,
 int16_t* speechType)
 {
 int i=0;
 if ((len==((IlbcDecoder*)iLBCdec_inst)->no_of_bytes)||
@@ -222,11 +222,11 @@ int16_t WebRtcIlbcfix_Decode20Ms(IlbcDecoderInstance* iLBCdec_inst,
 return(i*((IlbcDecoder*)iLBCdec_inst)->blockl);
 }

-int16_t WebRtcIlbcfix_Decode30Ms(IlbcDecoderInstance* iLBCdec_inst,
+int WebRtcIlbcfix_Decode30Ms(IlbcDecoderInstance* iLBCdec_inst,
 const uint8_t* encoded,
 int16_t len,
 int16_t* decoded,
 int16_t* speechType)
 {
 int i=0;
 if ((len==((IlbcDecoder*)iLBCdec_inst)->no_of_bytes)||
@@ -23,7 +23,7 @@
 * Initiation of decoder instance.
 *---------------------------------------------------------------*/

-int16_t WebRtcIlbcfix_InitDecode( /* (o) Number of decoded samples */
+int WebRtcIlbcfix_InitDecode( /* (o) Number of decoded samples */
 IlbcDecoder *iLBCdec_inst, /* (i/o) Decoder instance */
 int16_t mode, /* (i) frame size mode */
 int use_enhancer) { /* (i) 1: use enhancer, 0: no enhancer */
@@ -25,7 +25,7 @@
 * Initiation of decoder instance.
 *---------------------------------------------------------------*/

-int16_t WebRtcIlbcfix_InitDecode( /* (o) Number of decoded samples */
+int WebRtcIlbcfix_InitDecode( /* (o) Number of decoded samples */
 IlbcDecoder *iLBCdec_inst, /* (i/o) Decoder instance */
 int16_t mode, /* (i) frame size mode */
 int use_enhancer /* (i) 1 to use enhancer
@@ -23,7 +23,7 @@
 * Initiation of encoder instance.
 *---------------------------------------------------------------*/

-int16_t WebRtcIlbcfix_InitEncode( /* (o) Number of bytes encoded */
+int WebRtcIlbcfix_InitEncode( /* (o) Number of bytes encoded */
 IlbcEncoder *iLBCenc_inst, /* (i/o) Encoder instance */
 int16_t mode) { /* (i) frame size mode */
 iLBCenc_inst->mode = mode;
@@ -25,7 +25,7 @@
 * Initiation of encoder instance.
 *---------------------------------------------------------------*/

-int16_t WebRtcIlbcfix_InitEncode( /* (o) Number of bytes encoded */
+int WebRtcIlbcfix_InitEncode( /* (o) Number of bytes encoded */
 IlbcEncoder *iLBCenc_inst, /* (i/o) Encoder instance */
 int16_t mode /* (i) frame size mode */
 );
@@ -135,10 +135,10 @@ extern "C" {
 * -1 - Error
 */

-int16_t WebRtcIlbcfix_Encode(IlbcEncoderInstance *iLBCenc_inst,
+int WebRtcIlbcfix_Encode(IlbcEncoderInstance *iLBCenc_inst,
 const int16_t *speechIn,
 int16_t len,
 uint8_t* encoded);

 /****************************************************************************
 * WebRtcIlbcfix_DecoderInit(...)
@@ -180,21 +180,21 @@ extern "C" {
 * -1 - Error
 */

-int16_t WebRtcIlbcfix_Decode(IlbcDecoderInstance* iLBCdec_inst,
+int WebRtcIlbcfix_Decode(IlbcDecoderInstance* iLBCdec_inst,
+const uint8_t* encoded,
+int16_t len,
+int16_t* decoded,
+int16_t* speechType);
+int WebRtcIlbcfix_Decode20Ms(IlbcDecoderInstance* iLBCdec_inst,
+const uint8_t* encoded,
+int16_t len,
+int16_t* decoded,
+int16_t* speechType);
+int WebRtcIlbcfix_Decode30Ms(IlbcDecoderInstance* iLBCdec_inst,
 const uint8_t* encoded,
 int16_t len,
 int16_t* decoded,
 int16_t* speechType);
-int16_t WebRtcIlbcfix_Decode20Ms(IlbcDecoderInstance* iLBCdec_inst,
-const uint8_t* encoded,
-int16_t len,
-int16_t* decoded,
-int16_t* speechType);
-int16_t WebRtcIlbcfix_Decode30Ms(IlbcDecoderInstance* iLBCdec_inst,
-const uint8_t* encoded,
-int16_t len,
-int16_t* decoded,
-int16_t* speechType);

 /****************************************************************************
 * WebRtcIlbcfix_DecodePlc(...)
@@ -29,7 +29,8 @@ void WebRtcIlbcfix_MyCorr(
 const int16_t *seq2, /* (i) second sequence */
 int16_t dim2 /* (i) dimension seq2 */
 ){
-int16_t max, scale, loops;
+int16_t max, loops;
+int scale;

 /* Calculate correlation between the two sequences. Scale the
 result of the multiplcication to maximum 26 bits in order
@@ -37,7 +38,7 @@ void WebRtcIlbcfix_MyCorr(
 max=WebRtcSpl_MaxAbsValueW16(seq1, dim1);
 scale=WebRtcSpl_GetSizeInBits(max);

-scale = (int16_t)(2 * scale - 26);
+scale = 2 * scale - 26;
 if (scale<0) {
 scale=0;
 }
@@ -41,7 +41,8 @@ int main(int argc, char* argv[])
 {
 FILE *ifileid,*efileid,*ofileid, *chfileid;
 short encoded_data[55], data[240], speechType;
-short len, mode, pli;
+int len;
+short mode, pli;
 int blockcount = 0;

 IlbcEncoderInstance *Enc_Inst;
@@ -173,7 +174,8 @@ int main(int argc, char* argv[])
 /* decoding */
 fprintf(stderr, "--- Decoding block %i --- ",blockcount);
 if (pli==1) {
-len=WebRtcIlbcfix_Decode(Dec_Inst, encoded_data, len, data, &speechType);
+len=WebRtcIlbcfix_Decode(Dec_Inst, encoded_data, (int16_t)len, data,
+&speechType);
 } else {
 len=WebRtcIlbcfix_DecodePlc(Dec_Inst, data, 1);
 }
@@ -184,7 +184,7 @@ int AudioEncoderDecoderIsacT<T>::DecodeInternal(const uint8_t* encoded,
 decoder_sample_rate_hz_ = sample_rate_hz;
 }
 int16_t temp_type = 1; // Default is speech.
-int16_t ret =
+int ret =
 T::DecodeInternal(isac_state_, encoded, static_cast<int16_t>(encoded_len),
 decoded, &temp_type);
 *speech_type = ConvertSpeechType(temp_type);
@@ -25,12 +25,12 @@ struct IsacFix {
 static const uint16_t kFixSampleRate = 16000;
 static inline int16_t Control(instance_type* inst,
 int32_t rate,
-int16_t framesize) {
+int framesize) {
 return WebRtcIsacfix_Control(inst, rate, framesize);
 }
 static inline int16_t ControlBwe(instance_type* inst,
 int32_t rate_bps,
-int16_t frame_size_ms,
+int frame_size_ms,
 int16_t enforce_frame_size) {
 return WebRtcIsacfix_ControlBwe(inst, rate_bps, frame_size_ms,
 enforce_frame_size);
@@ -38,11 +38,11 @@ struct IsacFix {
 static inline int16_t Create(instance_type** inst) {
 return WebRtcIsacfix_Create(inst);
 }
-static inline int16_t DecodeInternal(instance_type* inst,
+static inline int DecodeInternal(instance_type* inst,
 const uint8_t* encoded,
 int16_t len,
 int16_t* decoded,
 int16_t* speech_type) {
 return WebRtcIsacfix_Decode(inst, encoded, len, decoded, speech_type);
 }
 static inline int16_t DecodePlc(instance_type* inst,
@@ -53,9 +53,9 @@ struct IsacFix {
 static inline int16_t DecoderInit(instance_type* inst) {
 return WebRtcIsacfix_DecoderInit(inst);
 }
-static inline int16_t Encode(instance_type* inst,
+static inline int Encode(instance_type* inst,
 const int16_t* speech_in,
 uint8_t* encoded) {
 return WebRtcIsacfix_Encode(inst, speech_in, encoded);
 }
 static inline int16_t EncoderInit(instance_type* inst, int16_t coding_mode) {
@@ -128,9 +128,9 @@ extern "C" {
 * -1 - Error
 */

-int16_t WebRtcIsacfix_Encode(ISACFIX_MainStruct *ISAC_main_inst,
+int WebRtcIsacfix_Encode(ISACFIX_MainStruct *ISAC_main_inst,
 const int16_t *speechIn,
 uint8_t* encoded);



@@ -251,11 +251,11 @@ extern "C" {
 * -1 - Error
 */

-int16_t WebRtcIsacfix_Decode(ISACFIX_MainStruct *ISAC_main_inst,
+int WebRtcIsacfix_Decode(ISACFIX_MainStruct *ISAC_main_inst,
 const uint8_t* encoded,
 int16_t len,
 int16_t *decoded,
 int16_t *speechType);


 /****************************************************************************
@@ -280,11 +280,11 @@ extern "C" {
 */

 #ifdef WEBRTC_ISAC_FIX_NB_CALLS_ENABLED
-int16_t WebRtcIsacfix_DecodeNb(ISACFIX_MainStruct *ISAC_main_inst,
+int WebRtcIsacfix_DecodeNb(ISACFIX_MainStruct *ISAC_main_inst,
 const uint16_t *encoded,
 int16_t len,
 int16_t *decoded,
 int16_t *speechType);
 #endif // WEBRTC_ISAC_FIX_NB_CALLS_ENABLED


@@ -378,8 +378,8 @@ extern "C" {
 */

 int16_t WebRtcIsacfix_Control(ISACFIX_MainStruct *ISAC_main_inst,
 int16_t rate,
-int16_t framesize);
+int framesize);



@@ -407,7 +407,7 @@ extern "C" {

 int16_t WebRtcIsacfix_ControlBwe(ISACFIX_MainStruct *ISAC_main_inst,
 int16_t rateBPS,
-int16_t frameSizeMs,
+int frameSizeMs,
 int16_t enforceFrameSize);

@@ -226,10 +226,10 @@ int WebRtcIsacfix_EncLogisticMulti2(Bitstr_enc *streamData,
 * Return value : number of bytes in the stream so far
 * -1 if error detected
 */
-int16_t WebRtcIsacfix_DecLogisticMulti2(int16_t *dataQ7,
+int WebRtcIsacfix_DecLogisticMulti2(int16_t *dataQ7,
 Bitstr_dec *streamData,
 const int32_t *envQ8,
 const int16_t lenData)
 {
 uint32_t W_lower;
 uint32_t W_upper;
@@ -74,7 +74,7 @@ int16_t WebRtcIsacfix_EncTerminate(Bitstr_enc *streamData);
 * Return value : number of bytes in the stream so far
 * <0 if error detected
 */
-int16_t WebRtcIsacfix_DecLogisticMulti2(
+int WebRtcIsacfix_DecLogisticMulti2(
 int16_t *data,
 Bitstr_dec *streamData,
 const int32_t *env,
@@ -32,9 +32,9 @@ int WebRtcIsacfix_EstimateBandwidth(BwEstimatorstr* bwest_str,
 uint32_t send_ts,
 uint32_t arr_ts);

-int16_t WebRtcIsacfix_DecodeImpl(int16_t* signal_out16,
+int WebRtcIsacfix_DecodeImpl(int16_t* signal_out16,
 IsacFixDecoderInstance* ISACdec_obj,
 int16_t* current_framesamples);

 int16_t WebRtcIsacfix_DecodePlcImpl(int16_t* decoded,
 IsacFixDecoderInstance* ISACdec_obj,
@@ -27,14 +27,14 @@



-int16_t WebRtcIsacfix_DecodeImpl(int16_t *signal_out16,
+int WebRtcIsacfix_DecodeImpl(int16_t *signal_out16,
 IsacFixDecoderInstance *ISACdec_obj,
 int16_t *current_framesamples)
 {
 int k;
 int err;
 int16_t BWno;
-int16_t len = 0;
+int len = 0;

 int16_t model;

@@ -450,10 +450,10 @@ static void GenerateDitherQ7(int16_t *bufQ7,
 * function to decode the complex spectrum from the bitstream
 * returns the total number of bytes in the stream
 */
-int16_t WebRtcIsacfix_DecodeSpec(Bitstr_dec *streamdata,
+int WebRtcIsacfix_DecodeSpec(Bitstr_dec *streamdata,
 int16_t *frQ7,
 int16_t *fiQ7,
 int16_t AvgPitchGain_Q12)
 {
 int16_t data[FRAMESAMPLES];
 int32_t invARSpec2_Q16[FRAMESAMPLES/4];
@@ -461,7 +461,7 @@ int16_t WebRtcIsacfix_DecodeSpec(Bitstr_dec *streamdata,
 int16_t RCQ15[AR_ORDER];
 int16_t gainQ10;
 int32_t gain2_Q10;
-int16_t len;
+int len;
 int k;

 /* create dither signal */
@@ -22,10 +22,10 @@
 #include "structs.h"

 /* decode complex spectrum (return number of bytes in stream) */
-int16_t WebRtcIsacfix_DecodeSpec(Bitstr_dec *streamdata,
+int WebRtcIsacfix_DecodeSpec(Bitstr_dec *streamdata,
 int16_t *frQ7,
 int16_t *fiQ7,
 int16_t AvgPitchGain_Q12);

 /* encode complex spectrum */
 int WebRtcIsacfix_EncodeSpec(const int16_t *fr,
@@ -399,12 +399,12 @@ static void write_be16(const uint16_t* src, size_t nbytes, uint8_t* dest) {
 * : -1 - Error
 */

-int16_t WebRtcIsacfix_Encode(ISACFIX_MainStruct *ISAC_main_inst,
+int WebRtcIsacfix_Encode(ISACFIX_MainStruct *ISAC_main_inst,
 const int16_t *speechIn,
 uint8_t* encoded)
 {
 ISACFIX_SubStruct *ISAC_inst;
-int16_t stream_len;
+int stream_len;

 /* typecast pointer to rela structure */
 ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst;
@@ -421,7 +421,7 @@ int16_t WebRtcIsacfix_Encode(ISACFIX_MainStruct *ISAC_main_inst,
 &ISAC_inst->bwestimator_obj,
 ISAC_inst->CodingMode);
 if (stream_len<0) {
-ISAC_inst->errorcode = - stream_len;
+ISAC_inst->errorcode = -(int16_t)stream_len;
 return -1;
 }

@@ -766,17 +766,17 @@ int16_t WebRtcIsacfix_UpdateBwEstimate(ISACFIX_MainStruct *ISAC_main_inst,
 */


-int16_t WebRtcIsacfix_Decode(ISACFIX_MainStruct *ISAC_main_inst,
+int WebRtcIsacfix_Decode(ISACFIX_MainStruct *ISAC_main_inst,
 const uint8_t* encoded,
 int16_t len,
 int16_t *decoded,
 int16_t *speechType)
 {
 ISACFIX_SubStruct *ISAC_inst;
 /* number of samples (480 or 960), output from decoder */
 /* that were actually used in the encoder/decoder (determined on the fly) */
 int16_t number_of_samples;
-int16_t declen = 0;
+int declen = 0;

 /* typecast pointer to real structure */
 ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst;
@@ -809,7 +809,7 @@ int16_t WebRtcIsacfix_Decode(ISACFIX_MainStruct *ISAC_main_inst,

 if (declen < 0) {
 /* Some error inside the decoder */
-ISAC_inst->errorcode = -declen;
+ISAC_inst->errorcode = -(int16_t)declen;
 memset(decoded, 0, sizeof(int16_t) * MAX_FRAMESAMPLES);
 return -1;
 }
@@ -859,17 +859,17 @@ int16_t WebRtcIsacfix_Decode(ISACFIX_MainStruct *ISAC_main_inst,
 */

 #ifdef WEBRTC_ISAC_FIX_NB_CALLS_ENABLED
-int16_t WebRtcIsacfix_DecodeNb(ISACFIX_MainStruct *ISAC_main_inst,
+int WebRtcIsacfix_DecodeNb(ISACFIX_MainStruct *ISAC_main_inst,
 const uint16_t *encoded,
 int16_t len,
 int16_t *decoded,
 int16_t *speechType)
 {
 ISACFIX_SubStruct *ISAC_inst;
 /* twice the number of samples (480 or 960), output from decoder */
 /* that were actually used in the encoder/decoder (determined on the fly) */
 int16_t number_of_samples;
-int16_t declen = 0;
+int declen = 0;
 int16_t dummy[FRAMESAMPLES/2];


@@ -903,7 +903,7 @@ int16_t WebRtcIsacfix_DecodeNb(ISACFIX_MainStruct *ISAC_main_inst,

 if (declen < 0) {
 /* Some error inside the decoder */
-ISAC_inst->errorcode = -declen;
+ISAC_inst->errorcode = -(int16_t)declen;
 memset(decoded, 0, sizeof(int16_t) * FRAMESAMPLES);
 return -1;
 }
@@ -1076,8 +1076,8 @@ int16_t WebRtcIsacfix_DecodePlc(ISACFIX_MainStruct *ISAC_main_inst,
 */

 int16_t WebRtcIsacfix_Control(ISACFIX_MainStruct *ISAC_main_inst,
 int16_t rate,
-int16_t framesize)
+int framesize)
 {
 ISACFIX_SubStruct *ISAC_inst;
 /* typecast pointer to real structure */
@@ -1101,7 +1101,7 @@ int16_t WebRtcIsacfix_Control(ISACFIX_MainStruct *ISAC_main_inst,


 if (framesize == 30 || framesize == 60)
-ISAC_inst->ISACenc_obj.new_framelength = (FS/1000) * framesize;
+ISAC_inst->ISACenc_obj.new_framelength = (int16_t)((FS/1000) * framesize);
 else {
 ISAC_inst->errorcode = ISAC_DISALLOWED_FRAME_LENGTH;
 return -1;
@@ -1136,7 +1136,7 @@ int16_t WebRtcIsacfix_Control(ISACFIX_MainStruct *ISAC_main_inst,

 int16_t WebRtcIsacfix_ControlBwe(ISACFIX_MainStruct *ISAC_main_inst,
 int16_t rateBPS,
-int16_t frameSizeMs,
+int frameSizeMs,
 int16_t enforceFrameSize)
 {
 ISACFIX_SubStruct *ISAC_inst;
@@ -1170,7 +1170,7 @@ int16_t WebRtcIsacfix_ControlBwe(ISACFIX_MainStruct *ISAC_main_inst,

 /* Set initial framesize. If enforceFrameSize is set the frame size will not change */
 if ((frameSizeMs == 30) || (frameSizeMs == 60)) {
-ISAC_inst->ISACenc_obj.new_framelength = (FS/1000) * frameSizeMs;
+ISAC_inst->ISACenc_obj.new_framelength = (int16_t)((FS/1000) * frameSizeMs);
 } else {
 ISAC_inst->errorcode = ISAC_DISALLOWED_FRAME_LENGTH;
 return -1;
@@ -101,14 +101,15 @@ int main(int argc, char* argv[])
 int i, errtype, h = 0, k, packetLossPercent = 0;
 int16_t CodingMode;
 int16_t bottleneck;
-int16_t framesize = 30; /* ms */
+int framesize = 30; /* ms */
 int cur_framesmpls, err = 0, lostPackets = 0;

 /* Runtime statistics */
 double starttime, runtime, length_file;

 int16_t stream_len = 0;
-int16_t framecnt, declen = 0;
+int16_t framecnt;
+int declen = 0;
 int16_t shortdata[FRAMESAMPLES_10ms];
 int16_t decoded[MAX_FRAMESAMPLES];
 uint16_t streamdata[500];
@@ -766,7 +767,7 @@ int main(int argc, char* argv[])
 #else
 declen = -1;
 #endif
-prevFrameSize = declen/240;
+prevFrameSize = static_cast<int16_t>(declen / 240);
 }
 }

@@ -88,8 +88,8 @@ int main(int argc, char* argv[]) {
 int16_t CodingMode;
 int16_t bottleneck;

-int16_t framesize = 30; /* ms */
-// int16_t framesize = 60; /* To invoke cisco complexity case at frame 2252 */
+int framesize = 30; /* ms */
+// int framesize = 60; /* To invoke cisco complexity case at frame 2252 */

 int cur_framesmpls, err;

@@ -99,7 +99,7 @@ int main(int argc, char* argv[]) {
 double length_file;

 int16_t stream_len = 0;
-int16_t declen;
+int declen;

 int16_t shortdata[FRAMESAMPLES_10ms];
 int16_t decoded[MAX_FRAMESAMPLES];
|
|||||||
static const bool has_swb = true;
|
static const bool has_swb = true;
|
||||||
static inline int16_t Control(instance_type* inst,
|
static inline int16_t Control(instance_type* inst,
|
||||||
int32_t rate,
|
int32_t rate,
|
||||||
int16_t framesize) {
|
int framesize) {
|
||||||
return WebRtcIsac_Control(inst, rate, framesize);
|
return WebRtcIsac_Control(inst, rate, framesize);
|
||||||
}
|
}
|
||||||
static inline int16_t ControlBwe(instance_type* inst,
|
static inline int16_t ControlBwe(instance_type* inst,
|
||||||
int32_t rate_bps,
|
int32_t rate_bps,
|
||||||
int16_t frame_size_ms,
|
int frame_size_ms,
|
||||||
int16_t enforce_frame_size) {
|
int16_t enforce_frame_size) {
|
||||||
return WebRtcIsac_ControlBwe(inst, rate_bps, frame_size_ms,
|
return WebRtcIsac_ControlBwe(inst, rate_bps, frame_size_ms,
|
||||||
enforce_frame_size);
|
enforce_frame_size);
|
||||||
@ -37,11 +37,11 @@ struct IsacFloat {
|
|||||||
static inline int16_t Create(instance_type** inst) {
|
static inline int16_t Create(instance_type** inst) {
|
||||||
return WebRtcIsac_Create(inst);
|
return WebRtcIsac_Create(inst);
|
||||||
}
|
}
|
||||||
static inline int16_t DecodeInternal(instance_type* inst,
|
static inline int DecodeInternal(instance_type* inst,
|
||||||
const uint8_t* encoded,
|
const uint8_t* encoded,
|
||||||
int16_t len,
|
int16_t len,
|
||||||
int16_t* decoded,
|
int16_t* decoded,
|
||||||
int16_t* speech_type) {
|
int16_t* speech_type) {
|
||||||
return WebRtcIsac_Decode(inst, encoded, len, decoded, speech_type);
|
return WebRtcIsac_Decode(inst, encoded, len, decoded, speech_type);
|
||||||
}
|
}
|
||||||
static inline int16_t DecodePlc(instance_type* inst,
|
static inline int16_t DecodePlc(instance_type* inst,
|
||||||
@ -53,9 +53,9 @@ struct IsacFloat {
|
|||||||
static inline int16_t DecoderInit(instance_type* inst) {
|
static inline int16_t DecoderInit(instance_type* inst) {
|
||||||
return WebRtcIsac_DecoderInit(inst);
|
return WebRtcIsac_DecoderInit(inst);
|
||||||
}
|
}
|
||||||
static inline int16_t Encode(instance_type* inst,
|
static inline int Encode(instance_type* inst,
|
||||||
const int16_t* speech_in,
|
const int16_t* speech_in,
|
||||||
uint8_t* encoded) {
|
uint8_t* encoded) {
|
||||||
return WebRtcIsac_Encode(inst, speech_in, encoded);
|
return WebRtcIsac_Encode(inst, speech_in, encoded);
|
||||||
}
|
}
|
||||||
static inline int16_t EncoderInit(instance_type* inst, int16_t coding_mode) {
|
static inline int16_t EncoderInit(instance_type* inst, int16_t coding_mode) {
|
||||||
|
@@ -144,7 +144,7 @@ extern "C" {
 * : -1 - Error
 */

-int16_t WebRtcIsac_Encode(
+int WebRtcIsac_Encode(
 ISACStruct* ISAC_main_inst,
 const int16_t* speechIn,
 uint8_t* encoded);
@@ -214,7 +214,7 @@ extern "C" {
 * -1 - Error.
 */

-int16_t WebRtcIsac_Decode(
+int WebRtcIsac_Decode(
 ISACStruct* ISAC_main_inst,
 const uint8_t* encoded,
 int16_t len,
@@ -269,7 +269,7 @@ extern "C" {
 int16_t WebRtcIsac_Control(
 ISACStruct* ISAC_main_inst,
 int32_t rate,
-int16_t framesize);
+int framesize);


 /******************************************************************************
@@ -300,7 +300,7 @@ extern "C" {
 int16_t WebRtcIsac_ControlBwe(
 ISACStruct* ISAC_main_inst,
 int32_t rateBPS,
-int16_t frameSizeMs,
+int frameSizeMs,
 int16_t enforceFrameSize);


@@ -701,7 +701,7 @@ extern "C" {
 * Return value : >0 - number of samples in decoded vector
 * -1 - Error
 */
-int16_t WebRtcIsac_DecodeRcu(
+int WebRtcIsac_DecodeRcu(
 ISACStruct* ISAC_main_inst,
 const uint8_t* encoded,
 int16_t len,
@@ -80,9 +80,9 @@ static const uint32_t kCrcTable[256] = {
 * -1 - Error
 */

-int16_t WebRtcIsac_GetCrc(const int16_t* bitstream,
-int16_t len_bitstream_in_bytes,
+int WebRtcIsac_GetCrc(const int16_t* bitstream,
+int len_bitstream_in_bytes,
 uint32_t* crc)
 {
 uint8_t* bitstream_ptr_uw8;
 uint32_t crc_state;
@@ -36,10 +36,10 @@
 * -1 - Error
 */

-int16_t WebRtcIsac_GetCrc(
+int WebRtcIsac_GetCrc(
 const int16_t* encoded,
-int16_t no_of_word8s,
+int no_of_word8s,
 uint32_t* crc);


@@ -494,15 +494,15 @@ int16_t WebRtcIsac_EncoderInit(ISACStruct* ISAC_main_inst,
 * samples.
 * : -1 - Error
 */
-int16_t WebRtcIsac_Encode(ISACStruct* ISAC_main_inst,
+int WebRtcIsac_Encode(ISACStruct* ISAC_main_inst,
 const int16_t* speechIn,
 uint8_t* encoded) {
 float inFrame[FRAMESAMPLES_10ms];
 int16_t speechInLB[FRAMESAMPLES_10ms];
 int16_t speechInUB[FRAMESAMPLES_10ms];
-int16_t streamLenLB = 0;
-int16_t streamLenUB = 0;
-int16_t streamLen = 0;
+int streamLenLB = 0;
+int streamLenUB = 0;
+int streamLen = 0;
 int16_t k = 0;
 int garbageLen = 0;
 int32_t bottleneck = 0;
@@ -601,8 +601,8 @@ int16_t WebRtcIsac_Encode(ISACStruct* ISAC_main_inst,

 /* Tell to upper-band the number of bytes used so far.
 * This is for payload limitation. */
-instUB->ISACencUB_obj.numBytesUsed = streamLenLB + 1 +
-LEN_CHECK_SUM_WORD8;
+instUB->ISACencUB_obj.numBytesUsed =
+    (int16_t)(streamLenLB + 1 + LEN_CHECK_SUM_WORD8);
 /* Encode upper-band. */
 switch (instISAC->bandwidthKHz) {
 case isac12kHz: {
@@ -1045,12 +1045,12 @@ int16_t WebRtcIsac_UpdateBwEstimate(ISACStruct* ISAC_main_inst,
 return 0;
 }

-static int16_t Decode(ISACStruct* ISAC_main_inst,
+static int Decode(ISACStruct* ISAC_main_inst,
 const uint8_t* encoded,
 int16_t lenEncodedBytes,
 int16_t* decoded,
 int16_t* speechType,
 int16_t isRCUPayload) {
 /* Number of samples (480 or 960), output from decoder
 that were actually used in the encoder/decoder
 (determined on the fly). */
@@ -1060,8 +1060,8 @@ static int16_t Decode(ISACStruct* ISAC_main_inst,
 float outFrame[MAX_FRAMESAMPLES];
 int16_t outFrameLB[MAX_FRAMESAMPLES];
 int16_t outFrameUB[MAX_FRAMESAMPLES];
-int16_t numDecodedBytesLB;
-int16_t numDecodedBytesUB;
+int numDecodedBytesLB;
+int numDecodedBytesUB;
 int16_t lenEncodedLBBytes;
 int16_t validChecksum = 1;
 int16_t k;
@@ -1350,11 +1350,11 @@ static int16_t Decode(ISACStruct* ISAC_main_inst,
 * -1 - Error
 */

-int16_t WebRtcIsac_Decode(ISACStruct* ISAC_main_inst,
+int WebRtcIsac_Decode(ISACStruct* ISAC_main_inst,
 const uint8_t* encoded,
 int16_t lenEncodedBytes,
 int16_t* decoded,
 int16_t* speechType) {
 int16_t isRCUPayload = 0;
 return Decode(ISAC_main_inst, encoded, lenEncodedBytes, decoded,
 speechType, isRCUPayload);
@@ -1382,11 +1382,11 @@ int16_t WebRtcIsac_Decode(ISACStruct* ISAC_main_inst,



-int16_t WebRtcIsac_DecodeRcu(ISACStruct* ISAC_main_inst,
+int WebRtcIsac_DecodeRcu(ISACStruct* ISAC_main_inst,
 const uint8_t* encoded,
 int16_t lenEncodedBytes,
 int16_t* decoded,
 int16_t* speechType) {
 int16_t isRCUPayload = 1;
 return Decode(ISAC_main_inst, encoded, lenEncodedBytes, decoded,
 speechType, isRCUPayload);
@@ -1485,7 +1485,7 @@ static int16_t ControlUb(ISACUBStruct* instISAC, double rate) {

 int16_t WebRtcIsac_Control(ISACStruct* ISAC_main_inst,
 int32_t bottleneckBPS,
-int16_t frameSize) {
+int frameSize) {
 ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst;
 int16_t status;
 double rateLB;
@@ -1526,7 +1526,7 @@ int16_t WebRtcIsac_Control(ISACStruct* ISAC_main_inst,
 return -1;
 }

-status = ControlLb(&instISAC->instLB, rateLB, frameSize);
+status = ControlLb(&instISAC->instLB, rateLB, (int16_t)frameSize);
 if (status < 0) {
 instISAC->errorCode = -status;
 return -1;
@@ -1594,7 +1594,7 @@ int16_t WebRtcIsac_Control(ISACStruct* ISAC_main_inst,
 */
 int16_t WebRtcIsac_ControlBwe(ISACStruct* ISAC_main_inst,
 int32_t bottleneckBPS,
-int16_t frameSizeMs,
+int frameSizeMs,
 int16_t enforceFrameSize) {
 ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst;
 enum ISACBandwidth bandwidth;
@@ -1641,8 +1641,8 @@ int16_t WebRtcIsac_ControlBwe(ISACStruct* ISAC_main_inst,
 * will not change */
 if (frameSizeMs != 0) {
 if ((frameSizeMs == 30) || (frameSizeMs == 60)) {
-instISAC->instLB.ISACencLB_obj.new_framelength = (FS / 1000) *
-frameSizeMs;
+instISAC->instLB.ISACencLB_obj.new_framelength =
+    (int16_t)((FS / 1000) * frameSizeMs);
 } else {
 instISAC->errorCode = ISAC_DISALLOWED_FRAME_LENGTH;
 return -1;
@ -79,7 +79,7 @@ TEST_F(IsacTest, IsacUpdateBWE) {
   WebRtcIsac_EncoderInit(isac_codec_, 0);
   WebRtcIsac_DecoderInit(isac_codec_);
 
-  int16_t encoded_bytes;
+  int encoded_bytes;
 
   // Test with call with a small packet (sync packet).
   EXPECT_EQ(-1, WebRtcIsac_UpdateBwEstimate(isac_codec_, bitstream_small_, 7, 1,
@ -47,14 +47,15 @@ int main(int argc, char* argv[]) {
   int i, errtype, VADusage = 0, packetLossPercent = 0;
   int16_t CodingMode;
   int32_t bottleneck = 0;
-  int16_t framesize = 30; /* ms */
+  int framesize = 30; /* ms */
   int cur_framesmpls, err;
 
   /* Runtime statistics */
   double starttime, runtime, length_file;
 
   int16_t stream_len = 0;
-  int16_t declen = 0, lostFrame = 0, declenTC = 0;
+  int declen = 0, declenTC = 0;
+  int16_t lostFrame = 0;
 
   int16_t shortdata[SWBFRAMESAMPLES_10ms];
   int16_t vaddata[SWBFRAMESAMPLES_10ms * 3];
@ -191,7 +191,7 @@ int main(int argc, char* argv[])
 
   short streamLen;
   short numSamplesRead;
-  short lenDecodedAudio;
+  int lenDecodedAudio;
   short senderIdx;
   short receiverIdx;
 
@ -62,7 +62,7 @@ int main(int argc, char* argv[]) {
   unsigned long totalsmpls = 0;
 
   int32_t bottleneck = 39;
-  int16_t frameSize = 30; /* ms */
+  int frameSize = 30; /* ms */
   int16_t codingMode = 1;
   int16_t shortdata[FRAMESAMPLES_SWB_10ms];
   int16_t decoded[MAX_FRAMESAMPLES_SWB];
@ -73,9 +73,9 @@ int main(int argc, char* argv[]) {
   ISACStruct* ISAC_main_inst;
 
   int16_t stream_len = 0;
-  int16_t declen = 0;
+  int declen = 0;
   int16_t err;
-  int16_t cur_framesmpls;
+  int cur_framesmpls;
   int endfile;
 #ifdef WIN32
   double length_file;
@ -198,7 +198,7 @@ AudioEncoder::EncodedInfo AudioEncoderOpus::EncodeInternal(
   CHECK_EQ(input_buffer_.size(),
            static_cast<size_t>(num_10ms_frames_per_packet_) *
                samples_per_10ms_frame_);
-  int16_t status = WebRtcOpus_Encode(
+  int status = WebRtcOpus_Encode(
       inst_, &input_buffer_[0],
       rtc::CheckedDivExact(CastInt16(input_buffer_.size()),
                            static_cast<int16_t>(num_channels_)),
@ -64,11 +64,11 @@ int16_t WebRtcOpus_EncoderFree(OpusEncInst* inst);
  * Return value : >=0 - Length (in bytes) of coded data
  *                 -1 - Error
  */
-int16_t WebRtcOpus_Encode(OpusEncInst* inst,
+int WebRtcOpus_Encode(OpusEncInst* inst,
                           const int16_t* audio_in,
                           int16_t samples,
                           int16_t length_encoded_buffer,
                           uint8_t* encoded);
 
 /****************************************************************************
  * WebRtcOpus_SetBitRate(...)
@ -236,9 +236,9 @@ int16_t WebRtcOpus_DecoderInit(OpusDecInst* inst);
  * Return value : >0 - Samples per channel in decoded vector
  *                -1 - Error
  */
-int16_t WebRtcOpus_Decode(OpusDecInst* inst, const uint8_t* encoded,
+int WebRtcOpus_Decode(OpusDecInst* inst, const uint8_t* encoded,
                           int16_t encoded_bytes, int16_t* decoded,
                           int16_t* audio_type);
 
 /****************************************************************************
  * WebRtcOpus_DecodePlc(...)
@ -254,8 +254,8 @@ int16_t WebRtcOpus_Decode(OpusDecInst* inst, const uint8_t* encoded,
  * Return value : >0 - number of samples in decoded PLC vector
  *                -1 - Error
  */
-int16_t WebRtcOpus_DecodePlc(OpusDecInst* inst, int16_t* decoded,
-                             int16_t number_of_lost_frames);
+int WebRtcOpus_DecodePlc(OpusDecInst* inst, int16_t* decoded,
+                         int number_of_lost_frames);
 
 /****************************************************************************
  * WebRtcOpus_DecodeFec(...)
@ -275,9 +275,9 @@ int16_t WebRtcOpus_DecodePlc(OpusDecInst* inst, int16_t* decoded,
  *                 0 - No FEC data in the packet
  *                -1 - Error
  */
-int16_t WebRtcOpus_DecodeFec(OpusDecInst* inst, const uint8_t* encoded,
+int WebRtcOpus_DecodeFec(OpusDecInst* inst, const uint8_t* encoded,
                              int16_t encoded_bytes, int16_t* decoded,
                              int16_t* audio_type);
 
 /****************************************************************************
  * WebRtcOpus_DurationEst(...)
@ -131,10 +131,10 @@ OpusFecTest::OpusFecTest()
 }
 
 void OpusFecTest::EncodeABlock() {
-  int16_t value = WebRtcOpus_Encode(opus_encoder_,
+  int value = WebRtcOpus_Encode(opus_encoder_,
                                 &in_data_[data_pointer_],
                                 block_length_sample_,
                                 max_bytes_, &bit_stream_[0]);
   EXPECT_GT(value, 0);
 
   encoded_bytes_ = value;
@ -142,7 +142,7 @@ void OpusFecTest::EncodeABlock() {
 
 void OpusFecTest::DecodeABlock(bool lost_previous, bool lost_current) {
   int16_t audio_type;
-  int16_t value_1 = 0, value_2 = 0;
+  int value_1 = 0, value_2 = 0;
 
   if (lost_previous) {
     // Decode previous frame.
@ -78,11 +78,11 @@ int16_t WebRtcOpus_EncoderFree(OpusEncInst* inst) {
   }
 }
 
-int16_t WebRtcOpus_Encode(OpusEncInst* inst,
+int WebRtcOpus_Encode(OpusEncInst* inst,
                           const int16_t* audio_in,
                           int16_t samples,
                           int16_t length_encoded_buffer,
                           uint8_t* encoded) {
   int res;
 
   if (samples > 48 * kWebRtcOpusMaxEncodeFrameSizeMs) {
@ -291,9 +291,9 @@ static int DecodeNative(OpusDecInst* inst, const uint8_t* encoded,
   return res;
 }
 
-int16_t WebRtcOpus_Decode(OpusDecInst* inst, const uint8_t* encoded,
+int WebRtcOpus_Decode(OpusDecInst* inst, const uint8_t* encoded,
                           int16_t encoded_bytes, int16_t* decoded,
                           int16_t* audio_type) {
   int decoded_samples;
 
   if (encoded_bytes == 0) {
@ -318,8 +318,8 @@ int16_t WebRtcOpus_Decode(OpusDecInst* inst, const uint8_t* encoded,
   return decoded_samples;
 }
 
-int16_t WebRtcOpus_DecodePlc(OpusDecInst* inst, int16_t* decoded,
-                             int16_t number_of_lost_frames) {
+int WebRtcOpus_DecodePlc(OpusDecInst* inst, int16_t* decoded,
+                         int number_of_lost_frames) {
   int16_t audio_type = 0;
   int decoded_samples;
   int plc_samples;
@ -339,9 +339,9 @@ int16_t WebRtcOpus_DecodePlc(OpusDecInst* inst, int16_t* decoded,
   return decoded_samples;
 }
 
-int16_t WebRtcOpus_DecodeFec(OpusDecInst* inst, const uint8_t* encoded,
+int WebRtcOpus_DecodeFec(OpusDecInst* inst, const uint8_t* encoded,
                              int16_t encoded_bytes, int16_t* decoded,
                              int16_t* audio_type) {
   int decoded_samples;
   int fec_samples;
 
@ -273,17 +273,11 @@ void OpusTest::Run(TestPackStereo* channel, int channels, int bitrate,
   int16_t bitstream_len_byte;
   uint8_t bitstream[kMaxBytes];
   for (int i = 0; i < loop_encode; i++) {
-    if (channels == 1) {
-      bitstream_len_byte = WebRtcOpus_Encode(
-          opus_mono_encoder_, &audio[read_samples],
-          frame_length, kMaxBytes, bitstream);
-      ASSERT_GT(bitstream_len_byte, -1);
-    } else {
-      bitstream_len_byte = WebRtcOpus_Encode(
-          opus_stereo_encoder_, &audio[read_samples],
-          frame_length, kMaxBytes, bitstream);
-      ASSERT_GT(bitstream_len_byte, -1);
-    }
+    int bitstream_len_byte_int = WebRtcOpus_Encode(
+        (channels == 1) ? opus_mono_encoder_ : opus_stereo_encoder_,
+        &audio[read_samples], frame_length, kMaxBytes, bitstream);
+    ASSERT_GT(bitstream_len_byte_int, -1);
+    bitstream_len_byte = static_cast<int16_t>(bitstream_len_byte_int);
 
     // Simulate packet loss by setting |packet_loss_| to "true" in
     // |percent_loss| percent of the loops.
@ -163,9 +163,9 @@ int AudioDecoderIlbc::DecodeInternal(const uint8_t* encoded,
                                      SpeechType* speech_type) {
   DCHECK_EQ(sample_rate_hz, 8000);
   int16_t temp_type = 1; // Default is speech.
-  int16_t ret = WebRtcIlbcfix_Decode(dec_state_, encoded,
+  int ret = WebRtcIlbcfix_Decode(dec_state_, encoded,
                                      static_cast<int16_t>(encoded_len), decoded,
                                      &temp_type);
   *speech_type = ConvertSpeechType(temp_type);
   return ret;
 }
@ -330,11 +330,11 @@ int AudioDecoderOpus::DecodeInternal(const uint8_t* encoded,
                                      SpeechType* speech_type) {
   DCHECK_EQ(sample_rate_hz, 48000);
   int16_t temp_type = 1; // Default is speech.
-  int16_t ret = WebRtcOpus_Decode(dec_state_, encoded,
+  int ret = WebRtcOpus_Decode(dec_state_, encoded,
                                   static_cast<int16_t>(encoded_len), decoded,
                                   &temp_type);
   if (ret > 0)
-    ret *= static_cast<int16_t>(channels_); // Return total number of samples.
+    ret *= static_cast<int>(channels_); // Return total number of samples.
   *speech_type = ConvertSpeechType(temp_type);
   return ret;
 }
@ -352,11 +352,11 @@ int AudioDecoderOpus::DecodeRedundantInternal(const uint8_t* encoded,
 
   DCHECK_EQ(sample_rate_hz, 48000);
   int16_t temp_type = 1; // Default is speech.
-  int16_t ret = WebRtcOpus_DecodeFec(dec_state_, encoded,
+  int ret = WebRtcOpus_DecodeFec(dec_state_, encoded,
                                      static_cast<int16_t>(encoded_len), decoded,
                                      &temp_type);
   if (ret > 0)
-    ret *= static_cast<int16_t>(channels_); // Return total number of samples.
+    ret *= static_cast<int>(channels_); // Return total number of samples.
   *speech_type = ConvertSpeechType(temp_type);
   return ret;
 }
@ -272,7 +272,7 @@ void DspHelper::CrossFade(const int16_t* input1, const int16_t* input2,
 }
 
 void DspHelper::UnmuteSignal(const int16_t* input, size_t length,
-                             int16_t* factor, int16_t increment,
+                             int16_t* factor, int increment,
                              int16_t* output) {
   uint16_t factor_16b = *factor;
   int32_t factor_32b = (static_cast<int32_t>(factor_16b) << 6) + 32;
@ -284,7 +284,7 @@ void DspHelper::UnmuteSignal(const int16_t* input, size_t length,
   *factor = factor_16b;
 }
 
-void DspHelper::MuteSignal(int16_t* signal, int16_t mute_slope, size_t length) {
+void DspHelper::MuteSignal(int16_t* signal, int mute_slope, size_t length) {
   int32_t factor = (16384 << 6) + 32;
   for (size_t i = 0; i < length; i++) {
     signal[i] = ((factor >> 6) * signal[i] + 8192) >> 14;
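For context on the helpers above: the (x << 6) + 32 pattern converts the Q14 gain into Q20 and pre-adds half of the later >> 6, so the conversion back to Q14 rounds instead of truncating. A hedged standalone illustration (not WebRTC code, just the arithmetic):

// Sketch only: Q14 <-> Q20 round trip as used by the muting helpers.
#include <cassert>
#include <cstdint>

int main() {
  int16_t gain_q14 = 16384;  // 1.0 in Q14.
  // To Q20; the +32 biases the eventual >> 6 so it rounds to nearest.
  int32_t gain_q20 = (static_cast<int32_t>(gain_q14) << 6) + 32;
  gain_q20 += 64;  // Ramp by one Q20 increment, i.e. one sample.
  int16_t back_q14 = static_cast<int16_t>(gain_q20 >> 6);  // Back to Q14.
  assert(back_q14 == 16385);
  return 0;
}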
@ -110,11 +110,11 @@ class DspHelper {
   // sample and increases the gain by |increment| (Q20) for each sample. The
   // result is written to |output|. |length| samples are processed.
   static void UnmuteSignal(const int16_t* input, size_t length, int16_t* factor,
-                           int16_t increment, int16_t* output);
+                           int increment, int16_t* output);
 
   // Starts at unity gain and gradually fades out |signal|. For each sample,
   // the gain is reduced by |mute_slope| (Q14). |length| samples are processed.
-  static void MuteSignal(int16_t* signal, int16_t mute_slope, size_t length);
+  static void MuteSignal(int16_t* signal, int mute_slope, size_t length);
 
   // Downsamples |input| from |sample_rate_hz| to 4 kHz sample rate. The input
   // has |input_length| samples, and the method will write |output_length|
@ -239,14 +239,12 @@ int Expand::Process(AudioMultiVector* output) {
     if (consecutive_expands_ == 3) {
       // Let the mute factor decrease from 1.0 to 0.95 in 6.25 ms.
       // mute_slope = 0.0010 / fs_mult in Q20.
-      parameters.mute_slope = std::max(parameters.mute_slope,
-                                       static_cast<int16_t>(1049 / fs_mult));
+      parameters.mute_slope = std::max(parameters.mute_slope, 1049 / fs_mult);
     }
     if (consecutive_expands_ == 7) {
       // Let the mute factor decrease from 1.0 to 0.90 in 6.25 ms.
      // mute_slope = 0.0020 / fs_mult in Q20.
-      parameters.mute_slope = std::max(parameters.mute_slope,
-                                       static_cast<int16_t>(2097 / fs_mult));
+      parameters.mute_slope = std::max(parameters.mute_slope, 2097 / fs_mult);
     }
 
     // Mute segment according to slope value.
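The constants in this hunk follow directly from the commented slopes: with 20 fractional bits, 0.0010 * 2^20 ≈ 1048.6 and 0.0020 * 2^20 ≈ 2097.2, hence 1049 and 2097 before dividing by fs_mult (5243 ≈ 0.005 * 2^20 appears further down in AnalyzeSignal()). A minimal sketch of that derivation, using a hypothetical helper that is not part of this patch:

// Sketch only: derivation of the Q20 mute_slope constants used above.
#include <cassert>

static int SlopeToQ20(double slope_per_sample) {
  return static_cast<int>(slope_per_sample * (1 << 20) + 0.5);  // Round to nearest.
}

int main() {
  assert(SlopeToQ20(0.0010) == 1049);  // Mute factor 1.0 -> 0.95 in 6.25 ms.
  assert(SlopeToQ20(0.0020) == 2097);  // Mute factor 1.0 -> 0.90 in 6.25 ms.
  assert(SlopeToQ20(0.0050) == 5243);  // Lower bound applied in AnalyzeSignal().
  return 0;
}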
@ -368,7 +366,7 @@ void Expand::AnalyzeSignal(int16_t* random_vector) {
   InitializeForAnExpandPeriod();
 
   // Calculate correlation in downsampled domain (4 kHz sample rate).
-  int16_t correlation_scale;
+  int correlation_scale;
   int correlation_length = 51; // TODO(hlundin): Legacy bit-exactness.
   // If it is decided to break bit-exactness |correlation_length| should be
   // initialized to the return value of Correlation().
@ -446,7 +444,7 @@ void Expand::AnalyzeSignal(int16_t* random_vector) {
       correlation_length + start_index + correlation_lags - 1);
   correlation_scale = ((31 - WebRtcSpl_NormW32(signal_max * signal_max))
       + (31 - WebRtcSpl_NormW32(correlation_length))) - 31;
-  correlation_scale = std::max(static_cast<int16_t>(0), correlation_scale);
+  correlation_scale = std::max(0, correlation_scale);
 
   // Calculate the correlation, store in |correlation_vector2|.
   WebRtcSpl_CrossCorrelation(
@ -473,7 +471,7 @@ void Expand::AnalyzeSignal(int16_t* random_vector) {
 
   // Calculate the correlation coefficient between the two portions of the
   // signal.
-  int16_t corr_coefficient;
+  int32_t corr_coefficient;
   if ((energy1 > 0) && (energy2 > 0)) {
     int energy1_scale = std::max(16 - WebRtcSpl_NormW32(energy1), 0);
     int energy2_scale = std::max(16 - WebRtcSpl_NormW32(energy2), 0);
@ -482,17 +480,17 @@ void Expand::AnalyzeSignal(int16_t* random_vector) {
       // If sum is odd, add 1 to make it even.
       energy1_scale += 1;
     }
-    int16_t scaled_energy1 = energy1 >> energy1_scale;
-    int16_t scaled_energy2 = energy2 >> energy2_scale;
-    int16_t sqrt_energy_product = WebRtcSpl_SqrtFloor(
-        scaled_energy1 * scaled_energy2);
+    int32_t scaled_energy1 = energy1 >> energy1_scale;
+    int32_t scaled_energy2 = energy2 >> energy2_scale;
+    int16_t sqrt_energy_product = static_cast<int16_t>(
+        WebRtcSpl_SqrtFloor(scaled_energy1 * scaled_energy2));
     // Calculate max_correlation / sqrt(energy1 * energy2) in Q14.
     int cc_shift = 14 - (energy1_scale + energy2_scale) / 2;
     max_correlation = WEBRTC_SPL_SHIFT_W32(max_correlation, cc_shift);
     corr_coefficient = WebRtcSpl_DivW32W16(max_correlation,
                                            sqrt_energy_product);
-    corr_coefficient = std::min(static_cast<int16_t>(16384),
-                                corr_coefficient); // Cap at 1.0 in Q14.
+    // Cap at 1.0 in Q14.
+    corr_coefficient = std::min(16384, corr_coefficient);
   } else {
     corr_coefficient = 0;
   }
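The cap of 16384 above is 1.0 in Q14 (1 << 14), and widening corr_coefficient to int32_t removes the int16_t casts around the capped value. A floating-point cross-check of what the fixed-point path computes, as an illustrative sketch only (these names are not WebRTC API):

// Sketch only: reference computation of the Q14 correlation coefficient.
#include <algorithm>
#include <cmath>
#include <cstdint>

int32_t CorrCoefficientQ14(double max_correlation, double energy1, double energy2) {
  if (energy1 <= 0.0 || energy2 <= 0.0)
    return 0;
  double coefficient = max_correlation / std::sqrt(energy1 * energy2);
  int32_t q14 = static_cast<int32_t>(coefficient * (1 << 14) + 0.5);
  return std::min<int32_t>(16384, q14);  // Cap at 1.0 in Q14, matching the fixed-point code.
}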
@ -513,8 +511,8 @@ void Expand::AnalyzeSignal(int16_t* random_vector) {
     if ((energy1 / 4 < energy2) && (energy1 > energy2 / 4)) {
       // Energy constraint fulfilled. Use both vectors and scale them
       // accordingly.
-      int16_t scaled_energy2 = std::max(16 - WebRtcSpl_NormW32(energy2), 0);
-      int16_t scaled_energy1 = scaled_energy2 - 13;
+      int32_t scaled_energy2 = std::max(16 - WebRtcSpl_NormW32(energy2), 0);
+      int32_t scaled_energy1 = scaled_energy2 - 13;
       // Calculate scaled_energy1 / scaled_energy2 in Q13.
       int32_t energy_ratio = WebRtcSpl_DivW32W16(
           WEBRTC_SPL_SHIFT_W32(energy1, -scaled_energy1),
@ -684,7 +682,8 @@ void Expand::AnalyzeSignal(int16_t* random_vector) {
     //   voice_mix_factor = 0;
     if (corr_coefficient > 7875) {
       int16_t x1, x2, x3;
-      x1 = corr_coefficient; // |corr_coefficient| is in Q14.
+      // |corr_coefficient| is in Q14.
+      x1 = static_cast<int16_t>(corr_coefficient);
       x2 = (x1 * x1) >> 14; // Shift 14 to keep result in Q14.
       x3 = (x1 * x2) >> 14;
       static const int kCoefficients[4] = { -5179, 19931, -16422, 5776 };
@ -712,8 +711,8 @@ void Expand::AnalyzeSignal(int16_t* random_vector) {
       // the division.
       // Shift the denominator from Q13 to Q5 before the division. The result of
       // the division will then be in Q20.
-      int16_t temp_ratio = WebRtcSpl_DivW32W16((slope - 8192) << 12,
+      int temp_ratio = WebRtcSpl_DivW32W16((slope - 8192) << 12,
                                                (distortion_lag * slope) >> 8);
       if (slope > 14746) {
         // slope > 1.8.
         // Divide by 2, with proper rounding.
@ -732,8 +731,7 @@ void Expand::AnalyzeSignal(int16_t* random_vector) {
       // Make sure the mute factor decreases from 1.0 to 0.9 in no more than
       // 6.25 ms.
       // mute_slope >= 0.005 / fs_mult in Q20.
-      parameters.mute_slope = std::max(static_cast<int16_t>(5243 / fs_mult),
-                                       parameters.mute_slope);
+      parameters.mute_slope = std::max(5243 / fs_mult, parameters.mute_slope);
     } else if (slope > 8028) {
       parameters.mute_slope = 0;
     }
@ -755,7 +753,7 @@ Expand::ChannelParameters::ChannelParameters()
 }
 
 int16_t Expand::Correlation(const int16_t* input, size_t input_length,
-                            int16_t* output, int16_t* output_scale) const {
+                            int16_t* output, int* output_scale) const {
   // Set parameters depending on sample rate.
   const int16_t* filter_coefficients;
   int16_t num_coefficients;
@ -844,7 +842,7 @@ Expand* ExpandFactory::Create(BackgroundNoise* background_noise,
 // TODO(turajs): This can be moved to BackgroundNoise class.
 void Expand::GenerateBackgroundNoise(int16_t* random_vector,
                                      size_t channel,
-                                     int16_t mute_slope,
+                                     int mute_slope,
                                      bool too_many_expands,
                                      size_t num_noise_samples,
                                      int16_t* buffer) {
@ -887,7 +885,7 @@ void Expand::GenerateBackgroundNoise(int16_t* random_vector,
       bgn_mute_factor > 0) {
     // Fade BGN to zero.
     // Calculate muting slope, approximately -2^18 / fs_hz.
-    int16_t mute_slope;
+    int mute_slope;
     if (fs_hz_ == 8000) {
       mute_slope = -32;
     } else if (fs_hz_ == 16000) {
@ -72,7 +72,7 @@ class Expand {
 
   void GenerateBackgroundNoise(int16_t* random_vector,
                                size_t channel,
-                               int16_t mute_slope,
+                               int mute_slope,
                                bool too_many_expands,
                                size_t num_noise_samples,
                                int16_t* buffer);
@ -113,7 +113,7 @@ class Expand {
     AudioVector expand_vector0;
     AudioVector expand_vector1;
     bool onset;
-    int16_t mute_slope; /* Q20 */
+    int mute_slope; /* Q20 */
   };
 
   // Calculate the auto-correlation of |input|, with length |input_length|
@ -121,7 +121,7 @@ class Expand {
   // |input|, and is written to |output|. The scale factor is written to
   // |output_scale|. Returns the length of the correlation vector.
   int16_t Correlation(const int16_t* input, size_t input_length,
-                      int16_t* output, int16_t* output_scale) const;
+                      int16_t* output, int* output_scale) const;
 
   void UpdateLagIndex();
 
@ -311,7 +311,7 @@ int16_t Merge::CorrelateAndPeakSearch(int16_t expanded_max, int16_t input_max,
   const int max_corr_length = kMaxCorrelationLength;
   int stop_position_downsamp = std::min(
       max_corr_length, expand_->max_lag() / (fs_mult_ * 2) + 1);
-  int16_t correlation_shift = 0;
+  int correlation_shift = 0;
   if (expanded_max * input_max > 26843546) {
     correlation_shift = 3;
   }
@ -330,7 +330,7 @@ int16_t Merge::CorrelateAndPeakSearch(int16_t expanded_max, int16_t input_max,
   int16_t* correlation_ptr = &correlation16[pad_length];
   int32_t max_correlation = WebRtcSpl_MaxAbsValueW32(correlation,
                                                      stop_position_downsamp);
-  int16_t norm_shift = std::max(0, 17 - WebRtcSpl_NormW32(max_correlation));
+  int norm_shift = std::max(0, 17 - WebRtcSpl_NormW32(max_correlation));
   WebRtcSpl_VectorBitShiftW32ToW16(correlation_ptr, stop_position_downsamp,
                                    correlation, norm_shift);
 
@ -1271,7 +1271,7 @@ int NetEqImpl::DecodeLoop(PacketList* packet_list, Operations* operation,
            *operation == kPreemptiveExpand);
     packet_list->pop_front();
     size_t payload_length = packet->payload_length;
-    int16_t decode_length;
+    int decode_length;
     if (packet->sync_packet) {
       // Decode to silence with the same frame size as the last decode.
       LOG(LS_VERBOSE) << "Decoding sync-packet: " <<
@ -108,7 +108,7 @@ int Normal::Process(const int16_t* input,
       }
 
       // If muted increase by 0.64 for every 20 ms (NB/WB 0.0040/0.0020 in Q14).
-      int16_t increment = 64 / fs_mult;
+      int increment = static_cast<int>(64 / fs_mult);
       for (size_t i = 0; i < length_per_channel; i++) {
         // Scale with mute factor.
         assert(channel_ix < output->Channels());
@ -174,7 +174,7 @@ int Normal::Process(const int16_t* input,
     // Previous was neither of Expand, FadeToBGN or RFC3389_CNG, but we are
     // still ramping up from previous muting.
     // If muted increase by 0.64 for every 20 ms (NB/WB 0.0040/0.0020 in Q14).
-    int16_t increment = 64 / fs_mult;
+    int increment = static_cast<int>(64 / fs_mult);
     size_t length_per_channel = length / output->Channels();
     for (size_t i = 0; i < length_per_channel; i++) {
       for (size_t channel_ix = 0; channel_ix < output->Channels();
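The constant in these two hunks is the per-sample Q14 step behind the comment: 0.0040 * 2^14 ≈ 65.5, truncated to 64 at narrowband, and dividing by fs_mult keeps the ramp close to the commented 0.64 per 20 ms at higher rates as well (64 * 160 / 2^14 = 0.625 at 8 kHz, 32 * 320 / 2^14 = 0.625 at 16 kHz). A small sketch of that check, illustrative only:

// Sketch only: the mute-factor ramp stays near 0.64 per 20 ms across rates.
#include <cassert>

int main() {
  for (int fs_mult = 1; fs_mult <= 4; fs_mult *= 2) {  // fs_mult = fs / 8000: 8, 16, 32 kHz.
    int increment = 64 / fs_mult;                      // Per-sample step in Q14.
    int samples_per_20ms = 160 * fs_mult;
    double ramp = static_cast<double>(increment) * samples_per_20ms / (1 << 14);
    assert(ramp > 0.6 && ramp < 0.7);                  // Roughly 0.64 regardless of rate.
  }
  return 0;
}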
@ -1605,7 +1605,7 @@ int NetEQTest_encode(int coder,
                      int useVAD,
                      int bitrate,
                      int numChannels) {
-  short cdlen = 0;
+  int cdlen = 0;
   int16_t* tempdata;
   static int first_cng = 1;
   int16_t tempLen;