git-svn-id: http://webrtc.googlecode.com/svn/trunk@166 4adac7df-926f-26a2-2b94-8c16560cd09d

This commit is contained in:
niklase@google.com 2011-07-07 08:46:41 +00:00
parent 569c805674
commit 5adc73aad3
1132 changed files with 0 additions and 408239 deletions

View File

@ -1,2 +0,0 @@
hlundin@google.com
tlegrand@google.com

View File

@ -1,215 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* This is the main API for NetEQ. Helper macros are located in webrtc_neteq_help_macros.h,
* while some internal API functions are found in webrtc_neteq_internal.h.
*/
/* NOTE(review): this #include sits outside the include guard below; assumed
 * harmless because typedefs.h presumably has its own guard -- confirm. */
#include "typedefs.h"
#ifndef WEBRTC_NETEQ_H
#define WEBRTC_NETEQ_H
#ifdef __cplusplus
extern "C"
{
#endif
/**********************************************************
* Definitions
*/
/* Identifiers for the decoders NetEQ can be configured with.
* The Reserved* entries are sentinels bracketing the valid range. */
enum WebRtcNetEQDecoder
{
kDecoderReservedStart, /* sentinel: first (invalid) entry */
kDecoderPCMu, /* G.711 mu-law */
kDecoderPCMa, /* G.711 A-law */
kDecoderILBC,
kDecoderISAC,
kDecoderISACswb,
kDecoderPCM16B,
kDecoderPCM16Bwb,
kDecoderPCM16Bswb32kHz,
kDecoderPCM16Bswb48kHz,
kDecoderG722,
kDecoderRED,
kDecoderAVT,
kDecoderCNG,
kDecoderArbitrary,
kDecoderG729,
kDecoderG729_1,
kDecoderG726_16,
kDecoderG726_24,
kDecoderG726_32,
kDecoderG726_40,
kDecoderG722_1_16,
kDecoderG722_1_24,
kDecoderG722_1_32,
kDecoderG722_1C_24,
kDecoderG722_1C_32,
kDecoderG722_1C_48,
kDecoderSPEEX_8,
kDecoderSPEEX_16,
kDecoderGSMFR,
kDecoderAMR,
kDecoderAMRWB,
kDecoderReservedEnd /* sentinel: one past the last valid entry */
};
/* Coarse network profiles used when dimensioning the packet buffer
* (see WebRtcNetEQ_GetRecommendedBufferSize). */
enum WebRtcNetEQNetworkType
{
kUDPNormal,
kUDPVideoSync,
kTCPNormal,
kTCPLargeJitter,
kTCPXLargeJitter
};
/* Classification of the latest output frame
* (see WebRtcNetEQ_GetSpeechOutputType). */
enum WebRtcNetEQOutputType
{
kOutputNormal,
kOutputPLC,
kOutputCNG,
kOutputPLCtoCNG,
kOutputVADPassive
};
/* Playout modes accepted by WebRtcNetEQ_SetPlayoutMode. */
enum WebRtcNetEQPlayoutMode
{
kPlayoutOn, kPlayoutOff, kPlayoutFax, kPlayoutStreaming
};
/* Available modes for background noise (inserted after long expands) */
enum WebRtcNetEQBGNMode
{
kBGNOn, /* default "normal" behavior with eternal noise */
kBGNFade, /* noise fades to zero after some time */
kBGNOff /* background noise is always zero */
};
/*************************************************
* Definitions of decoder calls and the default
* API function calls for each codec
*/
/* Decode callback: decodes |len| units of |encoded| into |decoded|;
* |speechType| receives a speech/CNG classification. The return value is
* assumed to be the number of produced samples, or < 0 on error --
* TODO(review): confirm against the codec wrappers. */
typedef WebRtc_Word16 (*WebRtcNetEQ_FuncDecode)(void* state, WebRtc_Word16* encoded,
WebRtc_Word16 len, WebRtc_Word16* decoded,
WebRtc_Word16* speechType);
/* Packet-loss concealment callback: produce |frames| concealment frames. */
typedef WebRtc_Word16 (*WebRtcNetEQ_FuncDecodePLC)(void* state, WebRtc_Word16* decoded,
WebRtc_Word16 frames);
/* Reset/initialize the decoder state. */
typedef WebRtc_Word16 (*WebRtcNetEQ_FuncDecodeInit)(void* state);
/* Hand a late-arriving packet to the decoder. */
typedef WebRtc_Word16 (*WebRtcNetEQ_FuncAddLatePkt)(void* state, WebRtc_Word16* encoded,
WebRtc_Word16 len);
/* Query decoder-specific "MD" info (exact semantics not visible here). */
typedef WebRtc_Word16 (*WebRtcNetEQ_FuncGetMDinfo)(void* state);
/* Query pitch information from the decoder. */
typedef WebRtc_Word16 (*WebRtcNetEQ_FuncGetPitchInfo)(void* state, WebRtc_Word16* encoded,
WebRtc_Word16* length);
/* Update the decoder's bandwidth estimate from packet-arrival data
* (used by codecs with built-in bandwidth estimation, e.g. iSAC). */
typedef WebRtc_Word16 (*WebRtcNetEQ_FuncUpdBWEst)(void* state, const WebRtc_UWord16 *encoded,
WebRtc_Word32 packet_size,
WebRtc_UWord16 rtp_seq_number,
WebRtc_UWord32 send_ts,
WebRtc_UWord32 arr_ts);
/* Query the decoder's last internal error code. */
typedef WebRtc_Word16 (*WebRtcNetEQ_FuncGetErrorCode)(void* state);
/**********************************************************
* Structures
*/
/* Descriptor for one decoder entry in the codec database
* (see WebRtcNetEQ_CodecDbAdd). Callbacks a codec does not
* provide are set to NULL. */
typedef struct
{
enum WebRtcNetEQDecoder codec; /* which decoder this entry describes */
WebRtc_Word16 payloadType; /* RTP payload type mapped to this decoder */
WebRtcNetEQ_FuncDecode funcDecode; /* main decode function */
WebRtcNetEQ_FuncDecode funcDecodeRCU; /* redundant (RCU) decode, or NULL */
WebRtcNetEQ_FuncDecodePLC funcDecodePLC; /* codec-native PLC, or NULL */
WebRtcNetEQ_FuncDecodeInit funcDecodeInit; /* decoder reset, or NULL */
WebRtcNetEQ_FuncAddLatePkt funcAddLatePkt; /* late-packet hand-off, or NULL */
WebRtcNetEQ_FuncGetMDinfo funcGetMDinfo; /* MD-info query, or NULL */
WebRtcNetEQ_FuncGetPitchInfo funcGetPitch; /* pitch-info query, or NULL */
WebRtcNetEQ_FuncUpdBWEst funcUpdBWEst; /* bandwidth-estimate update, or NULL */
WebRtcNetEQ_FuncGetErrorCode funcGetErrorCode; /* decoder error query, or NULL */
void* codec_state; /* opaque decoder instance handed to all callbacks */
WebRtc_UWord16 codec_fs; /* codec sample rate in Hz */
} WebRtcNetEQ_CodecDef;
/* RTCP receiver statistics (see WebRtcNetEQ_GetRTCPStats). Field names
* follow the standard RTCP receiver-report fields. */
typedef struct
{
WebRtc_UWord16 fraction_lost; /* fraction of packets lost */
WebRtc_UWord32 cum_lost; /* cumulative number of packets lost */
WebRtc_UWord32 ext_max; /* extended highest sequence number received */
WebRtc_UWord32 jitter; /* interarrival jitter */
} WebRtcNetEQ_RTCPStat;
/**********************************************************
* NETEQ Functions
*/
/* Info functions */
/* Recommended buffer length for WebRtcNetEQ_GetErrorName output. */
#define WEBRTC_NETEQ_MAX_ERROR_NAME 40
/* Write the NetEQ version string to |version|. */
int WebRtcNetEQ_GetVersion(WebRtc_Word8 *version);
/* Return the last error code recorded by instance |inst|. */
int WebRtcNetEQ_GetErrorCode(void *inst);
/* Translate |errorCode| to a printable name in |errorName|
* (at most |maxStrLen| characters). */
int WebRtcNetEQ_GetErrorName(int errorCode, WebRtc_Word8 *errorName, int maxStrLen);
/* Instance memory assign functions */
/* NetEQ runs in caller-provided memory: query the required size with
* WebRtcNetEQ_AssignSize, then bind the memory with WebRtcNetEQ_Assign. */
int WebRtcNetEQ_AssignSize(int *sizeinbytes);
int WebRtcNetEQ_Assign(void **inst, void *NETEQ_inst_Addr);
/* Compute recommended packet-buffer dimensions for the given codec set
* and network profile; results go to |MaxNoOfPackets| and |sizeinbytes|. */
int WebRtcNetEQ_GetRecommendedBufferSize(void *inst, enum WebRtcNetEQDecoder *codec,
int noOfCodecs, enum WebRtcNetEQNetworkType nwType,
int *MaxNoOfPackets, int *sizeinbytes);
/* Bind caller-provided packet-buffer memory to the instance. */
int WebRtcNetEQ_AssignBuffer(void *inst, int MaxNoOfPackets, void *NETEQ_Buffer_Addr,
int sizeinbytes);
/* Init functions */
/* Initialize the instance for sample rate |fs| (Hz). */
int WebRtcNetEQ_Init(void *inst, WebRtc_UWord16 fs);
int WebRtcNetEQ_SetAVTPlayout(void *inst, int PlayoutAVTon);
int WebRtcNetEQ_SetExtraDelay(void *inst, int DelayInMs);
int WebRtcNetEQ_SetPlayoutMode(void *inst, enum WebRtcNetEQPlayoutMode playoutMode);
int WebRtcNetEQ_SetBGNMode(void *inst, enum WebRtcNetEQBGNMode bgnMode);
int WebRtcNetEQ_GetBGNMode(const void *inst, enum WebRtcNetEQBGNMode *bgnMode);
/* Codec Database functions */
int WebRtcNetEQ_CodecDbReset(void *inst);
int WebRtcNetEQ_CodecDbAdd(void *inst, WebRtcNetEQ_CodecDef *codecInst);
int WebRtcNetEQ_CodecDbRemove(void *inst, enum WebRtcNetEQDecoder codec);
int WebRtcNetEQ_CodecDbGetSizeInfo(void *inst, WebRtc_Word16 *UsedEntries,
WebRtc_Word16 *MaxEntries);
int WebRtcNetEQ_CodecDbGetCodecInfo(void *inst, WebRtc_Word16 Entry,
enum WebRtcNetEQDecoder *codec);
/* Real-time functions */
/* Insert one received RTP datagram; |uw32_timeRec| is the receive time in
* timestamp units of the used codec (cf. WebRtcNetEQ_RecInRTPStruct). */
int WebRtcNetEQ_RecIn(void *inst, WebRtc_Word16 *p_w16datagramstart, WebRtc_Word16 w16_RTPlen,
WebRtc_UWord32 uw32_timeRec);
/* Produce the next block of output audio in |pw16_outData|; the number of
* samples written is returned through |pw16_len|. */
int WebRtcNetEQ_RecOut(void *inst, WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len);
int WebRtcNetEQ_GetRTCPStats(void *inst, WebRtcNetEQ_RTCPStat *RTCP_inst);
int WebRtcNetEQ_GetRTCPStatsNoReset(void *inst, WebRtcNetEQ_RTCPStat *RTCP_inst);
int WebRtcNetEQ_GetSpeechTimeStamp(void *inst, WebRtc_UWord32 *timestamp);
int WebRtcNetEQ_GetSpeechOutputType(void *inst, enum WebRtcNetEQOutputType *outputType);
/* VQmon related functions */
int WebRtcNetEQ_VQmonRecOutStatistics(void *inst, WebRtc_UWord16 *validVoiceDurationMs,
WebRtc_UWord16 *concealedVoiceDurationMs,
WebRtc_UWord8 *concealedVoiceFlags);
int WebRtcNetEQ_VQmonGetConfiguration(void *inst, WebRtc_UWord16 *absMaxDelayMs,
WebRtc_UWord8 *adaptationRate);
int WebRtcNetEQ_VQmonGetRxStatistics(void *inst, WebRtc_UWord16 *avgDelayMs,
WebRtc_UWord16 *maxDelayMs);
#ifdef __cplusplus
} /* extern "C" */
#endif
#endif /* WEBRTC_NETEQ_H */

View File

@ -1,365 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* This file contains some helper macros that can be used when loading the
* NetEQ codec database.
*/
#ifndef WEBRTC_NETEQ_HELP_MACROS_H
#define WEBRTC_NETEQ_HELP_MACROS_H
#ifndef NULL
#define NULL 0
#endif
/**********************************************************
* Help macros for NetEQ initialization
*/
/*
 * Initialize the codec-independent fields of a WebRtcNetEQ_CodecDef:
 * decoder identifier, RTP payload type, opaque codec state pointer and
 * codec sample rate (Hz). The per-codec function pointers are filled in
 * by the SET_*_FUNCTIONS macros below.
 *
 * Wrapped in do { } while (0) so the multi-statement macro behaves as a
 * single statement (safe inside an unbraced if/else); value arguments
 * are parenthesized against operator-precedence surprises.
 */
#define SET_CODEC_PAR(inst,decoder,pt,state,fs) \
do { \
    (inst).codec = (decoder); \
    (inst).payloadType = (pt); \
    (inst).codec_state = (state); \
    (inst).codec_fs = (fs); \
} while (0)
/* G.711 mu-law: plain decode only; no PLC/init callbacks. */
#define SET_PCMU_FUNCTIONS(inst) \
inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG711_DecodeU; \
inst.funcDecodeRCU=NULL; \
inst.funcDecodePLC=NULL; \
inst.funcDecodeInit=NULL; \
inst.funcAddLatePkt=NULL; \
inst.funcGetMDinfo=NULL; \
inst.funcGetPitch=NULL; \
inst.funcUpdBWEst=NULL; \
inst.funcGetErrorCode=NULL;
/* G.711 A-law: plain decode only. */
#define SET_PCMA_FUNCTIONS(inst) \
inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG711_DecodeA; \
inst.funcDecodeRCU=NULL; \
inst.funcDecodePLC=NULL; \
inst.funcDecodeInit=NULL; \
inst.funcAddLatePkt=NULL; \
inst.funcGetMDinfo=NULL; \
inst.funcGetPitch=NULL; \
inst.funcUpdBWEst=NULL; \
inst.funcGetErrorCode=NULL;
/* iLBC: decode, codec-native PLC, and 30 ms-mode decoder init. */
#define SET_ILBC_FUNCTIONS(inst) \
inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcIlbcfix_Decode; \
inst.funcDecodeRCU=NULL; \
inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcIlbcfix_NetEqPlc; \
inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcIlbcfix_Decoderinit30Ms; \
inst.funcAddLatePkt=NULL; \
inst.funcGetMDinfo=NULL; \
inst.funcGetPitch=NULL; \
inst.funcUpdBWEst=NULL; \
inst.funcGetErrorCode=NULL;
/* iSAC (float): decode, RCU decode, init, BWE update and error query. */
#define SET_ISAC_FUNCTIONS(inst) \
inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcIsac_Decode; \
inst.funcDecodeRCU=(WebRtcNetEQ_FuncDecode)WebRtcIsac_DecodeRcu; \
inst.funcDecodePLC=NULL; \
inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcIsac_DecoderInit; \
inst.funcAddLatePkt=NULL; \
inst.funcGetMDinfo=NULL; \
inst.funcGetPitch=NULL; \
inst.funcUpdBWEst=(WebRtcNetEQ_FuncUpdBWEst)WebRtcIsac_UpdateBwEstimate; \
inst.funcGetErrorCode=(WebRtcNetEQ_FuncGetErrorCode)WebRtcIsac_GetErrorCode;
/* iSAC fixed-point: as iSAC but without RCU decoding. */
#define SET_ISACfix_FUNCTIONS(inst) \
inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcIsacfix_Decode; \
inst.funcDecodeRCU=NULL; \
inst.funcDecodePLC=NULL; \
inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcIsacfix_DecoderInit; \
inst.funcAddLatePkt=NULL; \
inst.funcGetMDinfo=NULL; \
inst.funcGetPitch=NULL; \
inst.funcUpdBWEst=(WebRtcNetEQ_FuncUpdBWEst)WebRtcIsacfix_UpdateBwEstimate; \
inst.funcGetErrorCode=(WebRtcNetEQ_FuncGetErrorCode)WebRtcIsacfix_GetErrorCode;
/* iSAC super-wideband: same entry points as the float iSAC. */
#define SET_ISACSWB_FUNCTIONS(inst) \
inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcIsac_Decode; \
inst.funcDecodeRCU=(WebRtcNetEQ_FuncDecode)WebRtcIsac_DecodeRcu; \
inst.funcDecodePLC=NULL; \
inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcIsac_DecoderInit; \
inst.funcAddLatePkt=NULL; \
inst.funcGetMDinfo=NULL; \
inst.funcGetPitch=NULL; \
inst.funcUpdBWEst=(WebRtcNetEQ_FuncUpdBWEst)WebRtcIsac_UpdateBwEstimate; \
inst.funcGetErrorCode=(WebRtcNetEQ_FuncGetErrorCode)WebRtcIsac_GetErrorCode;
/* G.729: decode, codec-native PLC, decoder init. */
#define SET_G729_FUNCTIONS(inst) \
inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG729_Decode; \
inst.funcDecodeRCU=NULL; \
inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcG729_DecodePlc; \
inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG729_DecoderInit; \
inst.funcAddLatePkt=NULL; \
inst.funcGetMDinfo=NULL; \
inst.funcGetPitch=NULL; \
inst.funcUpdBWEst=NULL; \
inst.funcGetErrorCode=NULL;
/* G.729.1: decode, decoder init, and BWE update via DecodeBwe. */
#define SET_G729_1_FUNCTIONS(inst) \
inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG7291_Decode; \
inst.funcDecodeRCU=NULL; \
inst.funcDecodePLC=NULL; \
inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG7291_DecoderInit; \
inst.funcAddLatePkt=NULL; \
inst.funcGetMDinfo=NULL; \
inst.funcGetPitch=NULL; \
inst.funcUpdBWEst=(WebRtcNetEQ_FuncUpdBWEst)WebRtcG7291_DecodeBwe; \
inst.funcGetErrorCode=NULL;
/* 16-bit linear PCM, narrowband. */
#define SET_PCM16B_FUNCTIONS(inst) \
inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcPcm16b_DecodeW16; \
inst.funcDecodeRCU=NULL; \
inst.funcDecodePLC=NULL; \
inst.funcDecodeInit=NULL; \
inst.funcAddLatePkt=NULL; \
inst.funcGetMDinfo=NULL; \
inst.funcGetPitch=NULL; \
inst.funcUpdBWEst=NULL; \
inst.funcGetErrorCode=NULL;
/* 16-bit linear PCM, wideband (same entry points as narrowband). */
#define SET_PCM16B_WB_FUNCTIONS(inst) \
inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcPcm16b_DecodeW16; \
inst.funcDecodeRCU=NULL; \
inst.funcDecodePLC=NULL; \
inst.funcDecodeInit=NULL; \
inst.funcAddLatePkt=NULL; \
inst.funcGetMDinfo=NULL; \
inst.funcGetPitch=NULL; \
inst.funcUpdBWEst=NULL; \
inst.funcGetErrorCode=NULL;
/* 16-bit linear PCM, 32 kHz super-wideband. */
#define SET_PCM16B_SWB32_FUNCTIONS(inst) \
inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcPcm16b_DecodeW16; \
inst.funcDecodeRCU=NULL; \
inst.funcDecodePLC=NULL; \
inst.funcDecodeInit=NULL; \
inst.funcAddLatePkt=NULL; \
inst.funcGetMDinfo=NULL; \
inst.funcGetPitch=NULL; \
inst.funcUpdBWEst=NULL; \
inst.funcGetErrorCode=NULL;
/* 16-bit linear PCM, 48 kHz super-wideband. */
#define SET_PCM16B_SWB48_FUNCTIONS(inst) \
inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcPcm16b_DecodeW16; \
inst.funcDecodeRCU=NULL; \
inst.funcDecodePLC=NULL; \
inst.funcDecodeInit=NULL; \
inst.funcAddLatePkt=NULL; \
inst.funcGetMDinfo=NULL; \
inst.funcGetPitch=NULL; \
inst.funcUpdBWEst=NULL; \
inst.funcGetErrorCode=NULL;
/* G.722: decode and decoder init. */
#define SET_G722_FUNCTIONS(inst) \
inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG722_Decode; \
inst.funcDecodeRCU=NULL; \
inst.funcDecodePLC=NULL; \
inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG722_DecoderInit;\
inst.funcAddLatePkt=NULL; \
inst.funcGetMDinfo=NULL; \
inst.funcGetPitch=NULL; \
inst.funcUpdBWEst=NULL; \
inst.funcGetErrorCode=NULL;
/* G.722.1 at 16 kbps: decode, PLC, init. */
#define SET_G722_1_16_FUNCTIONS(inst) \
inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG7221_Decode16; \
inst.funcDecodeRCU=NULL; \
inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcG7221_DecodePlc16; \
inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG7221_DecoderInit16; \
inst.funcAddLatePkt=NULL; \
inst.funcGetMDinfo=NULL; \
inst.funcGetPitch=NULL; \
inst.funcUpdBWEst=NULL; \
inst.funcGetErrorCode=NULL;
/* G.722.1 at 24 kbps: decode, PLC, init. */
#define SET_G722_1_24_FUNCTIONS(inst) \
inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG7221_Decode24; \
inst.funcDecodeRCU=NULL; \
inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcG7221_DecodePlc24; \
inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG7221_DecoderInit24; \
inst.funcAddLatePkt=NULL; \
inst.funcGetMDinfo=NULL; \
inst.funcGetPitch=NULL; \
inst.funcUpdBWEst=NULL; \
inst.funcGetErrorCode=NULL;
/* G.722.1 at 32 kbps: decode, PLC, init. */
#define SET_G722_1_32_FUNCTIONS(inst) \
inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG7221_Decode32; \
inst.funcDecodeRCU=NULL; \
inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcG7221_DecodePlc32; \
inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG7221_DecoderInit32; \
inst.funcAddLatePkt=NULL; \
inst.funcGetMDinfo=NULL; \
inst.funcGetPitch=NULL; \
inst.funcUpdBWEst=NULL; \
inst.funcGetErrorCode=NULL;
/* G.722.1C at 24 kbps: decode, PLC, init. */
#define SET_G722_1C_24_FUNCTIONS(inst) \
inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG7221C_Decode24; \
inst.funcDecodeRCU=NULL; \
inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcG7221C_DecodePlc24; \
inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG7221C_DecoderInit24; \
inst.funcAddLatePkt=NULL; \
inst.funcGetMDinfo=NULL; \
inst.funcGetPitch=NULL; \
inst.funcUpdBWEst=NULL; \
inst.funcGetErrorCode=NULL;
/* G.722.1C at 32 kbps: decode, PLC, init. */
#define SET_G722_1C_32_FUNCTIONS(inst) \
inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG7221C_Decode32; \
inst.funcDecodeRCU=NULL; \
inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcG7221C_DecodePlc32; \
inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG7221C_DecoderInit32; \
inst.funcAddLatePkt=NULL; \
inst.funcGetMDinfo=NULL; \
inst.funcGetPitch=NULL; \
inst.funcUpdBWEst=NULL; \
inst.funcGetErrorCode=NULL;
/* G.722.1C at 48 kbps: decode, PLC, init. */
#define SET_G722_1C_48_FUNCTIONS(inst) \
inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG7221C_Decode48; \
inst.funcDecodeRCU=NULL; \
inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcG7221C_DecodePlc48; \
inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG7221C_DecoderInit48; \
inst.funcAddLatePkt=NULL; \
inst.funcGetMDinfo=NULL; \
inst.funcGetPitch=NULL; \
inst.funcUpdBWEst=NULL; \
inst.funcGetErrorCode=NULL;
/* AMR: decode, PLC, init. */
#define SET_AMR_FUNCTIONS(inst) \
inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcAmr_Decode; \
inst.funcDecodeRCU=NULL; \
inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcAmr_DecodePlc; \
inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcAmr_DecoderInit; \
inst.funcAddLatePkt=NULL; \
inst.funcGetMDinfo=NULL; \
inst.funcGetPitch=NULL; \
inst.funcUpdBWEst=NULL; \
inst.funcGetErrorCode=NULL;
/* AMR-WB: decode, PLC, init. */
#define SET_AMRWB_FUNCTIONS(inst) \
inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcAmrWb_Decode; \
inst.funcDecodeRCU=NULL; \
inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcAmrWb_DecodePlc; \
inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcAmrWb_DecoderInit; \
inst.funcAddLatePkt=NULL; \
inst.funcGetMDinfo=NULL; \
inst.funcGetPitch=NULL; \
inst.funcUpdBWEst=NULL; \
inst.funcGetErrorCode=NULL;
/* GSM full-rate: decode, PLC, init. */
#define SET_GSMFR_FUNCTIONS(inst) \
inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcGSMFR_Decode; \
inst.funcDecodeRCU=NULL; \
inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcGSMFR_DecodePlc; \
inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcGSMFR_DecoderInit; \
inst.funcAddLatePkt=NULL; \
inst.funcGetMDinfo=NULL; \
inst.funcGetPitch=NULL; \
inst.funcUpdBWEst=NULL; \
inst.funcGetErrorCode=NULL;
/* G.726 at 16 kbps: decode and init. */
#define SET_G726_16_FUNCTIONS(inst) \
inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG726_decode16; \
inst.funcDecodeRCU=NULL; \
inst.funcDecodePLC=NULL; \
inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG726_decoderinit16; \
inst.funcAddLatePkt=NULL; \
inst.funcGetMDinfo=NULL; \
inst.funcGetPitch=NULL; \
inst.funcUpdBWEst=NULL; \
inst.funcGetErrorCode=NULL;
/* G.726 at 24 kbps: decode and init. */
#define SET_G726_24_FUNCTIONS(inst) \
inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG726_decode24; \
inst.funcDecodeRCU=NULL; \
inst.funcDecodePLC=NULL; \
inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG726_decoderinit24; \
inst.funcAddLatePkt=NULL; \
inst.funcGetMDinfo=NULL; \
inst.funcGetPitch=NULL; \
inst.funcUpdBWEst=NULL; \
inst.funcGetErrorCode=NULL;
/* G.726 at 32 kbps: decode and init. */
#define SET_G726_32_FUNCTIONS(inst) \
inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG726_decode32; \
inst.funcDecodeRCU=NULL; \
inst.funcDecodePLC=NULL; \
inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG726_decoderinit32; \
inst.funcAddLatePkt=NULL; \
inst.funcGetMDinfo=NULL; \
inst.funcGetPitch=NULL; \
inst.funcUpdBWEst=NULL; \
inst.funcGetErrorCode=NULL;
/* G.726 at 40 kbps: decode and init. */
#define SET_G726_40_FUNCTIONS(inst) \
inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG726_decode40; \
inst.funcDecodeRCU=NULL; \
inst.funcDecodePLC=NULL; \
inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG726_decoderinit40; \
inst.funcAddLatePkt=NULL; \
inst.funcGetMDinfo=NULL; \
inst.funcGetPitch=NULL; \
inst.funcUpdBWEst=NULL; \
inst.funcGetErrorCode=NULL;
/* Speex: decode, PLC, init (shared by 8 and 16 kHz entries). */
#define SET_SPEEX_FUNCTIONS(inst) \
inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcSpeex_Decode; \
inst.funcDecodeRCU=NULL; \
inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcSpeex_DecodePlc; \
inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcSpeex_DecoderInit; \
inst.funcAddLatePkt=NULL; \
inst.funcGetMDinfo=NULL; \
inst.funcGetPitch=NULL; \
inst.funcUpdBWEst=NULL; \
inst.funcGetErrorCode=NULL;
/* RED (redundancy): no external decoder callbacks are registered. */
#define SET_RED_FUNCTIONS(inst) \
inst.funcDecode=NULL; \
inst.funcDecodeRCU=NULL; \
inst.funcDecodePLC=NULL; \
inst.funcDecodeInit=NULL; \
inst.funcAddLatePkt=NULL; \
inst.funcGetMDinfo=NULL; \
inst.funcGetPitch=NULL; \
inst.funcUpdBWEst=NULL; \
inst.funcGetErrorCode=NULL;
/* AVT (telephone events): no external decoder callbacks are registered. */
#define SET_AVT_FUNCTIONS(inst) \
inst.funcDecode=NULL; \
inst.funcDecodeRCU=NULL; \
inst.funcDecodePLC=NULL; \
inst.funcDecodeInit=NULL; \
inst.funcAddLatePkt=NULL; \
inst.funcGetMDinfo=NULL; \
inst.funcGetPitch=NULL; \
inst.funcUpdBWEst=NULL; \
inst.funcGetErrorCode=NULL;
/* CNG (comfort noise): no external decoder callbacks are registered. */
#define SET_CNG_FUNCTIONS(inst) \
inst.funcDecode=NULL; \
inst.funcDecodeRCU=NULL; \
inst.funcDecodePLC=NULL; \
inst.funcDecodeInit=NULL; \
inst.funcAddLatePkt=NULL; \
inst.funcGetMDinfo=NULL; \
inst.funcGetPitch=NULL; \
inst.funcUpdBWEst=NULL; \
inst.funcGetErrorCode=NULL;
#endif /* WEBRTC_NETEQ_HELP_MACROS_H */

View File

@ -1,274 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* This file contains the internal API functions.
*/
#include "typedefs.h"
#ifndef WEBRTC_NETEQ_INTERNAL_H
#define WEBRTC_NETEQ_INTERNAL_H
#ifdef __cplusplus
extern "C"
{
#endif
/* Parsed header fields of one incoming RTP packet, consumed by
* WebRtcNetEQ_RecInRTPStruct below. */
typedef struct
{
WebRtc_UWord8 payloadType; /* RTP payload type */
WebRtc_UWord16 sequenceNumber; /* RTP sequence number */
WebRtc_UWord32 timeStamp; /* RTP timestamp */
WebRtc_UWord32 SSRC; /* synchronization source identifier */
WebRtc_UWord8 markerBit; /* RTP header marker bit */
} WebRtcNetEQ_RTPInfo;
/****************************************************************************
* WebRtcNetEQ_RecInRTPStruct(...)
*
* Alternative RecIn function, used when the RTP data has already been
* parsed into an RTP info struct (WebRtcNetEQ_RTPInfo).
*
* Input:
* - inst : NetEQ instance
* - rtpInfo : Pointer to RTP info
* - payloadPtr : Pointer to the RTP payload (first byte after header)
* - payloadLenBytes : Length (in bytes) of the payload in payloadPtr
* - timeRec : Receive time (in timestamps of the used codec)
*
* Return value : 0 - Ok
* -1 - Error
*/
int WebRtcNetEQ_RecInRTPStruct(void *inst, WebRtcNetEQ_RTPInfo *rtpInfo,
const WebRtc_UWord8 *payloadPtr, WebRtc_Word16 payloadLenBytes,
WebRtc_UWord32 timeRec);
/****************************************************************************
* WebRtcNetEQ_GetMasterSlaveInfoSize(...)
*
* Get size in bytes for master/slave struct msInfo used in
* WebRtcNetEQ_RecOutMasterSlave.
*
* Return value : Struct size in bytes
*
*/
/* Declared with (void): in C an empty parameter list is an old-style
* declaration that leaves the arguments unchecked at call sites. */
int WebRtcNetEQ_GetMasterSlaveInfoSize(void);
/****************************************************************************
* WebRtcNetEQ_RecOutMasterSlave(...)
*
* RecOut function for running several NetEQ instances in master/slave mode.
* One master can be used to control several slaves.
* The MasterSlaveInfo struct must be allocated outside NetEQ.
* Use function WebRtcNetEQ_GetMasterSlaveInfoSize to get the size needed.
*
* Input:
* - inst : NetEQ instance
* - isMaster : Non-zero indicates that this is the master channel
* - msInfo : (slave only) Information from master
*
* Output:
* - inst : Updated NetEQ instance
* - pw16_outData : Pointer to vector where output should be written
* - pw16_len : Pointer to variable where output length is returned
* - msInfo : (master only) Information to slave(s)
*
* Return value : 0 - Ok
* -1 - Error
*/
int WebRtcNetEQ_RecOutMasterSlave(void *inst, WebRtc_Word16 *pw16_outData,
WebRtc_Word16 *pw16_len, void *msInfo,
WebRtc_Word16 isMaster);
/* "In-call" network statistics, reported (and reset) by
* WebRtcNetEQ_GetNetworkStatistics. */
typedef struct
{
WebRtc_UWord16 currentBufferSize; /* current jitter buffer size in ms */
WebRtc_UWord16 preferredBufferSize; /* preferred (optimal) buffer size in ms */
WebRtc_UWord16 currentPacketLossRate; /* loss rate (network + late) (in Q14) */
WebRtc_UWord16 currentDiscardRate; /* late loss rate (in Q14) */
WebRtc_UWord16 currentExpandRate; /* fraction (of original stream) of synthesized speech
* inserted through expansion (in Q14) */
WebRtc_UWord16 currentPreemptiveRate; /* fraction of synthesized speech inserted through
* pre-emptive expansion (in Q14) */
WebRtc_UWord16 currentAccelerateRate; /* fraction of data removed through acceleration
* (in Q14) */
} WebRtcNetEQ_NetworkStatistics;
/* "Post-call" jitter statistics, reported by WebRtcNetEQ_GetJitterStatistics
* and reset only by WebRtcNetEQ_ResetJitterStatistics. */
typedef struct
{
WebRtc_UWord32 jbMinSize; /* smallest Jitter Buffer size during call in ms */
WebRtc_UWord32 jbMaxSize; /* largest Jitter Buffer size during call in ms */
WebRtc_UWord32 jbAvgSize; /* the average JB size, measured over time - ms */
WebRtc_UWord32 jbChangeCount; /* number of times the Jitter Buffer changed
* (using Accelerate or Pre-emptive Expand) */
WebRtc_UWord32 lateLossMs; /* amount (in ms) of audio data received late */
WebRtc_UWord32 accelerateMs; /* milliseconds removed to reduce jitter buffer size */
WebRtc_UWord32 flushedMs; /* milliseconds discarded through buffer flushing */
WebRtc_UWord32 generatedSilentMs; /* milliseconds of generated silence */
WebRtc_UWord32 interpolatedVoiceMs; /* milliseconds of synthetic audio data
* (non-background noise) */
WebRtc_UWord32 interpolatedSilentMs; /* milliseconds of synthetic audio data
* (background noise level) */
WebRtc_UWord32 countExpandMoreThan120ms; /* count of tiny expansions in output audio */
WebRtc_UWord32 countExpandMoreThan250ms; /* count of small expansions in output audio */
WebRtc_UWord32 countExpandMoreThan500ms; /* count of medium expansions in output audio */
WebRtc_UWord32 countExpandMoreThan2000ms; /* count of long expansions in output audio */
WebRtc_UWord32 longestExpandDurationMs; /* duration of longest audio drop-out */
WebRtc_UWord32 countIAT500ms; /* count of times we got small network outage (inter-arrival
* time in [500, 1000) ms) */
WebRtc_UWord32 countIAT1000ms; /* count of times we got medium network outage
* (inter-arrival time in [1000, 2000) ms) */
WebRtc_UWord32 countIAT2000ms; /* count of times we got large network outage
* (inter-arrival time >= 2000 ms) */
WebRtc_UWord32 longestIATms; /* longest packet inter-arrival time in ms */
WebRtc_UWord32 minPacketDelayMs; /* min time incoming Packet "waited" to be played */
WebRtc_UWord32 maxPacketDelayMs; /* max time incoming Packet "waited" to be played */
WebRtc_UWord32 avgPacketDelayMs; /* avg time incoming Packet "waited" to be played */
} WebRtcNetEQ_JitterStatistics;
/*
* Get the "in-call" statistics from NetEQ.
* The statistics are reset after the query.
*/
int WebRtcNetEQ_GetNetworkStatistics(void *inst, WebRtcNetEQ_NetworkStatistics *stats);
/*
* Get the optimal buffer size calculated for the current network conditions.
* (Size is in ms; cf. preferredBufferSize in WebRtcNetEQ_NetworkStatistics.)
*/
int WebRtcNetEQ_GetPreferredBufferSize(void *inst, WebRtc_UWord16 *preferredBufferSize);
/*
* Get the current buffer size in ms. Return value is 0 if ok, -1 if error.
*/
int WebRtcNetEQ_GetCurrentDelay(const void *inst, WebRtc_UWord16 *currentDelayMs);
/*
* Get the "post-call" jitter statistics from NetEQ.
* The statistics are not reset by the query. Use the function
* WebRtcNetEQ_ResetJitterStatistics to reset the statistics.
*/
int WebRtcNetEQ_GetJitterStatistics(void *inst, WebRtcNetEQ_JitterStatistics *jitterStats);
/*
* Reset "post-call" jitter statistics.
*/
int WebRtcNetEQ_ResetJitterStatistics(void *inst);
/***********************************************/
/* Functions for post-decode VAD functionality */
/***********************************************/
/* NetEQ must be compiled with the flag NETEQ_VAD enabled for these functions to work. */
/*
* VAD function pointer types
*
* These function pointers match the definitions of webrtc VAD functions WebRtcVad_Init,
* WebRtcVad_set_mode and WebRtcVad_Process, respectively, all found in webrtc_vad.h.
*/
typedef WebRtc_Word16 (*WebRtcNetEQ_VADInitFunction)(void *VAD_inst);
typedef WebRtc_Word16 (*WebRtcNetEQ_VADSetmodeFunction)(void *VAD_inst, WebRtc_Word16 mode);
typedef WebRtc_Word16 (*WebRtcNetEQ_VADFunction)(void *VAD_inst, WebRtc_Word16 fs,
WebRtc_Word16 *frame, WebRtc_Word16 frameLen);
/****************************************************************************
* WebRtcNetEQ_SetVADInstance(...)
*
* Provide a pointer to an allocated VAD instance. If function is never
* called or it is called with NULL pointer as VAD_inst, the post-decode
* VAD functionality is disabled. Also provide pointers to init, setmode
* and VAD functions. These are typically pointers to WebRtcVad_Init,
* WebRtcVad_set_mode and WebRtcVad_Process, respectively, all found in the
* interface file webrtc_vad.h.
*
* Input:
* - NetEQ_inst : NetEQ instance
* - VAD_inst : VAD instance
* - initFunction : Pointer to VAD init function
* - setmodeFunction : Pointer to VAD setmode function
* - VADFunction : Pointer to VAD function
*
* Output:
* - NetEQ_inst : Updated NetEQ instance
*
* Return value : 0 - Ok
* -1 - Error
*/
int WebRtcNetEQ_SetVADInstance(void *NetEQ_inst, void *VAD_inst,
WebRtcNetEQ_VADInitFunction initFunction,
WebRtcNetEQ_VADSetmodeFunction setmodeFunction,
WebRtcNetEQ_VADFunction VADFunction);
/****************************************************************************
* WebRtcNetEQ_SetVADMode(...)
*
* Pass an aggressiveness mode parameter to the post-decode VAD instance.
* If this function is never called, mode 0 (quality mode) is used as default.
*
* Input:
* - NetEQ_inst : NetEQ instance
* - mode : mode parameter (same range as WebRtc VAD mode)
*
* Output:
* - NetEQ_inst : Updated NetEQ instance
*
* Return value : 0 - Ok
* -1 - Error
*/
int WebRtcNetEQ_SetVADMode(void *NetEQ_inst, WebRtc_Word16 mode);
/****************************************************************************
* WebRtcNetEQ_RecOutNoDecode(...)
*
* Special RecOut that does not do any decoding.
*
* Input:
* - inst : NetEQ instance
*
* Output:
* - inst : Updated NetEQ instance
* - pw16_outData : Pointer to vector where output should be written
* - pw16_len : Pointer to variable where output length is returned
*
* Return value : 0 - Ok
* -1 - Error
*/
int WebRtcNetEQ_RecOutNoDecode(void *inst, WebRtc_Word16 *pw16_outData,
WebRtc_Word16 *pw16_len);
/****************************************************************************
* WebRtcNetEQ_FlushBuffers(...)
*
* Flush packet and speech buffers. Does not reset codec database or
* jitter statistics.
*
* Input:
* - inst : NetEQ instance
*
* Output:
* - inst : Updated NetEQ instance
*
* Return value : 0 - Ok
* -1 - Error
*/
int WebRtcNetEQ_FlushBuffers(void *inst);
#ifdef __cplusplus
} /* extern "C" */
#endif
#endif /* WEBRTC_NETEQ_INTERNAL_H */

View File

@ -1,87 +0,0 @@
# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
# Android.mk build description for the NetEQ static library
# (libwebrtc_neteq), built in ARM mode.
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_ARM_MODE := arm
LOCAL_MODULE_CLASS := STATIC_LIBRARIES
LOCAL_MODULE := libwebrtc_neteq
LOCAL_MODULE_TAGS := optional
LOCAL_GENERATED_SOURCES :=
# All NetEQ C sources.
LOCAL_SRC_FILES := accelerate.c \
automode.c \
bgn_update.c \
bufstats_decision.c \
cng_internal.c \
codec_db.c \
correlator.c \
dsp.c \
dsp_helpfunctions.c \
dtmf_buffer.c \
dtmf_tonegen.c \
expand.c \
mcu_address_init.c \
mcu_dsp_common.c \
mcu_reset.c \
merge.c \
min_distortion.c \
mix_voice_unvoice.c \
mute_signal.c \
normal.c \
packet_buffer.c \
peak_detection.c \
preemptive_expand.c \
random_vector.c \
recin.c \
recout.c \
rtcp.c \
rtp.c \
set_fs.c \
signal_mcu.c \
split_and_insert.c \
unmute_signal.c \
webrtc_neteq.c
# Flags passed to both C and C++ files.
MY_CFLAGS :=
MY_CFLAGS_C :=
# Preprocessor defines for the Android/Linux target.
MY_DEFS := '-DNO_TCMALLOC' \
'-DNO_HEAPCHECKER' \
'-DWEBRTC_TARGET_PC' \
'-DWEBRTC_LINUX' \
'-DWEBRTC_THREAD_RR' \
'-DNETEQ_VOICEENGINE_CODECS' \
'-DWEBRTC_ANDROID' \
'-DANDROID'
LOCAL_CFLAGS := $(MY_CFLAGS_C) $(MY_CFLAGS) $(MY_DEFS)
# Include paths placed before CFLAGS/CPPFLAGS
LOCAL_C_INCLUDES := $(LOCAL_PATH)/../../../../.. \
$(LOCAL_PATH)/../interface \
$(LOCAL_PATH)/../../../codecs/CNG/main/interface \
$(LOCAL_PATH)/../../../../../common_audio/signal_processing_library/main/interface
# Flags passed to only C++ (and not C) files.
LOCAL_CPPFLAGS :=
LOCAL_LDFLAGS :=
LOCAL_STATIC_LIBRARIES :=
LOCAL_SHARED_LIBRARIES := libcutils \
libdl \
libstlport
LOCAL_ADDITIONAL_DEPENDENCIES :=
# NOTE(review): MY_WEBRTC_NDK_BUILD is presumably set by an outer build
# when using the NDK flow; the library is only registered when it is not
# "true" -- confirm against the top-level build files.
ifneq ($(MY_WEBRTC_NDK_BUILD),true)
#include external/stlport/libstlport.mk
include $(BUILD_STATIC_LIBRARY)
endif

View File

@ -1,489 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* This file contains the Accelerate algorithm that is used to reduce
* the delay by removing a part of the audio stream.
*/
#include "dsp.h"
#include "signal_processing_library.h"
#include "dsp_helpfunctions.h"
#include "neteq_error_codes.h"
/* Correlation length used for the signal matching. */
#define ACCELERATE_CORR_LEN 50
/* Smallest and largest lag searched (in downsampled samples). */
#define ACCELERATE_MIN_LAG 10
#define ACCELERATE_MAX_LAG 60
/* Downsampled-speech length needed to evaluate all lags. */
#define ACCELERATE_DOWNSAMPLED_LEN (ACCELERATE_CORR_LEN + ACCELERATE_MAX_LAG)
/* Scratch usage:
Type Name size startpos endpos
WebRtc_Word16 pw16_downSampSpeech 110 0 109
WebRtc_Word32 pw32_corr 2*50 110 209
WebRtc_Word16 pw16_corr 50 0 49
Total: 110+2*50
*/
/* Offsets into the caller-provided scratch area (see table above).
* pw16_corr shares offset 0 with pw16_downSampSpeech -- presumably the
* downsampled speech is no longer needed once pw16_corr is written;
* confirm against the function body before changing the layout. */
#define SCRATCH_PW16_DS_SPEECH 0
#define SCRATCH_PW32_CORR ACCELERATE_DOWNSAMPLED_LEN
#define SCRATCH_PW16_CORR 0
/****************************************************************************
* WebRtcNetEQ_Accelerate(...)
*
* This function tries to shorten the audio data by removing one or several
* pitch periods. The operation is only carried out if the correlation is
* strong or if the signal energy is very low.
*
* Input:
* - inst : NetEQ DSP instance
* - scratchPtr : Pointer to scratch vector.
* - decoded : Pointer to newly decoded speech.
* - len : Length of decoded speech.
* - BGNonly : If non-zero, Accelerate will only remove the last
* DEFAULT_TIME_ADJUST seconds of the input.
* No signal matching is done.
*
* Output:
* - inst : Updated instance
* - outData : Pointer to a memory space where the output data
* should be stored
* - pw16_len : Number of samples written to outData.
*
* Return value : 0 - Ok
* <0 - Error
*/
int WebRtcNetEQ_Accelerate(DSPInst_t *inst,
#ifdef SCRATCH
                           WebRtc_Word16 *pw16_scratchPtr,
#endif
                           const WebRtc_Word16 *pw16_decoded, int len,
                           WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len,
                           WebRtc_Word16 BGNonly)
{
#ifdef SCRATCH
    /* Use scratch memory for internal temporary vectors (layout documented
     in the SCRATCH_* defines above). */
    WebRtc_Word16 *pw16_downSampSpeech = pw16_scratchPtr + SCRATCH_PW16_DS_SPEECH;
    WebRtc_Word32 *pw32_corr = (WebRtc_Word32*) (pw16_scratchPtr + SCRATCH_PW32_CORR);
    WebRtc_Word16 *pw16_corr = pw16_scratchPtr + SCRATCH_PW16_CORR;
#else
    /* Allocate memory for temporary vectors on the stack */
    WebRtc_Word16 pw16_downSampSpeech[ACCELERATE_DOWNSAMPLED_LEN];
    WebRtc_Word32 pw32_corr[ACCELERATE_CORR_LEN];
    WebRtc_Word16 pw16_corr[ACCELERATE_CORR_LEN];
#endif
    WebRtc_Word16 w16_decodedMax = 0;
    WebRtc_Word16 w16_tmp;
    WebRtc_Word16 w16_tmp2;
    WebRtc_Word32 w32_tmp;
    WebRtc_Word32 w32_tmp2;
    /* Correlation search range, expressed in the 4 kHz downsampled domain */
    const WebRtc_Word16 w16_startLag = ACCELERATE_MIN_LAG;
    const WebRtc_Word16 w16_endLag = ACCELERATE_MAX_LAG;
    const WebRtc_Word16 w16_corrLen = ACCELERATE_CORR_LEN;
    const WebRtc_Word16 *pw16_vec1, *pw16_vec2;
    WebRtc_Word16 *pw16_vectmp;
    WebRtc_Word16 w16_inc, w16_startfact; /* cross-fade slope and start factor (Q14) */
    WebRtc_Word16 w16_bestIndex, w16_bestVal; /* best lag (samples) and its corr value */
    WebRtc_Word16 w16_VAD = 1; /* simple VAD decision; 1 = active speech */
    WebRtc_Word16 fsMult; /* sample rate divided by 8000 */
    WebRtc_Word16 fsMult120;
    WebRtc_Word32 w32_en1, w32_en2, w32_cc;
    WebRtc_Word16 w16_en1, w16_en2;
    WebRtc_Word16 w16_en1Scale, w16_en2Scale;
    WebRtc_Word16 w16_sqrtEn1En2;
    WebRtc_Word16 w16_bestCorr = 0; /* normalized correlation at best lag (Q14) */
    int ok;
#ifdef NETEQ_STEREO
    MasterSlaveInfo *msInfo = inst->msInfo;
#endif

    fsMult = WebRtcNetEQ_CalcFsMult(inst->fs); /* Calculate fs/8000 */

    /* Pre-calculate common multiplication with fsMult */
    fsMult120 = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16(fsMult, 120); /* 15 ms */

    inst->ExpandInst.w16_consecExp = 0; /* Last was not expand any more */

    /* Sanity check for len variable; must be (almost) 30 ms
     (120*fsMult + max(bestIndex)) */
    if (len < (WebRtc_Word16) WEBRTC_SPL_MUL_16_16((120 + 119), fsMult))
    {
        /* Length of decoded data too short */
        inst->w16_mode = MODE_UNSUCCESS_ACCELERATE;
        *pw16_len = len;

        /* simply move all data from decoded to outData */
        WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (WebRtc_Word16) len);

        return NETEQ_OTHER_ERROR;
    }

    /***********************************/
    /* Special operations for BGN only */
    /***********************************/

    /* Check if "background noise only" flag is set */
    if (BGNonly)
    {
        /* special operation for BGN only; simply remove a chunk of data
         without any signal matching */
        w16_bestIndex = DEFAULT_TIME_ADJUST * WEBRTC_SPL_LSHIFT_W16(fsMult, 3); /* X*fs/1000 */

        /* Sanity check for bestIndex */
        if (w16_bestIndex > len)
        { /* not good, do nothing instead */
            inst->w16_mode = MODE_UNSUCCESS_ACCELERATE;
            *pw16_len = len;

            /* simply move all data from decoded to outData */
            WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (WebRtc_Word16) len);

            return NETEQ_OTHER_ERROR;
        }

        /* set length parameter */
        *pw16_len = len - w16_bestIndex; /* we remove bestIndex samples */

        /* copy to output */
        WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, *pw16_len);

        /* set mode */
        inst->w16_mode = MODE_LOWEN_ACCELERATE;

        /* update statistics */
        inst->statInst.accelerateLength += w16_bestIndex;

        return 0;
    } /* end of special code for BGN mode */

#ifdef NETEQ_STEREO

    /* Sanity for msInfo */
    if (msInfo == NULL)
    {
        /* this should not happen here */
        return MASTER_SLAVE_ERROR;
    }

    if (msInfo->msMode != NETEQ_SLAVE)
    {
        /* Find correlation lag only for non-slave instances;
         a slave reuses the lag found by its master (below). */
#endif

        /****************************************************************/
        /* Find the strongest correlation lag by downsampling to 4 kHz, */
        /* calculating correlation for downsampled signal and finding   */
        /* the strongest correlation peak.                              */
        /****************************************************************/

        /* find maximum absolute value */
        w16_decodedMax = WebRtcSpl_MaxAbsValueW16(pw16_decoded, (WebRtc_Word16) len);

        /* downsample the decoded speech to 4 kHz */
        ok = WebRtcNetEQ_DownSampleTo4kHz(pw16_decoded, len, inst->fs, pw16_downSampSpeech,
            ACCELERATE_DOWNSAMPLED_LEN, 1 /* compensate delay*/);
        if (ok != 0)
        {
            /* error */
            inst->w16_mode = MODE_UNSUCCESS_ACCELERATE;
            *pw16_len = len;

            /* simply move all data from decoded to outData */
            WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (WebRtc_Word16) len);

            return NETEQ_OTHER_ERROR;
        }

        /*
         * Set scaling factor for cross correlation to protect against overflow
         * (log2(50) => 6)
         */
        w16_tmp = 6 - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_decodedMax, w16_decodedMax));
        w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);

        /* Perform correlation from lag 10 to lag 60 in 4 kHz domain */
        WebRtcNetEQ_CrossCorr(
            pw32_corr, &pw16_downSampSpeech[w16_endLag],
            &pw16_downSampSpeech[w16_endLag - w16_startLag], w16_corrLen,
            (WebRtc_Word16) (w16_endLag - w16_startLag), w16_tmp, -1);

        /* Normalize correlation to 14 bits and put in a WebRtc_Word16 vector */
        w32_tmp = WebRtcSpl_MaxAbsValueW32(pw32_corr, w16_corrLen);
        w16_tmp = 17 - WebRtcSpl_NormW32(w32_tmp);
        w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);

        WebRtcSpl_VectorBitShiftW32ToW16(pw16_corr, w16_corrLen, pw32_corr, w16_tmp);

#ifdef NETEQ_STEREO
    } /* end if (msInfo->msMode != NETEQ_SLAVE) */

    if ((msInfo->msMode == NETEQ_MASTER) || (msInfo->msMode == NETEQ_MONO))
    {
        /* Find the strongest correlation peak by using the parabolic fit method */
        WebRtcNetEQ_PeakDetection(pw16_corr, (WebRtc_Word16) w16_corrLen, 1, fsMult,
            &w16_bestIndex, &w16_bestVal);
        /* 0 <= bestIndex <= (2*corrLen - 1)*fsMult = 99*fsMult */

        /* Compensate bestIndex for displaced starting position */
        w16_bestIndex = w16_bestIndex + w16_startLag * WEBRTC_SPL_LSHIFT_W16(fsMult, 1);
        /* 20*fsMult <= bestIndex <= 119*fsMult */

        /* Communicate the chosen lag to the slave instance(s) */
        msInfo->bestIndex = w16_bestIndex;
    }
    else if (msInfo->msMode == NETEQ_SLAVE)
    {
        if (msInfo->extraInfo == ACC_FAIL)
        {
            /* Master has signaled an unsuccessful accelerate */
            w16_bestIndex = 0;
        }
        else
        {
            /* Get best index from master */
            w16_bestIndex = msInfo->bestIndex;
        }
    }
    else
    {
        /* Invalid mode */
        return MASTER_SLAVE_ERROR;
    }

#else /* NETEQ_STEREO */

    /* Find the strongest correlation peak by using the parabolic fit method */
    WebRtcNetEQ_PeakDetection(pw16_corr, (WebRtc_Word16) w16_corrLen, 1, fsMult,
        &w16_bestIndex, &w16_bestVal);
    /* 0 <= bestIndex <= (2*corrLen - 1)*fsMult = 99*fsMult */

    /* Compensate bestIndex for displaced starting position */
    w16_bestIndex = w16_bestIndex + w16_startLag * WEBRTC_SPL_LSHIFT_W16(fsMult, 1);
    /* 20*fsMult <= bestIndex <= 119*fsMult */

#endif /* NETEQ_STEREO */

#ifdef NETEQ_STEREO

    if (msInfo->msMode != NETEQ_SLAVE)
    {
        /* Calculate correlation only for non-slave instances */

#endif /* NETEQ_STEREO */

        /*****************************************************/
        /* Calculate correlation bestCorr for the found lag. */
        /* Also do a simple VAD decision.                    */
        /*****************************************************/

        /*
         * Calculate scaling to ensure that bestIndex samples can be square-summed
         * without overflowing
         */
        w16_tmp = (31
            - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_decodedMax, w16_decodedMax)));
        w16_tmp += (31 - WebRtcSpl_NormW32(w16_bestIndex));
        w16_tmp -= 31;
        w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);

        /* vec1 starts at 15 ms minus one pitch period */
        pw16_vec1 = &pw16_decoded[fsMult120 - w16_bestIndex];
        /* vec2 start at 15 ms */
        pw16_vec2 = &pw16_decoded[fsMult120];

        /* Calculate energies for vec1 and vec2 */
        w32_en1 = WebRtcNetEQ_DotW16W16((WebRtc_Word16*) pw16_vec1,
            (WebRtc_Word16*) pw16_vec1, w16_bestIndex, w16_tmp);
        w32_en2 = WebRtcNetEQ_DotW16W16((WebRtc_Word16*) pw16_vec2,
            (WebRtc_Word16*) pw16_vec2, w16_bestIndex, w16_tmp);

        /* Calculate cross-correlation at the found lag */
        w32_cc = WebRtcNetEQ_DotW16W16((WebRtc_Word16*) pw16_vec1, (WebRtc_Word16*) pw16_vec2,
            w16_bestIndex, w16_tmp);

        /* Check VAD constraint
         ((en1+en2)/(2*bestIndex)) <= 8*inst->BGNInst.energy */
        w32_tmp = WEBRTC_SPL_RSHIFT_W32(w32_en1 + w32_en2, 4); /* (en1+en2)/(2*8) */
        if (inst->BGNInst.w16_initialized == 1)
        {
            w32_tmp2 = inst->BGNInst.w32_energy;
        }
        else
        {
            /* if BGN parameters have not been estimated, use a fixed threshold */
            w32_tmp2 = 75000;
        }
        w16_tmp2 = 16 - WebRtcSpl_NormW32(w32_tmp2);
        w16_tmp2 = WEBRTC_SPL_MAX(0, w16_tmp2);
        w32_tmp = WEBRTC_SPL_RSHIFT_W32(w32_tmp, w16_tmp2);
        w16_tmp2 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_tmp2, w16_tmp2);
        w32_tmp2 = WEBRTC_SPL_MUL_16_16(w16_bestIndex, w16_tmp2);

        /* Scale w32_tmp properly before comparing with w32_tmp2 */
        /* (w16_tmp is scaling before energy calculation, thus 2*w16_tmp) */
        if (WebRtcSpl_NormW32(w32_tmp) < WEBRTC_SPL_LSHIFT_W32(w16_tmp,1))
        {
            /* Cannot scale only w32_tmp, must scale w32_temp2 too */
            WebRtc_Word16 tempshift = WebRtcSpl_NormW32(w32_tmp);
            w32_tmp = WEBRTC_SPL_LSHIFT_W32(w32_tmp, tempshift);
            w32_tmp2 = WEBRTC_SPL_RSHIFT_W32(w32_tmp2,
                WEBRTC_SPL_LSHIFT_W32(w16_tmp,1) - tempshift);
        }
        else
        {
            w32_tmp = WEBRTC_SPL_LSHIFT_W32(w32_tmp,
                WEBRTC_SPL_LSHIFT_W32(w16_tmp,1));
        }

        if (w32_tmp <= w32_tmp2) /*((en1+en2)/(2*bestIndex)) <= 8*inst->BGNInst.energy */
        {
            /* The signal seems to be passive speech */
            w16_VAD = 0;
            w16_bestCorr = 0; /* Correlation does not matter */
        }
        else
        {
            /* The signal is active speech */
            w16_VAD = 1;

            /* Calculate correlation (cc/sqrt(en1*en2)) */

            /* Start with calculating scale values */
            w16_en1Scale = 16 - WebRtcSpl_NormW32(w32_en1);
            w16_en1Scale = WEBRTC_SPL_MAX(0, w16_en1Scale);
            w16_en2Scale = 16 - WebRtcSpl_NormW32(w32_en2);
            w16_en2Scale = WEBRTC_SPL_MAX(0, w16_en2Scale);

            /* Make sure total scaling is even (to simplify scale factor after sqrt) */
            if ((w16_en1Scale + w16_en2Scale) & 1)
            {
                w16_en1Scale += 1;
            }

            /* Convert energies to WebRtc_Word16 */
            w16_en1 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_en1, w16_en1Scale);
            w16_en2 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_en2, w16_en2Scale);

            /* Calculate energy product */
            w32_tmp = WEBRTC_SPL_MUL_16_16(w16_en1, w16_en2);

            /* Calculate square-root of energy product */
            w16_sqrtEn1En2 = (WebRtc_Word16) WebRtcSpl_Sqrt(w32_tmp);

            /* Calculate cc/sqrt(en1*en2) in Q14 */
            w16_tmp = 14 - WEBRTC_SPL_RSHIFT_W16(w16_en1Scale+w16_en2Scale, 1);
            w32_cc = WEBRTC_SPL_SHIFT_W32(w32_cc, w16_tmp);
            w32_cc = WEBRTC_SPL_MAX(0, w32_cc); /* Don't divide with negative number */
            w16_bestCorr = (WebRtc_Word16) WebRtcSpl_DivW32W16(w32_cc, w16_sqrtEn1En2);
            w16_bestCorr = WEBRTC_SPL_MIN(16384, w16_bestCorr); /* set maximum to 1.0 */
        }

#ifdef NETEQ_STEREO

    } /* end if (msInfo->msMode != NETEQ_SLAVE) */

#endif /* NETEQ_STEREO */

    /************************************************/
    /* Check accelerate criteria and remove samples */
    /************************************************/

    /* Check for strong correlation (>0.9) or passive speech */
#ifdef NETEQ_STEREO
    if ((((w16_bestCorr > 14746) || (w16_VAD == 0)) && (msInfo->msMode != NETEQ_SLAVE))
        || ((msInfo->msMode == NETEQ_SLAVE) && (msInfo->extraInfo != ACC_FAIL)))
#else
    if ((w16_bestCorr > 14746) || (w16_VAD == 0))
#endif
    {
        /* Do accelerate operation by overlap add */

        /*
         * Calculate cross-fading slope so that the fading factor goes from
         * 1 (16384 in Q14) to 0 in one pitch period (bestIndex).
         */
        w16_inc = (WebRtc_Word16) WebRtcSpl_DivW32W16((WebRtc_Word32) 16384,
            (WebRtc_Word16) (w16_bestIndex + 1)); /* in Q14 */

        /* Initiate fading factor */
        w16_startfact = 16384 - w16_inc;

        /* vec1 starts at 15 ms minus one pitch period */
        pw16_vec1 = &pw16_decoded[fsMult120 - w16_bestIndex];
        /* vec2 start at 15 ms */
        pw16_vec2 = &pw16_decoded[fsMult120];

        /* Copy unmodified part [0 to 15 ms minus 1 pitch period] */
        w16_tmp = (fsMult120 - w16_bestIndex);
        WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, w16_tmp);

        /* Generate interpolated part of length bestIndex (1 pitch period) */
        pw16_vectmp = pw16_outData + w16_tmp; /* start of interpolation output */
        /* Reuse mixing function from Expand */
        WebRtcNetEQ_MixVoiceUnvoice(pw16_vectmp, (WebRtc_Word16*) pw16_vec1,
            (WebRtc_Word16*) pw16_vec2, &w16_startfact, w16_inc, w16_bestIndex);

        /* Move the last part (also unmodified) */
        /* Take from decoded at 15 ms + 1 pitch period */
        pw16_vec2 = &pw16_decoded[fsMult120 + w16_bestIndex];
        WEBRTC_SPL_MEMMOVE_W16(&pw16_outData[fsMult120], pw16_vec2,
            (WebRtc_Word16) (len - fsMult120 - w16_bestIndex));

        /* Set the mode flag */
        if (w16_VAD)
        {
            inst->w16_mode = MODE_SUCCESS_ACCELERATE;
        }
        else
        {
            inst->w16_mode = MODE_LOWEN_ACCELERATE;
        }

        /* Calculate resulting length = original length - pitch period */
        *pw16_len = len - w16_bestIndex;

        /* Update in-call statistics */
        inst->statInst.accelerateLength += w16_bestIndex;

        return 0;
    }
    else
    {
        /* Accelerate not allowed */

#ifdef NETEQ_STEREO
        /* Signal to slave(s) that this was unsuccessful */
        if (msInfo->msMode == NETEQ_MASTER)
        {
            msInfo->extraInfo = ACC_FAIL;
        }
#endif

        /* Set mode flag to unsuccessful accelerate */
        inst->w16_mode = MODE_UNSUCCESS_ACCELERATE;

        /* Length is unmodified */
        *pw16_len = len;

        /* Simply move all data from decoded to outData */
        WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (WebRtc_Word16) len);

        return 0;
    }
}
#undef SCRATCH_PW16_DS_SPEECH
#undef SCRATCH_PW32_CORR
#undef SCRATCH_PW16_CORR

View File

@ -1,717 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* This file contains the implementation of automatic buffer level optimization.
*/
#include "automode.h"
#include "signal_processing_library.h"
#include "neteq_defines.h"
#ifdef NETEQ_DELAY_LOGGING
/* special code for offline delay logging */
#include <stdio.h>
#include "delay_logging.h"
extern FILE *delay_fid2; /* file pointer to delay log file */
#endif /* NETEQ_DELAY_LOGGING */
/*
 * Update the inter-arrival time (IAT) statistics with a newly arrived packet
 * and recalculate the optimal buffer level.
 *
 * Input:
 *  - inst          : automode instance
 *  - maxBufLen     : maximum buffer length in packets
 *  - seqNumber     : RTP sequence number of the arriving packet
 *  - timeStamp     : RTP timestamp of the arriving packet
 *  - fsHz          : sample rate in Hz
 *  - mdCodec       : non-zero if a multiple-description codec is in use
 *  - streamingMode : non-zero in streaming (listen-only) mode
 *
 * Return value     : 0 on success, -1 on invalid input, or the (negative)
 *                    error propagated from WebRtcNetEQ_CalcOptimalBufLvl.
 */
int WebRtcNetEQ_UpdateIatStatistics(AutomodeInst_t *inst, int maxBufLen,
                                    WebRtc_UWord16 seqNumber, WebRtc_UWord32 timeStamp,
                                    WebRtc_Word32 fsHz, int mdCodec, int streamingMode)
{
    WebRtc_UWord32 timeIat; /* inter-arrival time */
    int i;
    WebRtc_Word32 tempsum = 0; /* temp summation */
    WebRtc_Word32 tempvar; /* temporary variable */
    int retval = 0; /* return value */
    WebRtc_Word16 packetLenSamp; /* packet speech length in samples */

    /****************/
    /* Sanity check */
    /****************/

    if (maxBufLen <= 1 || fsHz <= 0)
    {
        /* maxBufLen must be at least 2 and fsHz must both be strictly positive */
        return -1;
    }

    /****************************/
    /* Update packet statistics */
    /****************************/

    /* Try calculating packet length from current and previous timestamps */
    /* NOTE(review): the <= comparisons do not handle sequence-number or
     timestamp wrap-around; in that case the stored length is used instead. */
    if ((timeStamp <= inst->lastTimeStamp) || (seqNumber <= inst->lastSeqNo))
    {
        /* Wrong timestamp or sequence order; revert to backup plan */
        packetLenSamp = inst->packetSpeechLenSamp; /* use stored value */
    }
    else if (timeStamp > inst->lastTimeStamp)
    {
        /* calculate timestamps per packet */
        packetLenSamp = (WebRtc_Word16) WebRtcSpl_DivU32U16(timeStamp - inst->lastTimeStamp,
            seqNumber - inst->lastSeqNo);
    }

    /* Check that the packet size is positive; if not, the statistics cannot be updated. */
    if (packetLenSamp > 0)
    { /* packet size ok */

        /* calculate inter-arrival time in integer packets (rounding down) */
        timeIat = WebRtcSpl_DivW32W16(inst->packetIatCountSamp, packetLenSamp);

        /* Special operations for streaming mode */
        if (streamingMode != 0)
        {
            /*
             * Calculate IAT in Q8, including fractions of a packet (i.e., more accurate
             * than timeIat).
             */
            WebRtc_Word16 timeIatQ8 = (WebRtc_Word16) WebRtcSpl_DivW32W16(
                WEBRTC_SPL_LSHIFT_W32(inst->packetIatCountSamp, 8), packetLenSamp);

            /*
             * Calculate cumulative sum iat with sequence number compensation (ideal arrival
             * times makes this sum zero).
             */
            inst->cSumIatQ8 += (timeIatQ8
                - WEBRTC_SPL_LSHIFT_W32(seqNumber - inst->lastSeqNo, 8));

            /* subtract drift term */
            inst->cSumIatQ8 -= CSUM_IAT_DRIFT;

            /* ensure not negative */
            inst->cSumIatQ8 = WEBRTC_SPL_MAX(inst->cSumIatQ8, 0);

            /* remember max */
            if (inst->cSumIatQ8 > inst->maxCSumIatQ8)
            {
                inst->maxCSumIatQ8 = inst->cSumIatQ8;
                inst->maxCSumUpdateTimer = 0;
            }

            /* too long since the last maximum was observed; decrease max value */
            if (inst->maxCSumUpdateTimer > (WebRtc_UWord32) WEBRTC_SPL_MUL_32_16(fsHz,
                MAX_STREAMING_PEAK_PERIOD))
            {
                inst->maxCSumIatQ8 -= 4; /* remove 1000*4/256 = 15.6 ms/s */
            }
        } /* end of streaming mode */

        /* check for discontinuous packet sequence and re-ordering */
        if (seqNumber > inst->lastSeqNo + 1)
        {
            /* Compensate for gap in the sequence numbers.
             * Reduce IAT with expected extra time due to lost packets, but ensure that
             * the IAT is not negative.
             */
            timeIat -= WEBRTC_SPL_MIN(timeIat,
                (WebRtc_UWord32) (seqNumber - inst->lastSeqNo - 1));
        }
        else if (seqNumber < inst->lastSeqNo)
        {
            /* compensate for re-ordering */
            timeIat += (WebRtc_UWord32) (inst->lastSeqNo + 1 - seqNumber);
        }

        /* saturate IAT at maximum value */
        timeIat = WEBRTC_SPL_MIN( timeIat, MAX_IAT );

        /* update iatProb = forgetting_factor * iatProb for all elements */
        for (i = 0; i <= MAX_IAT; i++)
        {
            WebRtc_Word32 tempHi, tempLo; /* Temporary variables */

            /*
             * Multiply iatProbFact (Q15) with iatProb (Q30) and right-shift 15 steps
             * to come back to Q30. The operation is done in two steps:
             */

            /*
             * 1) Multiply the high 16 bits (15 bits + sign) of iatProb. Shift iatProb
             * 16 steps right to get the high 16 bits in a WebRtc_Word16 prior to
             * multiplication, and left-shift with 1 afterwards to come back to
             * Q30 = (Q15 * (Q30>>16)) << 1.
             */
            tempHi = WEBRTC_SPL_MUL_16_16(inst->iatProbFact,
                (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(inst->iatProb[i], 16));
            tempHi = WEBRTC_SPL_LSHIFT_W32(tempHi, 1); /* left-shift 1 step */

            /*
             * 2) Isolate and multiply the low 16 bits of iatProb. Right-shift 15 steps
             * afterwards to come back to Q30 = (Q15 * Q30) >> 15.
             */
            tempLo = inst->iatProb[i] & 0x0000FFFF; /* sift out the 16 low bits */
            tempLo = WEBRTC_SPL_MUL_16_U16(inst->iatProbFact,
                (WebRtc_UWord16) tempLo);
            tempLo = WEBRTC_SPL_RSHIFT_W32(tempLo, 15);

            /* Finally, add the high and low parts */
            inst->iatProb[i] = tempHi + tempLo;

            /* Sum all vector elements while we are at it... */
            tempsum += inst->iatProb[i];
        }

        /*
         * Increase the probability for the currently observed inter-arrival time
         * with 1 - iatProbFact. The factor is in Q15, iatProb in Q30;
         * hence, left-shift 15 steps to obtain result in Q30.
         */
        inst->iatProb[timeIat] += (32768 - inst->iatProbFact) << 15;
        tempsum += (32768 - inst->iatProbFact) << 15; /* add to vector sum */

        /*
         * Update iatProbFact (changes only during the first seconds after reset)
         * The factor converges to IAT_PROB_FACT.
         */
        inst->iatProbFact += (IAT_PROB_FACT - inst->iatProbFact + 3) >> 2;

        /* iatProb should sum up to 1 (in Q30). */
        tempsum -= 1 << 30; /* should be zero */

        /* Check if it does, correct if it doesn't. */
        if (tempsum > 0)
        {
            /* tempsum too large => decrease a few values in the beginning */
            i = 0;
            while (i <= MAX_IAT && tempsum > 0)
            {
                /* Remove iatProb[i] / 16 from iatProb, but not more than tempsum */
                tempvar = WEBRTC_SPL_MIN(tempsum, inst->iatProb[i] >> 4);
                inst->iatProb[i++] -= tempvar;
                tempsum -= tempvar;
            }
        }
        else if (tempsum < 0)
        {
            /* tempsum too small => increase a few values in the beginning */
            i = 0;
            while (i <= MAX_IAT && tempsum < 0)
            {
                /* Add iatProb[i] / 16 to iatProb, but not more than tempsum */
                tempvar = WEBRTC_SPL_MIN(-tempsum, inst->iatProb[i] >> 4);
                inst->iatProb[i++] += tempvar;
                tempsum += tempvar;
            }
        }

        /* Calculate optimal buffer level based on updated statistics */
        tempvar = (WebRtc_Word32) WebRtcNetEQ_CalcOptimalBufLvl(inst, fsHz, mdCodec, timeIat,
            streamingMode);
        if (tempvar > 0)
        {
            inst->optBufLevel = (WebRtc_UWord16) tempvar;

            if (streamingMode != 0)
            {
                inst->optBufLevel = WEBRTC_SPL_MAX(inst->optBufLevel,
                    inst->maxCSumIatQ8);
            }

            /*********/
            /* Limit */
            /*********/

            /* Subtract extra delay from maxBufLen */
            if (inst->extraDelayMs > 0 && inst->packetSpeechLenSamp > 0)
            {
                maxBufLen -= inst->extraDelayMs / inst->packetSpeechLenSamp * fsHz / 1000;
                maxBufLen = WEBRTC_SPL_MAX(maxBufLen, 1); // sanity: at least one packet
            }

            maxBufLen = WEBRTC_SPL_LSHIFT_W32(maxBufLen, 8); /* shift to Q8 */

            /* Enforce upper limit; 75% of maxBufLen */
            inst->optBufLevel = (WebRtc_UWord16) WEBRTC_SPL_MIN( inst->optBufLevel,
                (maxBufLen >> 1) + (maxBufLen >> 2) ); /* 1/2 + 1/4 = 75% */
        }
        else
        {
            /* Propagate the error code from CalcOptimalBufLvl */
            retval = (int) tempvar;
        }

    } /* end if */

    /*******************************/
    /* Update post-call statistics */
    /*******************************/

    /* Calculate inter-arrival time in ms = packetIatCountSamp / (fsHz / 1000) */
    timeIat = WEBRTC_SPL_UDIV(
        WEBRTC_SPL_UMUL_32_16(inst->packetIatCountSamp, (WebRtc_Word16) 1000),
        (WebRtc_UWord32) fsHz);

    /* Increase counter corresponding to current inter-arrival time */
    if (timeIat > 2000)
    {
        inst->countIAT2000ms++;
    }
    else if (timeIat > 1000)
    {
        inst->countIAT1000ms++;
    }
    else if (timeIat > 500)
    {
        inst->countIAT500ms++;
    }

    if (timeIat > inst->longestIATms)
    {
        /* update maximum value */
        inst->longestIATms = timeIat;
    }

    /***********************************/
    /* Prepare for next packet arrival */
    /***********************************/

    inst->packetIatCountSamp = 0; /* reset inter-arrival time counter */

    inst->lastSeqNo = seqNumber; /* remember current sequence number */

    inst->lastTimeStamp = timeStamp; /* remember current timestamp */

    return retval;
}
/*
 * Calculate the optimal buffer level (in packets, Q8) from the current
 * inter-arrival time statistics, and update the jitter-peak detector.
 *
 * Input:
 *  - inst          : automode instance (iatProb must be up to date)
 *  - fsHz          : sample rate in Hz
 *  - mdCodec       : non-zero if a multiple-description codec is in use
 *  - timeIatPkts   : inter-arrival time of the current packet, in packets
 *  - streamingMode : non-zero in streaming (listen-only) mode
 *
 * Output:
 *  - inst          : levelFiltFact and peak-mode state updated
 *
 * Return value     : optimal buffer level in Q8 (always >= 1 in Q8),
 *                    or -1 if fsHz is invalid.
 */
WebRtc_Word16 WebRtcNetEQ_CalcOptimalBufLvl(AutomodeInst_t *inst, WebRtc_Word32 fsHz,
                                            int mdCodec, WebRtc_UWord32 timeIatPkts,
                                            int streamingMode)
{

    WebRtc_Word32 sum1 = 1 << 30; /* assign to 1 in Q30 */
    WebRtc_Word16 B;
    WebRtc_UWord16 Bopt;
    int i;
    WebRtc_Word32 betaInv; /* optimization parameter */

#ifdef NETEQ_DELAY_LOGGING
    /* special code for offline delay logging */
    int temp_var;
#endif

    /****************/
    /* Sanity check */
    /****************/

    if (fsHz <= 0)
    {
        /* fsHz must be strictly positive */
        return -1;
    }

    /***********************************************/
    /* Get betaInv parameter based on playout mode */
    /***********************************************/

    if (streamingMode)
    {
        /* streaming (listen-only) mode */
        betaInv = AUTOMODE_STREAMING_BETA_INV_Q30;
    }
    else
    {
        /* normal mode */
        betaInv = AUTOMODE_BETA_INV_Q30;
    }

    /*******************************************************************/
    /* Calculate optimal buffer level without considering jitter peaks */
    /*******************************************************************/

    /*
     * Find the B for which the probability of observing an inter-arrival time larger
     * than or equal to B is less than or equal to betaInv.
     */
    B = 0; /* start from the beginning of iatProb */
    sum1 -= inst->iatProb[B]; /* ensure that optimal level is not less than 1 */

    do
    {
        /*
         * Subtract the probabilities one by one until the sum is no longer greater
         * than betaInv.
         */
        sum1 -= inst->iatProb[++B];
    }
    while ((sum1 > betaInv) && (B < MAX_IAT));

    Bopt = B; /* This is our primary value for the optimal buffer level Bopt */

    if (mdCodec)
    {
        /*
         * Use alternative cost function when multiple description codec is in use.
         * Do not have to re-calculate all points, just back off a few steps from
         * previous value of B.
         */
        WebRtc_Word32 sum2 = sum1; /* copy sum1 */

        while ((sum2 <= betaInv + inst->iatProb[Bopt]) && (Bopt > 0))
        {
            /* Go backwards in the sum until the modified cost function solution is found */
            sum2 += inst->iatProb[Bopt--];
        }
        Bopt++; /* This is the optimal level when using an MD codec */

        /* Now, Bopt and B can have different values. */
    }

#ifdef NETEQ_DELAY_LOGGING
    /* special code for offline delay logging */
    temp_var = NETEQ_DELAY_LOGGING_SIGNAL_OPTBUF;
    fwrite( &temp_var, sizeof(int), 1, delay_fid2 );
    temp_var = (int) (Bopt * inst->packetSpeechLenSamp);
#endif

    /******************************************************************/
    /* Make levelFiltFact adaptive: Larger B <=> larger levelFiltFact */
    /******************************************************************/

    switch (B)
    {
        case 0:
        case 1:
        {
            inst->levelFiltFact = 251;
            break;
        }
        case 2:
        case 3:
        {
            inst->levelFiltFact = 252;
            break;
        }
        case 4:
        case 5:
        case 6:
        case 7:
        {
            inst->levelFiltFact = 253;
            break;
        }
        default: /* B > 7 */
        {
            inst->levelFiltFact = 254;
            break;
        }
    }

    /************************/
    /* Peak mode operations */
    /************************/

    /* Compare current IAT with peak threshold
     *
     * If IAT > optimal level + threshold (+1 for MD codecs)
     * or if IAT > 2 * optimal level (note: optimal level is in Q8):
     */
    if (timeIatPkts > (WebRtc_UWord32) (Bopt + inst->peakThresholdPkt + (mdCodec != 0))
        || timeIatPkts > (WebRtc_UWord32) WEBRTC_SPL_LSHIFT_U16(Bopt, 1))
    {
        /* A peak is observed */

        if (inst->peakIndex == -1)
        {
            /* this is the first peak; prepare for next peak */
            inst->peakIndex = 0;
            /* set the mode-disable counter */
            inst->peakModeDisabled = WEBRTC_SPL_LSHIFT_W16(1, NUM_PEAKS_REQUIRED-2);
        }
        else if (inst->peakIatCountSamp
            <=
            (WebRtc_UWord32) WEBRTC_SPL_MUL_32_16(fsHz, MAX_PEAK_PERIOD))
        {
            /* This is not the first peak and the period time is valid */

            /* store time elapsed since last peak */
            inst->peakPeriodSamp[inst->peakIndex] = inst->peakIatCountSamp;

            /* saturate height to 16 bits */
            inst->peakHeightPkt[inst->peakIndex]
                =
                (WebRtc_Word16) WEBRTC_SPL_MIN(timeIatPkts, WEBRTC_SPL_WORD16_MAX);

            /*
             * Increment peakIndex and wrap/modulo.
             * (The original expression ++inst->peakIndex & PEAK_INDEX_MASK assigned
             * to inst->peakIndex twice without an intervening sequence point, which
             * is undefined behavior in C; this form is well-defined and equivalent
             * in intent.)
             */
            inst->peakIndex = (inst->peakIndex + 1) & PEAK_INDEX_MASK;

            /* process peak vectors */
            inst->curPeakHeight = 0;
            inst->curPeakPeriod = 0;

            for (i = 0; i < NUM_PEAKS; i++)
            {
                /* Find maximum of peak heights and peak periods */
                inst->curPeakHeight
                    = WEBRTC_SPL_MAX(inst->curPeakHeight, inst->peakHeightPkt[i]);
                inst->curPeakPeriod
                    = WEBRTC_SPL_MAX(inst->curPeakPeriod, inst->peakPeriodSamp[i]);

            }

            inst->peakModeDisabled >>= 1; /* decrease mode-disable "counter" */

        }
        else if (inst->peakIatCountSamp > (WebRtc_UWord32) WEBRTC_SPL_MUL_32_16(fsHz,
            WEBRTC_SPL_LSHIFT_W16(MAX_PEAK_PERIOD, 1)))
        {
            /*
             * More than 2 * MAX_PEAK_PERIOD has elapsed since last peak;
             * too long time => reset peak statistics
             */
            inst->curPeakHeight = 0;
            inst->curPeakPeriod = 0;
            for (i = 0; i < NUM_PEAKS; i++)
            {
                inst->peakHeightPkt[i] = 0;
                inst->peakPeriodSamp[i] = 0;
            }

            inst->peakIndex = -1; /* Next peak is first peak */
            inst->peakIatCountSamp = 0;
        }

        inst->peakIatCountSamp = 0; /* Reset peak interval timer */
    } /* end if peak is observed */

    /* Evaluate peak mode conditions */

    /*
     * If not disabled (enough peaks have been observed) and
     * time since last peak is less than two peak periods.
     */
    if ((!inst->peakModeDisabled) && (inst->peakIatCountSamp
        <= WEBRTC_SPL_LSHIFT_W32(inst->curPeakPeriod , 1)))
    {
        /* Engage peak mode */

        /* Set optimal buffer level to curPeakHeight (if it's not already larger) */
        Bopt = WEBRTC_SPL_MAX(Bopt, inst->curPeakHeight);

#ifdef NETEQ_DELAY_LOGGING
        /* special code for offline delay logging */
        temp_var = (int) -(Bopt * inst->packetSpeechLenSamp);
#endif
    }

    /* Scale Bopt to Q8 */
    Bopt = WEBRTC_SPL_LSHIFT_U16(Bopt,8);

#ifdef NETEQ_DELAY_LOGGING
    /* special code for offline delay logging */
    fwrite( &temp_var, sizeof(int), 1, delay_fid2 );
#endif

    /* Sanity check: Bopt must be strictly positive */
    if (Bopt <= 0)
    {
        Bopt = WEBRTC_SPL_LSHIFT_W16(1, 8); /* 1 in Q8 */
    }

    return Bopt; /* return value in Q8 */
}
/*
 * Update the filtered buffer level estimate (buffLevelFilt, in packets Q8)
 * from the current buffer size, and advance the automode timers.
 *
 * Input:
 *  - curSizeMs8  : current buffer size, in ms * 8 (i.e., samples at 8 kHz)
 *  - inst        : automode instance
 *  - sampPerCall : number of samples consumed per call (used to step timers)
 *  - fsMult      : sample rate divided by 8000
 *
 * Return value   : 0 on success, -1 on invalid input.
 */
int WebRtcNetEQ_BufferLevelFilter(WebRtc_Word32 curSizeMs8, AutomodeInst_t *inst,
                                  int sampPerCall, WebRtc_Word16 fsMult)
{

    WebRtc_Word16 curSizeFrames; /* current buffer level in whole packets */

    /****************/
    /* Sanity check */
    /****************/

    if (sampPerCall <= 0 || fsMult <= 0)
    {
        /* sampPerCall and fsMult must both be strictly positive */
        return -1;
    }

    /* Check if packet size has been detected */
    if (inst->packetSpeechLenSamp > 0)
    {
        /*
         * Current buffer level in packet lengths
         * = (curSizeMs8 * fsMult) / packetSpeechLenSamp
         */
        curSizeFrames = (WebRtc_Word16) WebRtcSpl_DivW32W16(
            WEBRTC_SPL_MUL_32_16(curSizeMs8, fsMult), inst->packetSpeechLenSamp);
    }
    else
    {
        /* packet size unknown; treat buffer level as zero packets */
        curSizeFrames = 0;
    }

    /* Filter buffer level */
    if (inst->levelFiltFact > 0) /* check that filter factor is set */
    {
        /* Filter:
         * buffLevelFilt = levelFiltFact * buffLevelFilt
         * + (1-levelFiltFact) * curSizeFrames
         *
         * levelFiltFact is in Q8
         */
        inst->buffLevelFilt = (WebRtc_UWord16) (WEBRTC_SPL_RSHIFT_W32(
            WEBRTC_SPL_MUL_16_U16(inst->levelFiltFact, inst->buffLevelFilt), 8)
            + WEBRTC_SPL_MUL_16_16(256 - inst->levelFiltFact, curSizeFrames));
    }

    /* Account for time-scale operations (accelerate and pre-emptive expand) */
    if (inst->prevTimeScale)
    {
        /*
         * Time-scaling has been performed since last filter update.
         * Subtract the sampleMemory from buffLevelFilt after converting sampleMemory
         * from samples to packets in Q8. Make sure that the filtered value is
         * non-negative.
         */
        inst->buffLevelFilt = (WebRtc_UWord16) WEBRTC_SPL_MAX( inst->buffLevelFilt -
            WebRtcSpl_DivW32W16(
                WEBRTC_SPL_LSHIFT_W32(inst->sampleMemory, 8), /* sampleMemory in Q8 */
                inst->packetSpeechLenSamp ), /* divide by packetSpeechLenSamp */
            0);

        /*
         * Reset flag and set timescaleHoldOff timer to prevent further time-scaling
         * for some time.
         */
        inst->prevTimeScale = 0;
        inst->timescaleHoldOff = AUTOMODE_TIMESCALE_LIMIT;
    }

    /* Update time counters and HoldOff timer */
    inst->packetIatCountSamp += sampPerCall; /* packet inter-arrival time */
    inst->peakIatCountSamp += sampPerCall; /* peak inter-arrival time */
    inst->timescaleHoldOff >>= 1; /* time-scaling limiter */
    inst->maxCSumUpdateTimer += sampPerCall; /* cumulative-sum timer */

    return 0;
}
/*
 * Register a new packet speech length with the automode instance and derive
 * the peak-detection threshold from it.
 *
 * Input:
 *  - inst       : automode instance
 *  - newLenSamp : packet length in samples (> 0)
 *  - fsHz       : sample rate in Hz (> 0)
 *
 * Return value  : 0 on success, -1 on invalid input.
 */
int WebRtcNetEQ_SetPacketSpeechLen(AutomodeInst_t *inst, WebRtc_Word16 newLenSamp,
                                   WebRtc_Word32 fsHz)
{
    WebRtc_Word32 peakHeightSamp; /* PEAK_HEIGHT (Q8 s) converted to samples */

    /* Guard against non-positive packet length or sample rate. */
    if ((newLenSamp <= 0) || (fsHz <= 0))
    {
        return -1;
    }

    /* Store the new packet size in the instance. */
    inst->packetSpeechLenSamp = newLenSamp;

    /* Make NetEQ wait for the first regular packet before starting the timer. */
    inst->lastPackCNGorDTMF = 1;

    /* Restart the packet inter-arrival time counter. */
    inst->packetIatCountSamp = 0;

    /*
     * Derive the peak threshold from the packet size. The threshold is the
     * (fractional) number of packets corresponding to PEAK_HEIGHT (in Q8
     * seconds): threshold = PEAK_HEIGHT/256 * fsHz / packLen.
     */
    peakHeightSamp = WEBRTC_SPL_MUL_16_16_RSFT(PEAK_HEIGHT,
        (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(fsHz, 6), 2);
    inst->peakThresholdPkt = (WebRtc_UWord16) WebRtcSpl_DivW32W16ResW16(
        peakHeightSamp, inst->packetSpeechLenSamp);

    return 0;
}
/*
 * Reset the automode instance to its initial state.
 *
 * Input:
 *  - inst             : automode instance to reset
 *  - maxBufLenPackets : maximum buffer length in packets (values <= 1 are
 *                       replaced by 10)
 *
 * Return value        : always 0.
 */
int WebRtcNetEQ_ResetAutomode(AutomodeInst_t *inst, int maxBufLenPackets)
{

    int i;
    WebRtc_UWord16 tempprob = 0x4002; /* 16384 + 2 = 100000000000010 binary; */

    /* Sanity check for maxBufLenPackets */
    if (maxBufLenPackets <= 1)
    {
        /* Invalid value; set to 10 instead (arbitary small number) */
        maxBufLenPackets = 10;
    }

    /* Reset filtered buffer level */
    inst->buffLevelFilt = 0;

    /* Reset packet size to unknown */
    inst->packetSpeechLenSamp = 0;

    /*
     * Flag that last packet was special payload, so that automode will treat the next speech
     * payload as the first payload received.
     */
    inst->lastPackCNGorDTMF = 1;

    /* Reset peak detection parameters */
    inst->peakModeDisabled = 1; /* disable peak mode */
    inst->peakIatCountSamp = 0;
    inst->peakIndex = -1; /* indicates that no peak is registered */
    inst->curPeakHeight = 0;
    inst->curPeakPeriod = 0;
    for (i = 0; i < NUM_PEAKS; i++)
    {
        inst->peakHeightPkt[i] = 0;
        inst->peakPeriodSamp[i] = 0;
    }

    /*
     * Set the iatProb PDF vector to an exponentially decaying distribution
     * iatProb[i] = 0.5^(i+1), i = 0, 1, 2, ...
     * iatProb is in Q30.
     */
    for (i = 0; i <= MAX_IAT; i++)
    {
        /* iatProb[i] = 0.5^(i+1) = iatProb[i-1] / 2 */
        tempprob = WEBRTC_SPL_RSHIFT_U16(tempprob, 1);

        /* store in PDF vector */
        inst->iatProb[i] = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32) tempprob, 16);
    }

    /*
     * Calculate the optimal buffer level corresponing to the initial PDF.
     * No need to call WebRtcNetEQ_CalcOptimalBufLvl() since we have just hard-coded
     * all the variables that the buffer level depends on => we know the result
     *
     * The cap is 75% of maxBufLenPackets, i.e. 1/2 + 1/4, matching the limit
     * applied in WebRtcNetEQ_UpdateIatStatistics(). (The previous code summed
     * (x >> 1) + (x >> 1), i.e. ~100%, contradicting the stated 75%.)
     */
    inst->optBufLevel = WEBRTC_SPL_MIN(4,
        (maxBufLenPackets >> 1) + (maxBufLenPackets >> 2)); /* 75% of maxBufLenPackets */

    inst->levelFiltFact = 253;

    /*
     * Reset the iat update forgetting factor to 0 to make the impact of the first
     * incoming packets greater.
     */
    inst->iatProbFact = 0;

    /* Reset packet inter-arrival time counter */
    inst->packetIatCountSamp = 0;

    /* Clear time-scaling related variables */
    inst->prevTimeScale = 0;
    inst->timescaleHoldOff = AUTOMODE_TIMESCALE_LIMIT; /* don't allow time-scaling immediately */

    inst->cSumIatQ8 = 0;
    inst->maxCSumIatQ8 = 0;

    return 0;
}

View File

@ -1,243 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* This file contains the functionality for automatic buffer level optimization.
*/
#ifndef AUTOMODE_H
#define AUTOMODE_H
#include "typedefs.h"
/*************/
/* Constants */
/*************/
/* The beta parameter defines the trade-off between delay and underrun probability. */
/* It is defined through its inverse in Q30 */
#define AUTOMODE_BETA_INV_Q30 53687091 /* 1/20 in Q30 */
#define AUTOMODE_STREAMING_BETA_INV_Q30 536871 /* 1/2000 in Q30 */
/* Forgetting factor for the inter-arrival time statistics */
#define IAT_PROB_FACT 32745 /* 0.9993 in Q15 */
/* Maximum inter-arrival time to register (in "packet-times") */
#define MAX_IAT 64
#define PEAK_HEIGHT 20 /* 0.08s in Q8 */
/* The value (1<<5) sets maximum accelerate "speed" to about 100 ms/s */
#define AUTOMODE_TIMESCALE_LIMIT (1<<5)
/* Peak mode related parameters */
/* Number of peaks in peak vector; must be a power of 2 */
#define NUM_PEAKS 8
/* Must be NUM_PEAKS-1 */
#define PEAK_INDEX_MASK 0x0007
/* Longest accepted peak distance */
#define MAX_PEAK_PERIOD 10
#define MAX_STREAMING_PEAK_PERIOD 600 /* 10 minutes */
/* Number of peaks required before peak mode can be engaged */
#define NUM_PEAKS_REQUIRED 3
/* Drift term for cumulative sum */
#define CSUM_IAT_DRIFT 2
/*******************/
/* Automode struct */
/*******************/
/*
 * The automode struct is a sub-struct of the bufstats-struct (BufstatsInst_t).
 * It carries all state for the automatic buffer-level optimization: the
 * filtered buffer level, inter-arrival time (iat) statistics, peak detection
 * state, and post-call statistics. Fixed-point formats (Q8, Q15, Q30) are
 * noted per field; "samples" means RTP timestamp units.
 */
typedef struct
{

    /* Filtered current buffer level */
    WebRtc_UWord16 levelFiltFact; /* filter forgetting factor in Q8 */
    WebRtc_UWord16 buffLevelFilt; /* filtered buffer level in Q8 */

    /* Inter-arrival time (iat) statistics */
    WebRtc_Word32 iatProb[MAX_IAT + 1]; /* iat probabilities in Q30 */
    WebRtc_Word16 iatProbFact; /* iat forgetting factor in Q15 */
    WebRtc_UWord32 packetIatCountSamp; /* time (in timestamps) elapsed since last
     packet arrival, based on RecOut calls */
    WebRtc_UWord16 optBufLevel; /* current optimal buffer level in Q8 */

    /* Packet related information */
    WebRtc_Word16 packetSpeechLenSamp; /* speech samples per incoming packet;
     0 means not yet known */
    WebRtc_Word16 lastPackCNGorDTMF; /* indicates that the last received packet
     contained special information */
    WebRtc_UWord16 lastSeqNo; /* sequence number for last packet received */
    WebRtc_UWord32 lastTimeStamp; /* timestamp for the last packet received */
    WebRtc_Word32 sampleMemory; /* memory position for keeping track of how many
     samples we cut during expand */
    WebRtc_Word16 prevTimeScale; /* indicates that the last mode was an accelerate
     or pre-emptive expand operation */
    WebRtc_UWord32 timescaleHoldOff; /* counter that is shifted one step right each
     RecOut call; time-scaling allowed when it has
     reached 0 */
    WebRtc_Word16 extraDelayMs; /* extra delay for sync with video */

    /* Peak-detection */
    /* vector with the latest peak periods (peak spacing in samples) */
    WebRtc_UWord32 peakPeriodSamp[NUM_PEAKS];
    /* vector with the latest peak heights (in packets) */
    WebRtc_Word16 peakHeightPkt[NUM_PEAKS];
    WebRtc_Word16 peakIndex; /* index for the vectors peakPeriodSamp and peakHeightPkt;
     -1 if still waiting for first peak */
    WebRtc_UWord16 peakThresholdPkt; /* definition of peak (in packets);
     calculated from PEAK_HEIGHT */
    WebRtc_UWord32 peakIatCountSamp; /* samples elapsed since last peak was observed */
    WebRtc_UWord32 curPeakPeriod; /* current maximum of peakPeriodSamp vector */
    WebRtc_Word16 curPeakHeight; /* derived from peakHeightPkt vector;
     used as optimal buffer level in peak mode */
    WebRtc_Word16 peakModeDisabled; /* ==0 if peak mode can be engaged; >0 if not */

    /* Post-call statistics */
    WebRtc_UWord32 countIAT500ms; /* number of times we got small network outage */
    WebRtc_UWord32 countIAT1000ms; /* number of times we got medium network outage */
    WebRtc_UWord32 countIAT2000ms; /* number of times we got large network outage */
    WebRtc_UWord32 longestIATms; /* mSec duration of longest network outage */

    WebRtc_Word16 cSumIatQ8; /* cumulative sum of inter-arrival times */
    WebRtc_Word16 maxCSumIatQ8; /* max cumulative sum IAT */
    WebRtc_UWord32 maxCSumUpdateTimer;/* time elapsed since maximum was observed */

} AutomodeInst_t;
/*************/
/* Functions */
/*************/
/****************************************************************************
* WebRtcNetEQ_UpdateIatStatistics(...)
*
* Update the packet inter-arrival time statistics when a new packet arrives.
* This function should be called for every arriving packet, with some
* exceptions when using DTX/VAD and DTMF. A new optimal buffer level is
* calculated after the update.
*
* Input:
* - inst : Automode instance
* - maxBufLen : Maximum number of packets the buffer can hold
* - seqNumber : RTP sequence number of incoming packet
* - timeStamp : RTP timestamp of incoming packet
* - fsHz : Sample rate in Hz
* - mdCodec : Non-zero if the current codec is a multiple-
* description codec
* - streamingMode : A non-zero value will increase jitter robustness (and delay)
*
* Output:
* - inst : Updated automode instance
*
* Return value : 0 - Ok
* <0 - Error
*/
int WebRtcNetEQ_UpdateIatStatistics(AutomodeInst_t *inst, int maxBufLen,
WebRtc_UWord16 seqNumber, WebRtc_UWord32 timeStamp,
WebRtc_Word32 fsHz, int mdCodec, int streamingMode);
/****************************************************************************
* WebRtcNetEQ_CalcOptimalBufLvl(...)
*
* Calculate the optimal buffer level based on packet inter-arrival time
* statistics.
*
* Input:
* - inst : Automode instance
* - fsHz : Sample rate in Hz
* - mdCodec : Non-zero if the current codec is a multiple-
* description codec
* - timeIatPkts : Currently observed inter-arrival time in packets
* - streamingMode : A non-zero value will increase jitter robustness (and delay)
*
* Output:
* - inst : Updated automode instance
*
* Return value : >0 - Optimal buffer level
* <0 - Error
*/
WebRtc_Word16 WebRtcNetEQ_CalcOptimalBufLvl(AutomodeInst_t *inst, WebRtc_Word32 fsHz,
int mdCodec, WebRtc_UWord32 timeIatPkts,
int streamingMode);
/****************************************************************************
* WebRtcNetEQ_BufferLevelFilter(...)
*
* Update filtered buffer level. The function must be called once for each
* RecOut call, since the timing of automode hinges on counters that are
* updated by this function.
*
* Input:
* - curSizeMs8 : Total length of unused speech data in packet buffer
* and sync buffer, in ms * 8
* - inst : Automode instance
* - sampPerCall : Number of samples per RecOut call
* - fsMult : Sample rate in Hz divided by 8000
*
* Output:
* - inst : Updated automode instance
*
* Return value : 0 - Ok
* : <0 - Error
*/
int WebRtcNetEQ_BufferLevelFilter(WebRtc_Word32 curSizeMs8, AutomodeInst_t *inst,
int sampPerCall, WebRtc_Word16 fsMult);
/****************************************************************************
* WebRtcNetEQ_SetPacketSpeechLen(...)
*
* Provide the number of speech samples extracted from a packet to the
* automode instance. Several of the calculations within automode depend
* on knowing the packet size.
*
*
* Input:
* - inst : Automode instance
* - newLenSamp : Number of samples per RecOut call
* - fsHz : Sample rate in Hz
*
* Output:
* - inst : Updated automode instance
*
* Return value : 0 - Ok
* <0 - Error
*/
int WebRtcNetEQ_SetPacketSpeechLen(AutomodeInst_t *inst, WebRtc_Word16 newLenSamp,
WebRtc_Word32 fsHz);
/****************************************************************************
* WebRtcNetEQ_ResetAutomode(...)
*
* Reset the automode instance.
*
*
* Input:
* - inst : Automode instance
* - maxBufLenPackets : Maximum number of packets that the packet
* buffer can hold (>1)
*
* Output:
* - inst : Updated automode instance
*
* Return value : 0 - Ok
*/
int WebRtcNetEQ_ResetAutomode(AutomodeInst_t *inst, int maxBufLenPackets);
#endif /* AUTOMODE_H */

View File

@ -1,247 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* This file contains the function for updating the background noise estimate.
*/
#include "dsp.h"
#include "signal_processing_library.h"
#include "dsp_helpfunctions.h"
/* Scratch usage:
Designed for BGN_LPC_ORDER <= 10
Type Name size startpos endpos
WebRtc_Word32 pw32_autoCorr 22 0 21 (Length (BGN_LPC_ORDER + 1)*2)
WebRtc_Word16 pw16_tempVec 10 22 31 (Length BGN_LPC_ORDER)
WebRtc_Word16 pw16_rc 10 32 41 (Length BGN_LPC_ORDER)
WebRtc_Word16 pw16_outVec 74 0 73 (Length BGN_LPC_ORDER + 64)
Total: 74
*/
#if (BGN_LPC_ORDER > 10) && (defined SCRATCH)
#error BGN_LPC_ORDER is too large for current scratch memory allocation
#endif
#define SCRATCH_PW32_AUTO_CORR 0
#define SCRATCH_PW16_TEMP_VEC 22
#define SCRATCH_PW16_RC 32
#define SCRATCH_PW16_OUT_VEC 0
#define NETEQFIX_BGNFRAQINCQ16 229 /* 0.0035 in Q16 */
/****************************************************************************
* WebRtcNetEQ_BGNUpdate(...)
*
* This function updates the background noise parameter estimates.
*
* Input:
* - inst : NetEQ instance, where the speech history is stored.
* - scratchPtr : Pointer to scratch vector.
*
* Output:
* - inst : Updated information about the BGN characteristics.
*
* Return value : No return value
*/
void WebRtcNetEQ_BGNUpdate(
#ifdef SCRATCH
                           DSPInst_t *inst, WebRtc_Word16 *pw16_scratchPtr
#else
                           DSPInst_t *inst
#endif
)
{
    /* Number of speech-history samples used for the background-noise analysis */
    const WebRtc_Word16 w16_vecLen = 256;
    BGNInst_t *BGN_Inst = &(inst->BGNInst);
#ifdef SCRATCH
    /* Work buffers carved out of the caller-provided scratch area
       (offsets defined by the SCRATCH_* constants above). */
    WebRtc_Word32 *pw32_autoCorr = (WebRtc_Word32*) (pw16_scratchPtr + SCRATCH_PW32_AUTO_CORR);
    WebRtc_Word16 *pw16_tempVec = pw16_scratchPtr + SCRATCH_PW16_TEMP_VEC;
    WebRtc_Word16 *pw16_rc = pw16_scratchPtr + SCRATCH_PW16_RC;
    WebRtc_Word16 *pw16_outVec = pw16_scratchPtr + SCRATCH_PW16_OUT_VEC;
#else
    /* Stack-allocated work buffers when no scratch memory is used */
    WebRtc_Word32 pw32_autoCorr[BGN_LPC_ORDER + 1];
    WebRtc_Word16 pw16_tempVec[BGN_LPC_ORDER];
    WebRtc_Word16 pw16_outVec[BGN_LPC_ORDER + 64];
    WebRtc_Word16 pw16_rc[BGN_LPC_ORDER];
#endif
    WebRtc_Word16 pw16_A[BGN_LPC_ORDER + 1]; /* LPC coefficients */
    WebRtc_Word32 w32_tmp;
    WebRtc_Word16 *pw16_vec;
    WebRtc_Word16 w16_maxSample;
    WebRtc_Word16 w16_tmp, w16_tmp2;
    WebRtc_Word16 w16_enSampleShift;
    WebRtc_Word32 w32_en, w32_enBGN;
    WebRtc_Word32 w32_enUpdateThreashold;
    WebRtc_Word16 stability;

    /* Analyze the most recent w16_vecLen samples of the speech history */
    pw16_vec = inst->pw16_speechHistory + inst->w16_speechHistoryLen - w16_vecLen;

#ifdef NETEQ_VAD
    if( !inst->VADInst.VADEnabled /* we are not using post-decode VAD */
        || inst->VADInst.VADDecision == 0 )
    { /* ... or, post-decode VAD says passive speaker */
#endif /* NETEQ_VAD */

    /* Insert zeros to guarantee that boundary values do not distort autocorrelation */
    WEBRTC_SPL_MEMCPY_W16(pw16_tempVec, pw16_vec - BGN_LPC_ORDER, BGN_LPC_ORDER);
    WebRtcSpl_MemSetW16(pw16_vec - BGN_LPC_ORDER, 0, BGN_LPC_ORDER);

    /* Determine right-shift needed to keep the autocorrelation within 32 bits */
    w16_maxSample = WebRtcSpl_MaxAbsValueW16(pw16_vec, w16_vecLen);
    w16_tmp = 8 /* log2(w16_veclen) = 8 */
        - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_maxSample, w16_maxSample));
    w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);

    WebRtcNetEQ_CrossCorr(pw32_autoCorr, pw16_vec, pw16_vec, w16_vecLen, BGN_LPC_ORDER + 1,
        w16_tmp, -1);

    /* Copy back data (restore the samples zeroed above) */
    WEBRTC_SPL_MEMCPY_W16(pw16_vec - BGN_LPC_ORDER, pw16_tempVec, BGN_LPC_ORDER);

    w16_enSampleShift = 8 - w16_tmp; /* Number of shifts to get energy/sample */
    /* average energy per sample = pw32_autoCorr[0]>>w16_enSampleShift */
    w32_en = WEBRTC_SPL_RSHIFT_W32(pw32_autoCorr[0], w16_enSampleShift);

    /* Only update the BGN estimate for sufficiently low-energy (noise-like)
       signals, or when a post-decode VAD explicitly flags a passive speaker. */
    if ((w32_en < BGN_Inst->w32_energyUpdate
#ifdef NETEQ_VAD
        /* post-decode VAD disabled and w32_en sufficiently low */
        && !inst->VADInst.VADEnabled)
        /* ... or, post-decode VAD says passive speaker */
        || (inst->VADInst.VADEnabled && inst->VADInst.VADDecision == 0)
#else
        ) /* just close the extra parenthesis */
#endif /* NETEQ_VAD */
    )
    {
        /* Generate LPC coefficients */
        if (pw32_autoCorr[0] > 0)
        {
            /* regardless of whether the filter is actually updated or not,
               update energy threshold levels, since we have in fact observed
               a low energy signal */
            if (w32_en < BGN_Inst->w32_energyUpdate)
            {
                /* Never get under 1.0 in average sample energy */
                BGN_Inst->w32_energyUpdate = WEBRTC_SPL_MAX(w32_en, 1);
                BGN_Inst->w32_energyUpdateLow = 0;
            }

            stability = WebRtcSpl_LevinsonDurbin(pw32_autoCorr, pw16_A, pw16_rc, BGN_LPC_ORDER);
            /* Only update BGN if filter is stable */
            if (stability != 1)
            {
                return;
            }
        }
        else
        {
            /* Autocorrelation is zero => no signal; do not update */
            return;
        }

        /* Generate the CNG gain factor by looking at the energy of the residual */
        WebRtcSpl_FilterMAFastQ12(pw16_vec + w16_vecLen - 64, pw16_outVec, pw16_A,
            BGN_LPC_ORDER + 1, 64);
        w32_enBGN = WebRtcNetEQ_DotW16W16(pw16_outVec, pw16_outVec, 64, 0);
        /* Dot product should never overflow since it is BGN and residual! */

        /*
         * Check spectral flatness
         * Comparing the residual variance with the input signal variance tells
         * if the spectrum is flat or not.
         * (20*w32_enBGN) >= (w32_en<<6)
         * Also ensure that the energy is non-zero.
         */
        if ((WEBRTC_SPL_MUL_32_16(w32_enBGN, 20) >= WEBRTC_SPL_LSHIFT_W32(w32_en, 6))
            && (w32_en > 0))
        {
            /* spectrum is flat enough; save filter parameters */

            WEBRTC_SPL_MEMCPY_W16(BGN_Inst->pw16_filter, pw16_A, BGN_LPC_ORDER+1);
            WEBRTC_SPL_MEMCPY_W16(BGN_Inst->pw16_filterState,
                pw16_vec + w16_vecLen - BGN_LPC_ORDER, BGN_LPC_ORDER);

            /* Save energy level */
            BGN_Inst->w32_energy = WEBRTC_SPL_MAX(w32_en, 1);

            /* Update energy threshold levels */
            /* Never get under 1.0 in average sample energy */
            BGN_Inst->w32_energyUpdate = WEBRTC_SPL_MAX(w32_en, 1);
            BGN_Inst->w32_energyUpdateLow = 0;

            /* Normalize w32_enBGN to 29 or 30 bits before sqrt */
            w16_tmp2 = WebRtcSpl_NormW32(w32_enBGN) - 1;
            if (w16_tmp2 & 0x1)
            {
                w16_tmp2 -= 1; /* Even number of shifts required */
            }
            w32_enBGN = WEBRTC_SPL_SHIFT_W32(w32_enBGN, w16_tmp2);

            /* Calculate scale and shift factor */
            BGN_Inst->w16_scale = (WebRtc_Word16) WebRtcSpl_Sqrt(w32_enBGN);
            BGN_Inst->w16_scaleShift = 13 + ((6 + w16_tmp2) >> 1); /* RANDN table is in Q13, */
            /* 6=log2(64) */

            BGN_Inst->w16_initialized = 1;
        }

    }
    else
    {
        /*
         * Will only happen if post-decode VAD is disabled and w32_en is not low enough.
         * Increase the threshold for update so that it increases by a factor 4 in four
         * seconds.
         * energy = energy * 1.0035
         * (the multiply is done piecewise on the 32-bit value using 16x16-bit
         * fixed-point products to avoid overflow)
         */
        w32_tmp = WEBRTC_SPL_MUL_16_16_RSFT(NETEQFIX_BGNFRAQINCQ16,
            BGN_Inst->w32_energyUpdateLow, 16);
        w32_tmp += WEBRTC_SPL_MUL_16_16(NETEQFIX_BGNFRAQINCQ16,
            (WebRtc_Word16)(BGN_Inst->w32_energyUpdate & 0xFF));
        w32_tmp += (WEBRTC_SPL_MUL_16_16(NETEQFIX_BGNFRAQINCQ16,
            (WebRtc_Word16)((BGN_Inst->w32_energyUpdate>>8) & 0xFF)) << 8);
        BGN_Inst->w32_energyUpdateLow += w32_tmp;

        BGN_Inst->w32_energyUpdate += WEBRTC_SPL_MUL_16_16(NETEQFIX_BGNFRAQINCQ16,
            (WebRtc_Word16)(BGN_Inst->w32_energyUpdate>>16));
        BGN_Inst->w32_energyUpdate += BGN_Inst->w32_energyUpdateLow >> 16;
        BGN_Inst->w32_energyUpdateLow = (BGN_Inst->w32_energyUpdateLow & 0x0FFFF);

        /* Update maximum energy */
        /* Decrease by a factor 1/1024 each time */
        BGN_Inst->w32_energyMax = BGN_Inst->w32_energyMax - (BGN_Inst->w32_energyMax >> 10);
        if (w32_en > BGN_Inst->w32_energyMax)
        {
            BGN_Inst->w32_energyMax = w32_en;
        }

        /* Set update level to at the minimum 60.21dB lower than the maximum energy */
        w32_enUpdateThreashold = (BGN_Inst->w32_energyMax + 524288) >> 20;
        if (w32_enUpdateThreashold > BGN_Inst->w32_energyUpdate)
        {
            BGN_Inst->w32_energyUpdate = w32_enUpdateThreashold;
        }
    }

#ifdef NETEQ_VAD
    } /* closing initial if-statement */
#endif /* NETEQ_VAD */

    return;
}
#undef SCRATCH_PW32_AUTO_CORR
#undef SCRATCH_PW16_TEMP_VEC
#undef SCRATCH_PW16_RC
#undef SCRATCH_PW16_OUT_VEC

View File

@ -1,95 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* Calculates and stores the packet buffer statistics.
*/
#ifndef BUFFER_STATS_H
#define BUFFER_STATS_H
#include "automode.h"
#include "webrtc_neteq.h" /* to define enum WebRtcNetEQPlayoutMode */
/* NetEQ related decisions */
#define BUFSTATS_DO_NORMAL 0
#define BUFSTATS_DO_ACCELERATE 1
#define BUFSTATS_DO_MERGE 2
#define BUFSTATS_DO_EXPAND 3
#define BUFSTAT_REINIT 4
#define BUFSTATS_DO_RFC3389CNG_PACKET 5
#define BUFSTATS_DO_RFC3389CNG_NOPACKET 6
#define BUFSTATS_DO_INTERNAL_CNG_NOPACKET 7
#define BUFSTATS_DO_PREEMPTIVE_EXPAND 8
#define BUFSTAT_REINIT_DECODER 9
#define BUFSTATS_DO_DTMF_ONLY 10
/* Decisions related to when NetEQ is switched off (or in FAX mode) */
#define BUFSTATS_DO_ALTERNATIVE_PLC 11
#define BUFSTATS_DO_ALTERNATIVE_PLC_INC_TS 12
#define BUFSTATS_DO_AUDIO_REPETITION 13
#define BUFSTATS_DO_AUDIO_REPETITION_INC_TS 14
/* Reinit decoder states after this number of expands (upon arrival of new packet) */
#define REINIT_AFTER_EXPANDS 100
/* Wait no longer than this number of RecOut calls before using an "early" packet */
#define MAX_WAIT_FOR_PACKET 10
/* CNG modes */
#define CNG_OFF 0
#define CNG_RFC3389_ON 1
#define CNG_INTERNAL_ON 2
/*
 * Buffer statistics instance. Holds CNG state, expand history, and VQmon
 * delay statistics, plus the embedded automode instance that performs the
 * buffer-level optimization.
 */
typedef struct
{

    /* store statistical data here */
    WebRtc_Word16 w16_cngOn; /* remember if CNG is interrupted by other event (e.g. DTMF);
     one of CNG_OFF / CNG_RFC3389_ON / CNG_INTERNAL_ON */
    WebRtc_Word16 w16_noExpand; /* 1 if the previous mode was NOT expand/fade-to-BGN */
    WebRtc_Word32 uw32_CNGplayedTS; /* number of timestamps played as CNG */

    /* VQmon data */
    WebRtc_UWord16 avgDelayMsQ8; /* IIR-averaged buffer delay, ms in Q8 */
    WebRtc_Word16 maxDelayMs; /* maximum observed buffer delay in ms */

    AutomodeInst_t Automode_inst; /* automatic buffer-level optimization state */

} BufstatsInst_t;
/****************************************************************************
* WebRtcNetEQ_BufstatsDecision()
*
* Gives a decision about what action that is currently desired
*
*
* Input:
* inst: The bufstat instance
* cur_size: Current buffer size in ms in Q3 domain
* targetTS: The desired timestamp to start playout from
* availableTS: The closest future value available in buffer
* noPacket 1 if no packet is available, makes availableTS undefined
* prevPlayMode mode of last NetEq playout
* timestampsPerCall number of timestamp for 10ms
*
* Output:
* Returns: A decision, as defined above (see top of file)
*
*/
WebRtc_UWord16 WebRtcNetEQ_BufstatsDecision(BufstatsInst_t *inst, WebRtc_Word16 frameSize,
WebRtc_Word32 cur_size, WebRtc_UWord32 targetTS,
WebRtc_UWord32 availableTS, int noPacket,
int cngPacket, int prevPlayMode,
enum WebRtcNetEQPlayoutMode playoutMode,
int timestampsPerCall, int NoOfExpandCalls,
WebRtc_Word16 fs_mult,
WebRtc_Word16 lastModeBGNonly, int playDtmf);
#endif

View File

@ -1,413 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* This file contains the function where the main decision logic for buffer level
* adaptation happens.
*/
#include "buffer_stats.h"
#include <assert.h>
#include "signal_processing_library.h"
#include "automode.h"
#include "neteq_defines.h"
#include "neteq_error_codes.h"
#include "webrtc_neteq.h"
#define NETEQ_BUFSTAT_20MS_Q7 2560 /* = 20 ms in Q7 */
WebRtc_UWord16 WebRtcNetEQ_BufstatsDecision(BufstatsInst_t *inst, WebRtc_Word16 frameSize,
                                            WebRtc_Word32 cur_size, WebRtc_UWord32 targetTS,
                                            WebRtc_UWord32 availableTS, int noPacket,
                                            int cngPacket, int prevPlayMode,
                                            enum WebRtcNetEQPlayoutMode playoutMode,
                                            int timestampsPerCall, int NoOfExpandCalls,
                                            WebRtc_Word16 fs_mult,
                                            WebRtc_Word16 lastModeBGNonly, int playDtmf)
{

    int currentDelayMs;
    WebRtc_Word32 currSizeSamples = cur_size; /* buffer size before conversion below */
    WebRtc_Word16 extraDelayPacketsQ8 = 0; /* video-sync extra delay in packets, Q8 */

    /* Avoid overflow if the buffer size should be really large (cur_size is limited 256ms) */
    WebRtc_Word32 curr_sizeQ7 = WEBRTC_SPL_LSHIFT_W32(cur_size, 4);
    WebRtc_UWord16 level_limit_hi, level_limit_lo;

    /* Keep prevTimeScale set only if the last operation was a time-scale one
       (accelerate or pre-emptive expand, successful or low-energy variant). */
    inst->Automode_inst.prevTimeScale &= (prevPlayMode == MODE_SUCCESS_ACCELERATE
        || prevPlayMode == MODE_LOWEN_ACCELERATE || prevPlayMode == MODE_SUCCESS_PREEMPTIVE
        || prevPlayMode == MODE_LOWEN_PREEMPTIVE);

    if ((prevPlayMode != MODE_RFC3389CNG) && (prevPlayMode != MODE_CODEC_INTERNAL_CNG))
    {
        /*
         * Do not update buffer history if currently playing CNG
         * since it will bias the filtered buffer level.
         */
        WebRtcNetEQ_BufferLevelFilter(cur_size, &(inst->Automode_inst), timestampsPerCall,
            fs_mult);
    }
    else
    {
        /* only update time counters */
        inst->Automode_inst.packetIatCountSamp += timestampsPerCall; /* packet inter-arrival time */
        inst->Automode_inst.peakIatCountSamp += timestampsPerCall; /* peak inter-arrival time */
        inst->Automode_inst.timescaleHoldOff >>= 1; /* time-scaling limiter */
    }

    /* from here on, cur_size holds the (saturated) Q7 value */
    cur_size = WEBRTC_SPL_MIN(curr_sizeQ7, WEBRTC_SPL_WORD16_MAX);

    /* Calculate VQmon related variables */
    /* avgDelay = avgDelay*(511/512) + currentDelay*(1/512) (sample ms delay in Q8) */
    inst->avgDelayMsQ8 = (WebRtc_Word16) (WEBRTC_SPL_MUL_16_16_RSFT(inst->avgDelayMsQ8,511,9)
        + (cur_size >> 9));

    /* Update maximum delay if needed */
    currentDelayMs = (curr_sizeQ7 >> 7);
    if (currentDelayMs > inst->maxDelayMs)
    {
        inst->maxDelayMs = currentDelayMs;
    }

    /* NetEQ is on with normal or streaming mode */
    if (playoutMode == kPlayoutOn || playoutMode == kPlayoutStreaming)
    {

        /* Guard for errors, so that it should not get stuck in error mode */
        if (prevPlayMode == MODE_ERROR)
        {
            if (noPacket)
            {
                return BUFSTATS_DO_EXPAND;
            }
            else
            {
                return BUFSTAT_REINIT;
            }
        }

        if (prevPlayMode != MODE_EXPAND && prevPlayMode != MODE_FADE_TO_BGN)
        {
            inst->w16_noExpand = 1;
        }
        else
        {
            inst->w16_noExpand = 0;
        }

        if (cngPacket)
        {
            /* signed difference between wanted and available TS */
            WebRtc_Word32 diffTS = (inst->uw32_CNGplayedTS + targetTS) - availableTS;

            if ((diffTS) < 0 && (prevPlayMode == MODE_RFC3389CNG))
            {
                /* Not time to play this packet yet. Wait another round before using this
                 * packet. Keep on playing CNG from previous CNG parameters. */
                return BUFSTATS_DO_RFC3389CNG_NOPACKET;
            }

            /* otherwise, go for the CNG packet now */
            return BUFSTATS_DO_RFC3389CNG_PACKET;
        }

        /* Check for expand/cng */
        if (noPacket)
        {
            if (inst->w16_cngOn == CNG_RFC3389_ON)
            {
                /* keep on playing CNG */
                return BUFSTATS_DO_RFC3389CNG_NOPACKET;
            }
            else if (inst->w16_cngOn == CNG_INTERNAL_ON)
            {
                /* keep on playing internal CNG */
                return BUFSTATS_DO_INTERNAL_CNG_NOPACKET;
            }
            else if (playDtmf == 1)
            {
                /* we have no audio data, but can play DTMF */
                return BUFSTATS_DO_DTMF_ONLY;
            }
            else
            {
                /* nothing to play => do Expand */
                return BUFSTATS_DO_EXPAND;
            }
        }

        /*
         * If the expand period was very long, reset NetEQ since it is likely that the
         * sender was restarted.
         */
        if (NoOfExpandCalls > REINIT_AFTER_EXPANDS) return BUFSTAT_REINIT_DECODER;

        /* Calculate extra delay in Q8 packets */
        if (inst->Automode_inst.extraDelayMs > 0 && inst->Automode_inst.packetSpeechLenSamp
            > 0)
        {
            extraDelayPacketsQ8 = WebRtcSpl_DivW32W16ResW16(
                (WEBRTC_SPL_MUL(inst->Automode_inst.extraDelayMs, 8 * fs_mult) << 8),
                inst->Automode_inst.packetSpeechLenSamp);
            /* (extra delay in samples in Q8) */
        }

        /* Check if needed packet is available */
        if (targetTS == availableTS)
        {

            /* If last mode was not expand, and there is no DTMF to play */
            if (inst->w16_noExpand == 1 && playDtmf == 0)
            {
                /* If so check for accelerate */

                level_limit_lo = ((inst->Automode_inst.optBufLevel) >> 1) /* 50 % */
                    + ((inst->Automode_inst.optBufLevel) >> 2); /* ... + 25% = 75% */

                /* set upper limit to optBufLevel, but make sure that window is at least 20ms */
                level_limit_hi = WEBRTC_SPL_MAX(inst->Automode_inst.optBufLevel,
                    level_limit_lo +
                    WebRtcSpl_DivW32W16ResW16((WEBRTC_SPL_MUL(20*8, fs_mult) << 8),
                        inst->Automode_inst.packetSpeechLenSamp));

                /* if extra delay is non-zero, add it */
                if (extraDelayPacketsQ8 > 0)
                {
                    level_limit_hi += extraDelayPacketsQ8;
                    level_limit_lo += extraDelayPacketsQ8;
                }

                if (((inst->Automode_inst.buffLevelFilt >= level_limit_hi) &&
                    (inst->Automode_inst.timescaleHoldOff == 0)) ||
                    (inst->Automode_inst.buffLevelFilt >= level_limit_hi << 2))
                {
                    /*
                     * Buffer level higher than limit and time-scaling allowed,
                     * OR buffer level _really_ high.
                     */
                    return BUFSTATS_DO_ACCELERATE;
                }
                else if ((inst->Automode_inst.buffLevelFilt < level_limit_lo)
                    && (inst->Automode_inst.timescaleHoldOff == 0))
                {
                    return BUFSTATS_DO_PREEMPTIVE_EXPAND;
                }
            }
            return BUFSTATS_DO_NORMAL;
        }

        /* Check for Merge */
        else if (availableTS > targetTS)
        {

            /* Check that we do not play a packet "too early" */
            if ((prevPlayMode == MODE_EXPAND)
                && (availableTS - targetTS
                    < (WebRtc_UWord32) WEBRTC_SPL_MUL_16_16((WebRtc_Word16)timestampsPerCall,
                        (WebRtc_Word16)REINIT_AFTER_EXPANDS))
                && (NoOfExpandCalls < MAX_WAIT_FOR_PACKET)
                && (availableTS
                    > targetTS
                        + WEBRTC_SPL_MUL_16_16((WebRtc_Word16)timestampsPerCall,
                            (WebRtc_Word16)NoOfExpandCalls))
                && (inst->Automode_inst.buffLevelFilt <= inst->Automode_inst.optBufLevel
                    + extraDelayPacketsQ8))
            {
                if (playDtmf == 1)
                {
                    /* we still have DTMF to play, so do not perform expand */
                    return BUFSTATS_DO_DTMF_ONLY;
                }
                else
                {
                    /* nothing to play */
                    return BUFSTATS_DO_EXPAND;
                }
            }

            /* If previous was CNG period or BGNonly then no merge is needed */
            if ((prevPlayMode == MODE_RFC3389CNG) || (prevPlayMode == MODE_CODEC_INTERNAL_CNG)
                || lastModeBGNonly)
            {
                /*
                 * Keep the same delay as before the CNG (or maximum 70 ms in buffer as safety
                 * precaution), but make sure that the number of samples in buffer is no
                 * higher than 4 times the optimal level.
                 */
                WebRtc_Word32 diffTS = (inst->uw32_CNGplayedTS + targetTS) - availableTS;
                if (diffTS >= 0
                    || (WEBRTC_SPL_MUL_16_16_RSFT( inst->Automode_inst.optBufLevel
                        + extraDelayPacketsQ8,
                        inst->Automode_inst.packetSpeechLenSamp, 6) < currSizeSamples))
                {
                    /* it is time to play this new packet */
                    return BUFSTATS_DO_NORMAL;
                }
                else
                {
                    /* it is too early to play this new packet => keep on playing CNG */
                    if (prevPlayMode == MODE_RFC3389CNG)
                    {
                        return BUFSTATS_DO_RFC3389CNG_NOPACKET;
                    }
                    else if (prevPlayMode == MODE_CODEC_INTERNAL_CNG)
                    {
                        return BUFSTATS_DO_INTERNAL_CNG_NOPACKET;
                    }
                    else if (playDtmf == 1)
                    {
                        /* we have no audio data, but can play DTMF */
                        return BUFSTATS_DO_DTMF_ONLY;
                    }
                    else /* lastModeBGNonly */
                    {
                        /* signal expand, but this will result in BGN again */
                        return BUFSTATS_DO_EXPAND;
                    }
                }
            }

            /* Do not merge unless we have done a Expand before (for complexity reasons) */
            if ((inst->w16_noExpand == 0) || ((frameSize < timestampsPerCall) && (cur_size
                > NETEQ_BUFSTAT_20MS_Q7)))
            {
                return BUFSTATS_DO_MERGE;
            }
            else if (playDtmf == 1)
            {
                /* play DTMF instead of expand */
                return BUFSTATS_DO_DTMF_ONLY;
            }
            else
            {
                return BUFSTATS_DO_EXPAND;
            }
        }
    }
    else
    { /* kPlayoutOff or kPlayoutFax */

        if (cngPacket)
        {
            if (((WebRtc_Word32) ((inst->uw32_CNGplayedTS + targetTS) - availableTS)) >= 0)
            {
                /* time to play this packet now */
                return BUFSTATS_DO_RFC3389CNG_PACKET;
            }
            else
            {
                /* wait before playing this packet */
                return BUFSTATS_DO_RFC3389CNG_NOPACKET;
            }
        }
        if (noPacket)
        {
            /*
             * No packet =>
             * 1. If in CNG mode play as usual
             * 2. Otherwise use other method to generate data and hold TS value
             */
            if (inst->w16_cngOn == CNG_RFC3389_ON)
            {
                /* keep on playing CNG */
                return BUFSTATS_DO_RFC3389CNG_NOPACKET;
            }
            else if (inst->w16_cngOn == CNG_INTERNAL_ON)
            {
                /* keep on playing internal CNG */
                return BUFSTATS_DO_INTERNAL_CNG_NOPACKET;
            }
            else
            {
                /* nothing to play => invent some data to play out */
                if (playoutMode == kPlayoutOff)
                {
                    return BUFSTATS_DO_ALTERNATIVE_PLC;
                }
                else if (playoutMode == kPlayoutFax)
                {
                    return BUFSTATS_DO_AUDIO_REPETITION;
                }
                else
                {
                    /* UNDEFINED, should not get here... */
                    assert(0);
                    return BUFSTAT_REINIT;
                }
            }
        }
        else if (targetTS == availableTS)
        {
            return BUFSTATS_DO_NORMAL;
        }
        else
        {
            if (((WebRtc_Word32) ((inst->uw32_CNGplayedTS + targetTS) - availableTS)) >= 0)
            {
                return BUFSTATS_DO_NORMAL;
            }
            else if (playoutMode == kPlayoutOff)
            {
                /*
                 * If currently playing CNG, continue with that. Don't increase TS
                 * since uw32_CNGplayedTS will be increased.
                 */
                if (inst->w16_cngOn == CNG_RFC3389_ON)
                {
                    return BUFSTATS_DO_RFC3389CNG_NOPACKET;
                }
                else if (inst->w16_cngOn == CNG_INTERNAL_ON)
                {
                    return BUFSTATS_DO_INTERNAL_CNG_NOPACKET;
                }
                else
                {
                    /*
                     * Otherwise, do PLC and increase TS while waiting for the time to
                     * play this packet.
                     */
                    return BUFSTATS_DO_ALTERNATIVE_PLC_INC_TS;
                }
            }
            else if (playoutMode == kPlayoutFax)
            {
                /*
                 * If currently playing CNG, continue with that; don't increase TS since
                 * uw32_CNGplayedTS will be increased.
                 */
                if (inst->w16_cngOn == CNG_RFC3389_ON)
                {
                    return BUFSTATS_DO_RFC3389CNG_NOPACKET;
                }
                else if (inst->w16_cngOn == CNG_INTERNAL_ON)
                {
                    return BUFSTATS_DO_INTERNAL_CNG_NOPACKET;
                }
                else
                {
                    /*
                     * Otherwise, do audio repetition and increase TS while waiting for the
                     * time to play this packet.
                     */
                    return BUFSTATS_DO_AUDIO_REPETITION_INC_TS;
                }
            }
            else
            {
                /* UNDEFINED, should not get here... */
                assert(0);
                return BUFSTAT_REINIT;
            }
        }
    }
    /* We should not get here (but sometimes we do anyway...) */
    return BUFSTAT_REINIT;
}

View File

@ -1,155 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* This file contains the function for obtaining comfort noise from noise parameters
* according to IETF RFC 3389.
*/
#include "dsp.h"
#include "signal_processing_library.h"
#include "webrtc_cng.h"
#include "dsp_helpfunctions.h"
#include "neteq_error_codes.h"
/****************************************************************************
* WebRtcNetEQ_Cng(...)
*
* This function produces CNG according to RFC 3389.
*
* Input:
* - inst : NetEQ DSP instance
* - len : Number of samples to produce (max 640 or
* 640 - fsHz*5/8000 for first-time CNG, governed by
* the definition of WEBRTC_CNG_MAX_OUTSIZE_ORDER in
* webrtc_cng.h)
*
* Output:
* - pw16_outData : Output CNG
*
* Return value : 0 - Ok
* <0 - Error
*/
#ifdef NETEQ_CNG_CODEC
/* Must compile NetEQ with CNG support to enable this function */
int WebRtcNetEQ_Cng(DSPInst_t *inst, WebRtc_Word16 *pw16_outData, int len)
{
    WebRtc_Word16 w16_winMute = 0; /* mixing factor for overlap data */
    WebRtc_Word16 w16_winUnMute = 0; /* mixing factor for comfort noise */
    WebRtc_Word16 w16_winMuteInc = 0; /* mixing factor increment (negative) */
    WebRtc_Word16 w16_winUnMuteInc = 0; /* mixing factor increment */
    int i;

    /*
     * Check if last RecOut call was other than RFC3389,
     * that is, this call is the first of a CNG period.
     */
    if (inst->w16_mode != MODE_RFC3389CNG)
    {

        /* Reset generation and overlap slightly with old data */

        /* Generate len samples + overlap */
        if (WebRtcCng_Generate(inst->CNG_Codec_inst, pw16_outData,
            (WebRtc_Word16) (len + inst->ExpandInst.w16_overlap), 1) < 0)
        {
            /* error returned; translate to a negative NetEQ error code */
            return -WebRtcCng_GetErrorCodeDec(inst->CNG_Codec_inst);
        }

        /* Set windowing parameters depending on sample rate */
        if (inst->fs == 8000)
        {
            /* Windowing in Q15 */
            w16_winMute = NETEQ_OVERLAP_WINMUTE_8KHZ_START;
            w16_winMuteInc = NETEQ_OVERLAP_WINMUTE_8KHZ_INC;
            w16_winUnMute = NETEQ_OVERLAP_WINUNMUTE_8KHZ_START;
            w16_winUnMuteInc = NETEQ_OVERLAP_WINUNMUTE_8KHZ_INC;
#ifdef NETEQ_WIDEBAND
        }
        else if (inst->fs == 16000)
        {
            /* Windowing in Q15 */
            w16_winMute = NETEQ_OVERLAP_WINMUTE_16KHZ_START;
            w16_winMuteInc = NETEQ_OVERLAP_WINMUTE_16KHZ_INC;
            w16_winUnMute = NETEQ_OVERLAP_WINUNMUTE_16KHZ_START;
            w16_winUnMuteInc = NETEQ_OVERLAP_WINUNMUTE_16KHZ_INC;
#endif
#ifdef NETEQ_32KHZ_WIDEBAND
        }
        else if (inst->fs == 32000)
        {
            /* Windowing in Q15 */
            w16_winMute = NETEQ_OVERLAP_WINMUTE_32KHZ_START;
            w16_winMuteInc = NETEQ_OVERLAP_WINMUTE_32KHZ_INC;
            w16_winUnMute = NETEQ_OVERLAP_WINUNMUTE_32KHZ_START;
            w16_winUnMuteInc = NETEQ_OVERLAP_WINUNMUTE_32KHZ_INC;
#endif
#ifdef NETEQ_48KHZ_WIDEBAND
        }
        else if (inst->fs == 48000)
        {
            /* Windowing in Q15 */
            w16_winMute = NETEQ_OVERLAP_WINMUTE_48KHZ_START;
            w16_winMuteInc = NETEQ_OVERLAP_WINMUTE_48KHZ_INC;
            w16_winUnMute = NETEQ_OVERLAP_WINUNMUTE_48KHZ_START;
            w16_winUnMuteInc = NETEQ_OVERLAP_WINUNMUTE_48KHZ_INC;
#endif
        }
        else
        {
            /* Unsupported sample rate (should not be possible) */
            return NETEQ_OTHER_ERROR;
        }

        /* Do overlap add between new vector and overlap */
        for (i = 0; i < inst->ExpandInst.w16_overlap; i++)
        {
            /* overlapVec[i] = WinMute * overlapVec[i] + WinUnMute * outData[i] */
            inst->ExpandInst.pw16_overlapVec[i] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(
                WEBRTC_SPL_MUL_16_16(
                    inst->ExpandInst.pw16_overlapVec[i], w16_winMute) +
                WEBRTC_SPL_MUL_16_16(pw16_outData[i], w16_winUnMute)
                + 16384, 15); /* shift with proper rounding */

            w16_winMute += w16_winMuteInc; /* decrease mute factor (inc<0) */
            w16_winUnMute += w16_winUnMuteInc; /* increase unmute factor (inc>0) */
        }

        /*
         * Shift the contents of the outData buffer by overlap samples, since we
         * already used these first samples in the overlapVec above
         */
        WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_outData+inst->ExpandInst.w16_overlap, len);

    }
    else
    {
        /* This is a subsequent CNG call; no special overlap needed */

        /* Generate len samples */
        if (WebRtcCng_Generate(inst->CNG_Codec_inst, pw16_outData, (WebRtc_Word16) len, 0) < 0)
        {
            /* error returned; translate to a negative NetEQ error code */
            return -WebRtcCng_GetErrorCodeDec(inst->CNG_Codec_inst);
        }
    }

    return 0;
}
#endif /* NETEQ_CNG_CODEC */

View File

@ -1,737 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* Implementation of the codec database.
*/
#include "codec_db.h"
#include <string.h> /* to define NULL */
#include "signal_processing_library.h"
#include "neteq_error_codes.h"
/*
* Resets the codec database.
*/
/*
 * Resets the codec database to its initial, empty state.
 * The whole instance is zero-filled, after which every lookup table
 * entry is marked as unused (-1). Always returns 0.
 */
int WebRtcNetEQ_DbReset(CodecDbInst_t *inst)
{
    int k;

    /* Clear the entire instance... */
    WebRtcSpl_MemSetW16((WebRtc_Word16*) inst, 0,
        sizeof(CodecDbInst_t) / sizeof(WebRtc_Word16));

    /* ...then stamp the "empty slot" marker into each lookup table. */
    for (k = 0; k < NUM_TOTAL_CODECS; k++)
    {
        inst->position[k] = -1;
    }

    for (k = 0; k < NUM_CODECS; k++)
    {
        inst->payloadType[k] = -1;
    }

    for (k = 0; k < NUM_CNG_CODECS; k++)
    {
        inst->CNGpayloadType[k] = -1;
    }

    return 0;
}
/*
* Adds a new codec to the database.
*/
/*
 * Adds a new codec to the database, or re-registers an existing one.
 *
 * Input:
 *      - inst         : Codec database instance
 *      - codec        : Codec identifier (must be inside the reserved-range
 *                       bounds and compiled in)
 *      - payloadType  : RTP payload type to associate with the codec
 *      - func*        : Decoder function pointers (may be NULL where the
 *                       codec has no such function)
 *      - codec_state  : Opaque decoder state pointer, stored as-is
 *      - codec_fs     : Codec sample rate in Hz (8000/16000/32000/48000,
 *                       depending on build flags)
 *
 * Return value        : 0 on success, or a CODEC_DB_* error code.
 */
int WebRtcNetEQ_DbAdd(CodecDbInst_t *inst, enum WebRtcNetEQDecoder codec,
                      WebRtc_Word16 payloadType, FuncDecode funcDecode,
                      FuncDecode funcDecodeRCU, FuncDecodePLC funcDecodePLC,
                      FuncDecodeInit funcDecodeInit, FuncAddLatePkt funcAddLatePkt,
                      FuncGetMDinfo funcGetMDinfo, FuncGetPitchInfo funcGetPitch,
                      FuncUpdBWEst funcUpdBWEst, FuncGetErrorCode funcGetErrorCode,
                      void* codec_state, WebRtc_UWord16 codec_fs)
{
    int temp;
    int insertCNGcodec = 0, overwriteCNGcodec = 0, CNGpos = -1;

#ifndef NETEQ_RED_CODEC
    /* RED rejected when the build does not include RED support */
    if (codec == kDecoderRED)
    {
        return CODEC_DB_UNSUPPORTED_CODEC;
    }
#endif

    /* Reject identifiers outside the valid (non-reserved) enum range */
    if (((int) codec <= (int) kDecoderReservedStart) || ((int) codec
        >= (int) kDecoderReservedEnd))
    {
        return CODEC_DB_UNSUPPORTED_CODEC;
    }

    /* Only sample rates enabled by the build flags are accepted */
    if ((codec_fs != 8000)
#ifdef NETEQ_WIDEBAND
        &&(codec_fs!=16000)
#endif
#ifdef NETEQ_32KHZ_WIDEBAND
        &&(codec_fs!=32000)
#endif
#ifdef NETEQ_48KHZ_WIDEBAND
        &&(codec_fs!=48000)
#endif
    )
    {
        return CODEC_DB_UNSUPPORTED_FS;
    }

    /* Ensure that the codec type is supported */
    /* Each case is compiled in only if the corresponding codec flag is set;
       a codec not compiled in falls through to the default (error) branch. */
    switch (codec)
    {
#ifdef NETEQ_PCM16B_CODEC
        case kDecoderPCM16B :
#endif
#ifdef NETEQ_G711_CODEC
        case kDecoderPCMu :
        case kDecoderPCMa :
#endif
#ifdef NETEQ_ILBC_CODEC
        case kDecoderILBC :
#endif
#ifdef NETEQ_ISAC_CODEC
        case kDecoderISAC :
#endif
#ifdef NETEQ_ISAC_SWB_CODEC
        case kDecoderISACswb :
#endif
#ifdef NETEQ_G722_CODEC
        case kDecoderG722 :
#endif
#ifdef NETEQ_WIDEBAND
        case kDecoderPCM16Bwb :
#endif
#ifdef NETEQ_32KHZ_WIDEBAND
        case kDecoderPCM16Bswb32kHz :
#endif
#ifdef NETEQ_CNG_CODEC
        case kDecoderCNG :
#endif
#ifdef NETEQ_ATEVENT_DECODE
        case kDecoderAVT :
#endif
#ifdef NETEQ_RED_CODEC
        case kDecoderRED :
#endif
#ifdef NETEQ_48KHZ_WIDEBAND
        case kDecoderPCM16Bswb48kHz :
#endif
#ifdef NETEQ_ARBITRARY_CODEC
        case kDecoderArbitrary:
#endif
#ifdef NETEQ_G729_CODEC
        case kDecoderG729:
#endif
#ifdef NETEQ_G729_1_CODEC
        case kDecoderG729_1 :
#endif
#ifdef NETEQ_G726_CODEC
        case kDecoderG726_16 :
        case kDecoderG726_24 :
        case kDecoderG726_32 :
        case kDecoderG726_40 :
#endif
#ifdef NETEQ_G722_1_CODEC
        case kDecoderG722_1_16 :
        case kDecoderG722_1_24 :
        case kDecoderG722_1_32 :
#endif
#ifdef NETEQ_G722_1C_CODEC
        case kDecoderG722_1C_24 :
        case kDecoderG722_1C_32 :
        case kDecoderG722_1C_48 :
#endif
#ifdef NETEQ_SPEEX_CODEC
        case kDecoderSPEEX_8 :
        case kDecoderSPEEX_16 :
#endif
#ifdef NETEQ_GSMFR_CODEC
        case kDecoderGSMFR :
#endif
#ifdef NETEQ_AMR_CODEC
        case kDecoderAMR :
#endif
#ifdef NETEQ_AMRWB_CODEC
        case kDecoderAMRWB :
#endif
        {
            /* If we end up here, the inserted codec is supported => Do nothing */
            break;
        }
        default:
        {
            /* If we get to this point, the inserted codec is not supported */
            return CODEC_DB_UNSUPPORTED_CODEC;
        }
    }

    /* Check to see if payload type is taken */
    if (WebRtcNetEQ_DbGetCodec(inst, payloadType) > 0)
    {
        return CODEC_DB_PAYLOAD_TAKEN;
    }

    /* Special case for CNG codecs */
    /* Only one kDecoderCNG entry lives in the regular codec table; the
       per-sample-rate payload types go into CNGpayloadType[]. */
    if (codec == kDecoderCNG)
    {
        /* check if this is first CNG codec to be registered */
        if (WebRtcNetEQ_DbGetPayload(inst, codec) == CODEC_DB_NOT_EXIST2)
        {
            /* no other CNG codec found */
            insertCNGcodec = 1;
        }

        /* find the appropriate insert position in CNG payload vector */
        switch (codec_fs)
        {
#ifdef NETEQ_WIDEBAND
            case 16000:
                CNGpos = 1;
                break;
#endif
#ifdef NETEQ_32KHZ_WIDEBAND
            case 32000:
                CNGpos = 2;
                break;
#endif
#ifdef NETEQ_48KHZ_WIDEBAND
            case 48000:
                CNGpos = 3;
                break;
#endif
            default: /* 8000 Hz case */
                CNGpos = 0;
                /*
                 * The 8 kHz CNG payload type is the one associated with the regular codec DB
                 * should override any other setting.
                 * Overwrite if this isn't the first CNG
                 */
                overwriteCNGcodec = !insertCNGcodec;
                break;
        }

        /* insert CNG payload type */
        inst->CNGpayloadType[CNGpos] = payloadType;
    }

    if ((codec != kDecoderCNG) || (insertCNGcodec == 1) || (overwriteCNGcodec == 1))
    {
        /* Check if we have reached the maximum numbers of simultaneous codecs */
        if (inst->nrOfCodecs == NUM_CODECS) return CODEC_DB_FULL;

        /* Check that codec has not already been initialized to DB =>
         remove it and reinitialize according to new spec */
        if ((inst->position[codec] != -1) && (overwriteCNGcodec != 1))
        { /* if registering multiple CNG codecs, don't remove, just overwrite */
            WebRtcNetEQ_DbRemove(inst, codec);
        }

        if (overwriteCNGcodec == 1)
        {
            /* reuse the existing slot for the 8 kHz CNG codec */
            temp = inst->position[codec];
        }
        else
        {
            temp = inst->nrOfCodecs; /* Store this codecs position */
            inst->position[codec] = temp;
            inst->nrOfCodecs++;
        }

        inst->payloadType[temp] = payloadType;

        /* Copy to database */
        inst->codec_state[temp] = codec_state;
        inst->funcDecode[temp] = funcDecode;
        inst->funcDecodeRCU[temp] = funcDecodeRCU;
        inst->funcAddLatePkt[temp] = funcAddLatePkt;
        inst->funcDecodeInit[temp] = funcDecodeInit;
        inst->funcDecodePLC[temp] = funcDecodePLC;
        inst->funcGetMDinfo[temp] = funcGetMDinfo;
        inst->funcGetPitch[temp] = funcGetPitch;
        inst->funcUpdBWEst[temp] = funcUpdBWEst;
        inst->funcGetErrorCode[temp] = funcGetErrorCode;
        inst->codec_fs[temp] = codec_fs;
    }

    return 0;
}
/*
* Removes a codec from the database.
*/
/*
 * Removes a codec from the database and compacts the internal tables so
 * slots 0..nrOfCodecs-1 stay contiguous.
 *
 * Input:
 *      - inst  : Codec database instance
 *      - codec : Codec identifier to remove
 *
 * Return value : 0 on success, CODEC_DB_UNSUPPORTED_CODEC for an invalid
 *                identifier, CODEC_DB_NOT_EXIST4 if not registered.
 */
int WebRtcNetEQ_DbRemove(CodecDbInst_t *inst, enum WebRtcNetEQDecoder codec)
{
    int i;
    int pos = -1;

#ifndef NETEQ_RED_CODEC
    /* RED rejected when the build does not include RED support */
    if (codec == kDecoderRED)
    {
        return CODEC_DB_UNSUPPORTED_CODEC;
    }
#endif

    /* Reject identifiers outside the valid (non-reserved) enum range */
    if (((int) codec <= (int) kDecoderReservedStart) || ((int) codec
        >= (int) kDecoderReservedEnd))
    {
        return CODEC_DB_UNSUPPORTED_CODEC;
    }

    pos = inst->position[codec];
    if (pos == -1)
    {
        /* codec was never registered */
        return CODEC_DB_NOT_EXIST4;
    }
    else
    {
        /* Remove this codec */
        inst->position[codec] = -1;

        /* Shift every slot above the removed one down a step,
           keeping all parallel arrays in sync. */
        for (i = pos; i < (inst->nrOfCodecs - 1); i++)
        {
            inst->payloadType[i] = inst->payloadType[i + 1];
            inst->codec_state[i] = inst->codec_state[i + 1];
            inst->funcDecode[i] = inst->funcDecode[i + 1];
            inst->funcDecodeRCU[i] = inst->funcDecodeRCU[i + 1];
            inst->funcAddLatePkt[i] = inst->funcAddLatePkt[i + 1];
            inst->funcDecodeInit[i] = inst->funcDecodeInit[i + 1];
            inst->funcDecodePLC[i] = inst->funcDecodePLC[i + 1];
            inst->funcGetMDinfo[i] = inst->funcGetMDinfo[i + 1];
            inst->funcGetPitch[i] = inst->funcGetPitch[i + 1];
            inst->funcUpdBWEst[i] = inst->funcUpdBWEst[i + 1];
            inst->funcGetErrorCode[i] = inst->funcGetErrorCode[i + 1];
            inst->codec_fs[i] = inst->codec_fs[i + 1];
        }

        /* Clear the now-duplicated last slot (i == nrOfCodecs - 1 here). */
        inst->payloadType[i] = -1;
        inst->codec_state[i] = NULL;
        inst->funcDecode[i] = NULL;
        inst->funcDecodeRCU[i] = NULL;
        inst->funcAddLatePkt[i] = NULL;
        inst->funcDecodeInit[i] = NULL;
        inst->funcDecodePLC[i] = NULL;
        inst->funcGetMDinfo[i] = NULL;
        inst->funcGetPitch[i] = NULL;
        inst->funcUpdBWEst[i] = NULL;
        inst->funcGetErrorCode[i] = NULL;
        inst->codec_fs[i] = 0;

        /* Move down all the codecs above this one */
        for (i = 0; i < NUM_TOTAL_CODECS; i++)
        {
            if (inst->position[i] >= pos)
            {
                inst->position[i] = inst->position[i] - 1;
            }
        }
        inst->nrOfCodecs--;

        if (codec == kDecoderCNG)
        {
            /* also remove all registered CNG payload types */
            for (i = 0; i < NUM_CNG_CODECS; i++)
            {
                inst->CNGpayloadType[i] = -1;
            }
        }
    }
    return 0;
}
/*
* Get the decoder function pointers for a codec.
*/
/*
 * Gets the decoder function pointers and state for the given codec.
 *
 * Input:
 *      - inst     : Codec database instance
 *      - codec    : Codec identifier to look up
 *
 * Output:
 *      - ptr_inst : Filled with the codec's function pointers, state and
 *                   sample rate on success; zeroed on failure so stale
 *                   pointers cannot be used by mistake.
 *
 * Return value    : 0 on success, CODEC_DB_NOT_EXIST1 if the codec is
 *                   invalid or not registered.
 */
int WebRtcNetEQ_DbGetPtrs(CodecDbInst_t *inst, enum WebRtcNetEQDecoder codec,
                          CodecFuncInst_t *ptr_inst)
{
    int pos;

    /*
     * BUG FIX: validate the codec identifier BEFORE using it as an index;
     * the previous code read inst->position[codec] first, which is an
     * out-of-bounds read when codec is outside the table bounds.
     */
    if ((codec <= kDecoderReservedStart) || (codec >= kDecoderReservedEnd) || (codec
        > NUM_TOTAL_CODECS))
    {
        /* ERROR: invalid codec identifier */
        pos = -1;
    }
    else
    {
        pos = inst->position[codec];
    }

    if (pos >= 0)
    {
        /* Copy all registered entries for this codec to the output struct. */
        ptr_inst->codec_state = inst->codec_state[pos];
        ptr_inst->funcAddLatePkt = inst->funcAddLatePkt[pos];
        ptr_inst->funcDecode = inst->funcDecode[pos];
        ptr_inst->funcDecodeRCU = inst->funcDecodeRCU[pos];
        ptr_inst->funcDecodeInit = inst->funcDecodeInit[pos];
        ptr_inst->funcDecodePLC = inst->funcDecodePLC[pos];
        ptr_inst->funcGetMDinfo = inst->funcGetMDinfo[pos];
        ptr_inst->funcUpdBWEst = inst->funcUpdBWEst[pos];
        ptr_inst->funcGetErrorCode = inst->funcGetErrorCode[pos];
        ptr_inst->codec_fs = inst->codec_fs[pos];
        return 0;
    }
    else
    {
        /* Not found: clear the output struct. */
        WebRtcSpl_MemSetW16((WebRtc_Word16*) ptr_inst, 0,
            sizeof(CodecFuncInst_t) / sizeof(WebRtc_Word16));
        return CODEC_DB_NOT_EXIST1;
    }
}
/*
* Returns payload number given a codec identifier.
*/
/*
 * Looks up the RTP payload number registered for the given codec.
 * Returns CODEC_DB_NOT_EXIST2 when the codec is not in the database.
 */
int WebRtcNetEQ_DbGetPayload(CodecDbInst_t *inst, enum WebRtcNetEQDecoder codecID)
{
    const int slot = inst->position[codecID];

    if (slot == -1)
    {
        /* codec not registered */
        return CODEC_DB_NOT_EXIST2;
    }

    return inst->payloadType[slot];
}
/*
* Returns codec identifier given a payload number.
* Returns -1 if the payload type does not exist.
*/
/*
 * Returns the codec identifier registered for the given payload number,
 * kDecoderCNG when it matches a registered CNG payload, or
 * CODEC_DB_NOT_EXIST3 when nothing matches.
 */
int WebRtcNetEQ_DbGetCodec(CodecDbInst_t *inst, int payloadType)
{
    int k;

    /* Scan every codec identifier for a registered slot with this payload. */
    for (k = 0; k < NUM_TOTAL_CODECS; k++)
    {
        const int slot = inst->position[k];
        if ((slot != -1) && (inst->payloadType[slot] == payloadType))
        {
            return k;
        }
    }

    /* No regular codec matched; the payload may still be a CNG payload. */
    if (WebRtcNetEQ_DbIsCNGPayload(inst, payloadType))
    {
        return kDecoderCNG;
    }

    /* found no match */
    return CODEC_DB_NOT_EXIST3;
}
/*
* Extracts the Payload Split information of the codec with the specified payloadType.
*/
/*
 * Extracts the payload split information for the codec with the given ID.
 *
 * Input:
 *      - codecID   : Codec identifier
 *      - codedsize : Total payload size in bytes; only used for iLBC,
 *                    where the size disambiguates the frame length
 *
 * Output:
 *      - inst      : deltaBytes/deltaTime filled in. For sample-based
 *                    codecs a negative deltaBytes encodes bytes per
 *                    timestamp unit (see comment below).
 *                    NOTE(review): deltaTime is left untouched when
 *                    deltaBytes is set to NO_SPLIT; presumably callers
 *                    check deltaBytes first -- confirm.
 *
 * Return value     : 0 on success, AMBIGUOUS_ILBC_FRAME_SIZE or
 *                    CODEC_DB_UNKNOWN_CODEC on error.
 */
int WebRtcNetEQ_DbGetSplitInfo(SplitInfo_t *inst, enum WebRtcNetEQDecoder codecID,
                               int codedsize)
{
    switch (codecID)
    {
#ifdef NETEQ_ISAC_CODEC
        case kDecoderISAC:
#endif
#ifdef NETEQ_ISAC_SWB_CODEC
        case kDecoderISACswb:
#endif
#ifdef NETEQ_ARBITRARY_CODEC
        case kDecoderArbitrary:
#endif
#ifdef NETEQ_AMR_CODEC
        case kDecoderAMR:
#endif
#ifdef NETEQ_AMRWB_CODEC
        case kDecoderAMRWB:
#endif
#ifdef NETEQ_G726_CODEC
        /* Treat G726 as non-splittable to simplify the implementation */
        case kDecoderG726_16:
        case kDecoderG726_24:
        case kDecoderG726_32:
        case kDecoderG726_40:
#endif
#ifdef NETEQ_SPEEX_CODEC
        case kDecoderSPEEX_8:
        case kDecoderSPEEX_16:
#endif
#ifdef NETEQ_G729_1_CODEC
        case kDecoderG729_1:
#endif
        {
            /* These codecs' payloads are not splittable */
            inst->deltaBytes = NO_SPLIT;
            return 0;
        }

            /*
             * Sample based coders are a special case.
             * In this case, deltaTime signals the number of bytes per timestamp unit times 2
             * in log2 domain.
             */
#if (defined NETEQ_G711_CODEC)
        case kDecoderPCMu:
        case kDecoderPCMa:
        {
            inst->deltaBytes = -12;
            inst->deltaTime = 1;
            return 0;
        }
#endif
#if (defined NETEQ_G722_CODEC)
        case kDecoderG722:
        {
            inst->deltaBytes = -14;
            inst->deltaTime = 0;
            return 0;
        }
#endif
#if (defined NETEQ_PCM16B_CODEC)
        case kDecoderPCM16B:
        {
            inst->deltaBytes = -12;
            inst->deltaTime = 2;
            return 0;
        }
#endif
#if ((defined NETEQ_PCM16B_CODEC)&&(defined NETEQ_WIDEBAND))
        case kDecoderPCM16Bwb:
        {
            inst->deltaBytes = -14;
            inst->deltaTime = 2;
            return 0;
        }
#endif
#if ((defined NETEQ_PCM16B_CODEC)&&(defined NETEQ_32KHZ_WIDEBAND))
        case kDecoderPCM16Bswb32kHz:
        {
            inst->deltaBytes = -18;
            inst->deltaTime = 2;
            return 0;
        }
#endif
#if ((defined NETEQ_PCM16B_CODEC)&&(defined NETEQ_48KHZ_WIDEBAND))
        case kDecoderPCM16Bswb48kHz:
        {
            inst->deltaBytes = -22;
            inst->deltaTime = 2;
            return 0;
        }
#endif

            /* Splittable payloads */
            /* deltaBytes = bytes per frame, deltaTime = timestamps per frame */
#ifdef NETEQ_G722_1_CODEC
        case kDecoderG722_1_16:
        {
            inst->deltaBytes = 40;
            inst->deltaTime = 320;
            return 0;
        }
        case kDecoderG722_1_24:
        {
            inst->deltaBytes = 60;
            inst->deltaTime = 320;
            return 0;
        }
        case kDecoderG722_1_32:
        {
            inst->deltaBytes = 80;
            inst->deltaTime = 320;
            return 0;
        }
#endif
#ifdef NETEQ_G722_1C_CODEC
        case kDecoderG722_1C_24:
        {
            inst->deltaBytes = 60;
            inst->deltaTime = 640;
            return 0;
        }
        case kDecoderG722_1C_32:
        {
            inst->deltaBytes = 80;
            inst->deltaTime = 640;
            return 0;
        }
        case kDecoderG722_1C_48:
        {
            inst->deltaBytes = 120;
            inst->deltaTime = 640;
            return 0;
        }
#endif
#ifdef NETEQ_G729_CODEC
        case kDecoderG729:
        {
            inst->deltaBytes = 10;
            inst->deltaTime = 80;
            return 0;
        }
#endif
#ifdef NETEQ_ILBC_CODEC
        case kDecoderILBC:
        {
            /* Check for splitting of iLBC packets.
             * If payload size is a multiple of 50 bytes it should be split into 30ms frames.
             * If payload size is a multiple of 38 bytes it should be split into 20ms frames.
             * Least common multiplier between 38 and 50 is 950, so the payload size must be less than
             * 950 bytes in order to resolve the frames unambiguously.
             * Currently max 12 frames in one bundle.
             */
            switch (codedsize)
            {
                case 50:
                case 100:
                case 150:
                case 200:
                case 250:
                case 300:
                case 350:
                case 400:
                case 450:
                case 500:
                case 550:
                case 600:
                {
                    /* 30 ms frames */
                    inst->deltaBytes = 50;
                    inst->deltaTime = 240;
                    break;
                }
                case 38:
                case 76:
                case 114:
                case 152:
                case 190:
                case 228:
                case 266:
                case 304:
                case 342:
                case 380:
                case 418:
                case 456:
                {
                    /* 20 ms frames */
                    inst->deltaBytes = 38;
                    inst->deltaTime = 160;
                    break;
                }
                default:
                {
                    return AMBIGUOUS_ILBC_FRAME_SIZE; /* Something not supported... */
                }
            }
            return 0;
        }
#endif
#ifdef NETEQ_GSMFR_CODEC
        case kDecoderGSMFR:
        {
            inst->deltaBytes = 33;
            inst->deltaTime = 160;
            return 0;
        }
#endif
        default:
        { /*Unknown codec */
            inst->deltaBytes = NO_SPLIT;
            return CODEC_DB_UNKNOWN_CODEC;
        }
    } /* end of switch */
}
/*
* Returns 1 if codec is multiple description, 0 otherwise.
* NOTE: This function is a stub, since there currently are no MD codecs.
*/
/*
 * Returns 1 if the codec is a multiple-description codec, 0 otherwise.
 * Stub: no MD codecs are currently implemented, so this always returns 0.
 */
int WebRtcNetEQ_DbIsMDCodec(enum WebRtcNetEQDecoder codecID)
{
    (void) codecID; /* unused until an MD codec exists; add the test here */
    return 0;
}
/*
* Returns 1 if payload type is registered as a CNG codec, 0 otherwise
*/
/*
 * Returns 1 if the payload type is registered as a CNG codec, 0 otherwise.
 * Without CNG support compiled in, always returns 0.
 */
int WebRtcNetEQ_DbIsCNGPayload(CodecDbInst_t *inst, int payloadType)
{
#ifdef NETEQ_CNG_CODEC
    int k;

    for (k = 0; k < NUM_CNG_CODECS; k++)
    {
        const int registered = inst->CNGpayloadType[k];
        if ((registered != -1) && (registered == payloadType))
        {
            return 1;
        }
    }
#endif

    return 0;
}
/*
* Return the sample rate for the codec with the given payload type, 0 if error
*/
/*
 * Returns the sample rate in Hz for the codec with the given payload
 * type, or 0 on error. CNG payloads are resolved by their slot index
 * (one slot per supported rate); other payloads are looked up in the
 * regular codec database.
 */
WebRtc_UWord16 WebRtcNetEQ_DbGetSampleRate(CodecDbInst_t *inst, int payloadType)
{
    int codecNo;
    int k;
    CodecFuncInst_t funcs;

    if (inst == NULL)
    {
        /* sanity check failed; report 0 Hz */
        return 0;
    }

    /* First see whether this payload is a registered CNG payload type.
       The CNGpayloadType slot index encodes the sample rate. */
    for (k = 0; k < NUM_CNG_CODECS; k++)
    {
        if ((inst->CNGpayloadType[k] == -1) || (inst->CNGpayloadType[k] != payloadType))
        {
            continue;
        }
        if (k == 1)
        {
            return 16000;
        }
        else if (k == 2)
        {
            return 32000;
        }
        else if (k == 3)
        {
            return 48000;
        }
        return 8000; /* slot 0 is the 8 kHz entry */
    }

    /* Not a CNG payload; look it up among the regular codecs. */
    codecNo = WebRtcNetEQ_DbGetCodec(inst, payloadType);
    if (codecNo >= 0)
    {
        if (WebRtcNetEQ_DbGetPtrs(inst, (enum WebRtcNetEQDecoder) codecNo, &funcs) != 0)
        {
            /* Unexpected error, return 0 Hz */
            return 0;
        }
        return funcs.codec_fs;
    }

    /* Unknown payload type; report 0 Hz */
    return 0;
}

View File

@ -1,126 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* Interface for the codec database.
*/
#ifndef CODEC_DB_H
#define CODEC_DB_H

#include "typedefs.h"
#include "webrtc_neteq.h"
#include "codec_db_defines.h"
#include "neteq_defines.h"

/* Number of CNG payload slots: one per sample rate enabled at build time. */
#if defined(NETEQ_48KHZ_WIDEBAND)
#define NUM_CNG_CODECS 4
#elif defined(NETEQ_32KHZ_WIDEBAND)
#define NUM_CNG_CODECS 3
#elif defined(NETEQ_WIDEBAND)
#define NUM_CNG_CODECS 2
#else
#define NUM_CNG_CODECS 1
#endif

/*
 * Codec database instance. position[] maps a codec identifier to its slot
 * in the parallel per-slot arrays below (-1 = not registered);
 * CNGpayloadType[] holds one payload type per supported CNG sample rate.
 */
typedef struct
{
    WebRtc_Word16 position[NUM_TOTAL_CODECS];
    WebRtc_Word16 nrOfCodecs;
    WebRtc_Word16 payloadType[NUM_CODECS];
    FuncDecode funcDecode[NUM_CODECS];
    FuncDecode funcDecodeRCU[NUM_CODECS];
    FuncDecodePLC funcDecodePLC[NUM_CODECS];
    FuncDecodeInit funcDecodeInit[NUM_CODECS];
    FuncAddLatePkt funcAddLatePkt[NUM_CODECS];
    FuncGetMDinfo funcGetMDinfo[NUM_CODECS];
    FuncGetPitchInfo funcGetPitch[NUM_CODECS];
    FuncUpdBWEst funcUpdBWEst[NUM_CODECS];
    FuncGetErrorCode funcGetErrorCode[NUM_CODECS];
    void * codec_state[NUM_CODECS];
    WebRtc_UWord16 codec_fs[NUM_CODECS];
    WebRtc_Word16 CNGpayloadType[NUM_CNG_CODECS];
} CodecDbInst_t;

#define NO_SPLIT -1 /* codec payload cannot be split */

/* Payload split descriptor; see WebRtcNetEQ_DbGetSplitInfo. */
typedef struct
{
    WebRtc_Word16 deltaBytes;
    WebRtc_Word16 deltaTime;
} SplitInfo_t;

/*
 * Resets the codec database.
 */
int WebRtcNetEQ_DbReset(CodecDbInst_t *inst);

/*
 * Adds a new codec to the database.
 */
int WebRtcNetEQ_DbAdd(CodecDbInst_t *inst, enum WebRtcNetEQDecoder codec,
                      WebRtc_Word16 payloadType, FuncDecode funcDecode,
                      FuncDecode funcDecodeRCU, FuncDecodePLC funcDecodePLC,
                      FuncDecodeInit funcDecodeInit, FuncAddLatePkt funcAddLatePkt,
                      FuncGetMDinfo funcGetMDinfo, FuncGetPitchInfo funcGetPitch,
                      FuncUpdBWEst funcUpdBWEst, FuncGetErrorCode funcGetErrorCode,
                      void* codec_state, WebRtc_UWord16 codec_fs);

/*
 * Removes a codec from the database.
 */
int WebRtcNetEQ_DbRemove(CodecDbInst_t *inst, enum WebRtcNetEQDecoder codec);

/*
 * Get the decoder function pointers for a codec.
 * (Fix: the second parameter was previously unnamed, inconsistent with the
 * definition and every other prototype in this header.)
 */
int WebRtcNetEQ_DbGetPtrs(CodecDbInst_t *inst, enum WebRtcNetEQDecoder codec,
                          CodecFuncInst_t *ptr_inst);

/*
 * Returns payload number given a codec identifier.
 */
int WebRtcNetEQ_DbGetPayload(CodecDbInst_t *inst, enum WebRtcNetEQDecoder codecID);

/*
 * Returns codec identifier given a payload number.
 * Returns CODEC_DB_NOT_EXIST3 if the payload type does not exist.
 */
int WebRtcNetEQ_DbGetCodec(CodecDbInst_t *inst, int payloadType);

/*
 * Extracts the Payload Split information of the codec with the specified payloadType.
 */
int WebRtcNetEQ_DbGetSplitInfo(SplitInfo_t *inst, enum WebRtcNetEQDecoder codecID,
                               int codedsize);

/*
 * Returns 1 if codec is multiple description type, 0 otherwise.
 */
int WebRtcNetEQ_DbIsMDCodec(enum WebRtcNetEQDecoder codecID);

/*
 * Returns 1 if payload type is registered as a CNG codec, 0 otherwise.
 */
int WebRtcNetEQ_DbIsCNGPayload(CodecDbInst_t *inst, int payloadType);

/*
 * Return the sample rate for the codec with the given payload type, 0 if error.
 */
WebRtc_UWord16 WebRtcNetEQ_DbGetSampleRate(CodecDbInst_t *inst, int payloadType);

#endif

View File

@ -1,89 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* Some definitions related to the codec database.
*/
#ifndef CODEC_DB_DEFINES_H
#define CODEC_DB_DEFINES_H

#include "typedefs.h"

/* Maximum number of simultaneously registered codec slots. */
#define NUM_CODECS 47 /* probably too large with the limited set of supported codecs*/

/* Size of the codec-identifier lookup table (one entry per enum value). */
#define NUM_TOTAL_CODECS kDecoderReservedEnd

/*
 * Pointer to decoder function.
 */
typedef WebRtc_Word16 (*FuncDecode)(void* state, WebRtc_Word16* encoded, WebRtc_Word16 len,
                                    WebRtc_Word16* decoded, WebRtc_Word16* speechType);

/*
 * Pointer to PLC (packet loss concealment) function.
 */
typedef WebRtc_Word16 (*FuncDecodePLC)(void* state, WebRtc_Word16* decodec,
                                       WebRtc_Word16 frames);

/*
 * Pointer to decoder init function.
 */
typedef WebRtc_Word16 (*FuncDecodeInit)(void* state);

/*
 * Pointer to add late packet function.
 */
typedef WebRtc_Word16
                (*FuncAddLatePkt)(void* state, WebRtc_Word16* encoded, WebRtc_Word16 len);

/*
 * Pointer to get MD (multiple description) info function.
 */
typedef WebRtc_Word16 (*FuncGetMDinfo)(void* state);

/*
 * Pointer to pitch info function.
 * Return 0 for unvoiced, -1 if pitch not available.
 */
typedef WebRtc_Word16 (*FuncGetPitchInfo)(void* state, WebRtc_Word16* encoded,
                                          WebRtc_Word16* length);

/*
 * Pointer to the update bandwidth estimate function
 */
typedef WebRtc_Word16 (*FuncUpdBWEst)(void* state, const WebRtc_UWord16 *encoded,
                                      WebRtc_Word32 packet_size,
                                      WebRtc_UWord16 rtp_seq_number, WebRtc_UWord32 send_ts,
                                      WebRtc_UWord32 arr_ts);

/*
 * Pointer to error code function
 */
typedef WebRtc_Word16 (*FuncGetErrorCode)(void* state);

/* Per-codec function pointers, state and sample rate, as handed out by
   WebRtcNetEQ_DbGetPtrs. */
typedef struct CodecFuncInst_t_
{

    FuncDecode funcDecode;
    FuncDecode funcDecodeRCU;
    FuncDecodePLC funcDecodePLC;
    FuncDecodeInit funcDecodeInit;
    FuncAddLatePkt funcAddLatePkt;
    FuncGetMDinfo funcGetMDinfo;
    FuncUpdBWEst funcUpdBWEst; /* Currently in use for the ISAC family (without LC) only*/
    FuncGetErrorCode funcGetErrorCode;
    void * codec_state;
    WebRtc_UWord16 codec_fs;
    WebRtc_UWord32 timeStamp; /* NOTE(review): not populated by DbGetPtrs -- confirm usage */

} CodecFuncInst_t;

#endif

View File

@ -1,132 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "dsp.h"
#include "signal_processing_library.h"
#include "dsp_helpfunctions.h"
/* Scratch usage:
Type Name size startpos endpos
WebRtc_Word16 pw16_corrVec 62 0 61
WebRtc_Word16 pw16_data_ds 124 0 123
WebRtc_Word32 pw32_corr 2*54 124 231
Total: 232
*/
#define SCRATCH_pw16_corrVec 0
#define SCRATCH_pw16_data_ds 0
#define SCRATCH_pw32_corr 124
#define NETEQ_CORRELATOR_DSVECLEN 124 /* 124 = 60 + 10 + 54 */
/*
 * Computes an autocorrelation of the tail of pw16_data for pitch-lag
 * estimation. The signal is first downsampled to 4 kHz, normalized, and
 * cross-correlated against itself at lags 10..63 (in 4 kHz samples).
 *
 * Input:
 *      - inst            : DSP instance (sample rate selects the
 *                          downsampling filter)
 *      - pw16_scratchPtr : Scratch memory (only in SCRATCH builds)
 *      - pw16_data       : Input speech buffer; only the last
 *                          NETEQ_CORRELATOR_DSVECLEN * factor samples are used
 *      - w16_dataLen     : Number of samples in pw16_data
 *
 * Output:
 *      - pw16_corrOut    : Correlation values (Q(-pw16_corrScale))
 *      - pw16_corrScale  : Total number of right shifts applied
 *
 * Return value           : 50 + 1
 *                          NOTE(review): 54 values are written to
 *                          pw16_corrOut but 51 is returned -- confirm
 *                          which count callers rely on.
 */
WebRtc_Word16 WebRtcNetEQ_Correlator(DSPInst_t *inst,
#ifdef SCRATCH
                                     WebRtc_Word16 *pw16_scratchPtr,
#endif
                                     WebRtc_Word16 *pw16_data,
                                     WebRtc_Word16 w16_dataLen,
                                     WebRtc_Word16 *pw16_corrOut,
                                     WebRtc_Word16 *pw16_corrScale)
{
    WebRtc_Word16 w16_corrLen = 60;
#ifdef SCRATCH
    /* Carve work buffers out of the caller-provided scratch area. */
    WebRtc_Word16 *pw16_data_ds = pw16_scratchPtr + SCRATCH_pw16_corrVec;
    WebRtc_Word32 *pw32_corr = (WebRtc_Word32*) (pw16_scratchPtr + SCRATCH_pw32_corr);
    /* WebRtc_Word16 *pw16_corrVec = pw16_scratchPtr + SCRATCH_pw16_corrVec;*/
#else
    WebRtc_Word16 pw16_data_ds[NETEQ_CORRELATOR_DSVECLEN];
    WebRtc_Word32 pw32_corr[54];
    /* WebRtc_Word16 pw16_corrVec[4+54+4];*/
#endif
    /* WebRtc_Word16 *pw16_corr=&pw16_corrVec[4];*/
    WebRtc_Word16 w16_maxVal;
    WebRtc_Word32 w32_maxVal;
    WebRtc_Word16 w16_normVal;
    WebRtc_Word16 w16_normVal2;
    /* WebRtc_Word16 w16_corrUpsLen;*/
    WebRtc_Word16 *pw16_B = NULL;
    WebRtc_Word16 w16_Blen = 0;
    WebRtc_Word16 w16_factor = 0;

    /* Set constants depending on frequency used */
    /* pw16_B/w16_Blen select the anti-alias filter; w16_factor is the
       downsampling ratio fs/4000. */
    if (inst->fs == 8000)
    {
        w16_Blen = 3;
        w16_factor = 2;
        pw16_B = (WebRtc_Word16*) WebRtcNetEQ_kDownsample8kHzTbl;
#ifdef NETEQ_WIDEBAND
    }
    else if (inst->fs==16000)
    {
        w16_Blen = 5;
        w16_factor = 4;
        pw16_B = (WebRtc_Word16*)WebRtcNetEQ_kDownsample16kHzTbl;
#endif
#ifdef NETEQ_32KHZ_WIDEBAND
    }
    else if (inst->fs==32000)
    {
        w16_Blen = 7;
        w16_factor = 8;
        pw16_B = (WebRtc_Word16*)WebRtcNetEQ_kDownsample32kHzTbl;
#endif
#ifdef NETEQ_48KHZ_WIDEBAND
    }
    else /* if inst->fs==48000 */
    {
        w16_Blen = 7;
        w16_factor = 12;
        pw16_B = (WebRtc_Word16*)WebRtcNetEQ_kDownsample48kHzTbl;
#endif
    }

    /* Downsample data in order to work on a 4 kHz sampled signal */
    WebRtcSpl_DownsampleFast(
        pw16_data + w16_dataLen - (NETEQ_CORRELATOR_DSVECLEN * w16_factor),
        (WebRtc_Word16) (NETEQ_CORRELATOR_DSVECLEN * w16_factor), pw16_data_ds,
        NETEQ_CORRELATOR_DSVECLEN, pw16_B, w16_Blen, w16_factor, (WebRtc_Word16) 0);

    /* Normalize downsampled vector to using entire 16 bit */
    w16_maxVal = WebRtcSpl_MaxAbsValueW16(pw16_data_ds, 124);
    w16_normVal = 16 - WebRtcSpl_NormW32((WebRtc_Word32) w16_maxVal);
    WebRtcSpl_VectorBitShiftW16(pw16_data_ds, NETEQ_CORRELATOR_DSVECLEN, pw16_data_ds,
        w16_normVal);

    /* Correlate from lag 10 to lag 60 (20..120 in NB and 40..240 in WB) */
    WebRtcNetEQ_CrossCorr(
        pw32_corr, &pw16_data_ds[NETEQ_CORRELATOR_DSVECLEN - w16_corrLen],
        &pw16_data_ds[NETEQ_CORRELATOR_DSVECLEN - w16_corrLen - 10], 60, 54,
        6 /*maxValue... shifts*/, -1);

    /*
     * Move data from w32 to w16 vector.
     * Normalize downsampled vector to using all 14 bits
     */
    w32_maxVal = WebRtcSpl_MaxAbsValueW32(pw32_corr, 54);
    w16_normVal2 = 18 - WebRtcSpl_NormW32(w32_maxVal);
    w16_normVal2 = WEBRTC_SPL_MAX(w16_normVal2, 0);

    WebRtcSpl_VectorBitShiftW32ToW16(pw16_corrOut, 54, pw32_corr, w16_normVal2);

    /* Total scale factor (right shifts) of correlation value */
    /* 2*normVal: input normalized before squaring; 6: cross-corr shifts;
       normVal2: W32->W16 conversion shift. */
    *pw16_corrScale = 2 * w16_normVal + 6 + w16_normVal2;

    return (50 + 1);
}
#undef SCRATCH_pw16_corrVec
#undef SCRATCH_pw16_data_ds
#undef SCRATCH_pw32_corr

View File

@ -1,34 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* Contains definitions for the delay logging functionality. Only used for debugging and
* tracing purposes.
*/
#ifndef DELAY_LOGGING_H
#define DELAY_LOGGING_H

/* Delay logging format version. */
#define NETEQ_DELAY_LOGGING_VERSION_STRING "2.0"

/*
 * Record type codes used in the delay log. Names indicate the NetEQ
 * event each code represents (e.g. RECIN = packet received, OPTBUF =
 * optimal buffer level). NOTE(review): meanings inferred from the
 * names; confirm against the tool that parses these logs.
 */
#define NETEQ_DELAY_LOGGING_SIGNAL_RECIN 1
#define NETEQ_DELAY_LOGGING_SIGNAL_FLUSH 2
#define NETEQ_DELAY_LOGGING_SIGNAL_CLOCK 3
#define NETEQ_DELAY_LOGGING_SIGNAL_EOF 4
#define NETEQ_DELAY_LOGGING_SIGNAL_DECODE 5
#define NETEQ_DELAY_LOGGING_SIGNAL_CHANGE_FS 6
#define NETEQ_DELAY_LOGGING_SIGNAL_MERGE_INFO 7
#define NETEQ_DELAY_LOGGING_SIGNAL_EXPAND_INFO 8
#define NETEQ_DELAY_LOGGING_SIGNAL_ACCELERATE_INFO 9
#define NETEQ_DELAY_LOGGING_SIGNAL_PREEMPTIVE_INFO 10
#define NETEQ_DELAY_LOGGING_SIGNAL_OPTBUF 11
#define NETEQ_DELAY_LOGGING_SIGNAL_DECODE_ONE_DESC 12

#endif

View File

@ -1,523 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* This file contains some DSP initialization functions and
* constant table definitions.
*/
#include "dsp.h"
#include "signal_processing_library.h"
#include "neteq_error_codes.h"
/* Filter coefficients used when downsampling from the indicated
sample rates (8, 16, 32, 48 kHz) to 4 kHz.
Coefficients are in Q12. */
/* {0.3, 0.4, 0.3} */
const WebRtc_Word16 WebRtcNetEQ_kDownsample8kHzTbl[] = { 1229, 1638, 1229 };
#ifdef NETEQ_WIDEBAND
/* {0.15, 0.2, 0.3, 0.2, 0.15} */
const WebRtc_Word16 WebRtcNetEQ_kDownsample16kHzTbl[] =
{ 614, 819, 1229, 819, 614};
#endif
#ifdef NETEQ_32KHZ_WIDEBAND
/* {0.1425, 0.1251, 0.1525, 0.1628, 0.1525, 0.1251, 0.1425} */
const WebRtc_Word16 WebRtcNetEQ_kDownsample32kHzTbl[] =
{ 584, 512, 625, 667, 625, 512, 584};
#endif
#ifdef NETEQ_48KHZ_WIDEBAND
/* {0.2487, 0.0952, 0.1042, 0.1074, 0.1042, 0.0952, 0.2487} */
const WebRtc_Word16 WebRtcNetEQ_kDownsample48kHzTbl[] =
{ 1019, 390, 427, 440, 427, 390, 1019};
#endif
/* Constants used in expand function WebRtcNetEQ_Expand */
/* Q12: -1.264421 + 4.8659148*x - 4.0092827*x^2 + 1.4100529*x^3 */
const WebRtc_Word16 WebRtcNetEQ_kMixFractionFuncTbl[4] = { -5179, 19931, -16422, 5776 };
/* Tabulated divisions to save complexity */
/* 1049/{0, .., 6} */
const WebRtc_Word16 WebRtcNetEQ_k1049div[7] = { 0, 1049, 524, 349, 262, 209, 174 };
/* 2097/{0, .., 6} */
const WebRtc_Word16 WebRtcNetEQ_k2097div[7] = { 0, 2097, 1048, 699, 524, 419, 349 };
/* 5243/{0, .., 6} */
const WebRtc_Word16 WebRtcNetEQ_k5243div[7] = { 0, 5243, 2621, 1747, 1310, 1048, 873 };
#ifdef WEBRTC_NETEQ_40BITACC_TEST
/*
* Run NetEQ with simulated 40-bit accumulator to run bit-exact to a DSP
* implementation where the main (spl and NetEQ) functions have been
* 40-bit optimized. For testing purposes.
*/
/****************************************************************************
* WebRtcNetEQ_40BitAccCrossCorr(...)
*
* Calculates the Cross correlation between two sequences seq1 and seq2. Seq1
* is fixed and seq2 slides as the pointer is increased with step
*
* Input:
* - seq1 : First sequence (fixed throughout the correlation)
* - seq2 : Second sequence (slided step_seq2 for each
* new correlation)
* - dimSeq : Number of samples to use in the cross correlation.
* Should be no larger than 1024 to avoid overflow.
* - dimCrossCorr : Number of CrossCorrelations to calculate (start
* position for seq2 is updated for each new one)
* - rShift : Number of right shifts to use
* - step_seq2 : How many (positive or negative) steps the seq2
* pointer should be updated for each new cross
* correlation value
*
* Output:
* - crossCorr : The cross correlation in Q-rShift
*/
void WebRtcNetEQ_40BitAccCrossCorr(WebRtc_Word32 *crossCorr,
                                   WebRtc_Word16 *seq1,
                                   WebRtc_Word16 *seq2,
                                   WebRtc_Word16 dimSeq,
                                   WebRtc_Word16 dimCrossCorr,
                                   WebRtc_Word16 rShift,
                                   WebRtc_Word16 step_seq2)
{
    int lag, k;

    /* One correlation value per sliding position of seq2. */
    for (lag = 0; lag < dimCrossCorr; lag++)
    {
        WebRtc_Word16 *slide = seq2 + step_seq2 * lag; /* start of sliding window */
        WebRtc_Word64 acc = 0; /* 64-bit accumulator emulating the 40-bit DSP */

        for (k = 0; k < dimSeq; k++)
        {
            acc += WEBRTC_SPL_MUL_16_16(seq1[k], slide[k]);
        }

        /* Deliver the result in Q(-rShift). */
        crossCorr[lag] = (WebRtc_Word32) (acc >> rShift);
    }
}
/****************************************************************************
* WebRtcNetEQ_40BitAccDotW16W16(...)
*
* Calculates the dot product between two vectors (WebRtc_Word16)
*
* Input:
* - vector1 : Vector 1
* - vector2 : Vector 2
* - len : Number of samples in vector
* Should be no larger than 1024 to avoid overflow.
* - scaling : The number of left shifts required to avoid overflow
* in the dot product
* Return value : The dot product
*/
WebRtc_Word32 WebRtcNetEQ_40BitAccDotW16W16(WebRtc_Word16 *vector1,
                                            WebRtc_Word16 *vector2,
                                            int len,
                                            int scaling)
{
    WebRtc_Word64 acc = 0; /* 64-bit accumulator emulating the 40-bit DSP */
    int k;

    /* Accumulate the element-wise products, then scale down once. */
    for (k = 0; k < len; k++)
    {
        acc += WEBRTC_SPL_MUL_16_16(vector1[k], vector2[k]);
    }

    return (WebRtc_Word32) (acc >> scaling);
}
#endif /* WEBRTC_NETEQ_40BITACC_TEST */
/****************************************************************************
 * WebRtcNetEQ_DSPInit(...)
 *
 * Initializes DSP side of NetEQ: saves the members that must survive a
 * (re-)initialization, zeroes the whole instance, restores the saved
 * members, and seeds all algorithm state for the given sample rate.
 *
 * Input:
 *      - inst          : NetEq DSP instance
 *      - fs            : Initial sample rate (may change when decoding data)
 *
 * Output:
 *      - inst          : Updated instance
 *
 * Return value         : 0 - ok
 *                      : non-zero - error
 */

int WebRtcNetEQ_DSPInit(DSPInst_t *inst, WebRtc_UWord16 fs)
{
    int res = 0;
    WebRtc_Word16 fs_mult;

    /* Pointers and values to save before clearing the instance */
#ifdef NETEQ_CNG_CODEC
    void *savedPtr1 = inst->CNG_Codec_inst;
#endif
    void *savedPtr2 = inst->pw16_readAddress;
    void *savedPtr3 = inst->pw16_writeAddress;
    void *savedPtr4 = inst->main_inst;
#ifdef NETEQ_VAD
    void *savedVADptr = inst->VADInst.VADState;
    VADInitFunction savedVADinit = inst->VADInst.initFunction;
    VADSetmodeFunction savedVADsetmode = inst->VADInst.setmodeFunction;
    VADFunction savedVADfunc = inst->VADInst.VADFunction;
    WebRtc_Word16 savedVADEnabled = inst->VADInst.VADEnabled;
    WebRtc_Word16 savedVADMode = inst->VADInst.VADMode;
#endif /* NETEQ_VAD */
    DSPStats_t saveStats;
    WebRtc_Word16 saveMsPerCall = inst->millisecondsPerCall;
    enum BGNMode saveBgnMode = inst->BGNInst.bgnMode;
#ifdef NETEQ_STEREO
    MasterSlaveInfo saveMSinfo;
#endif

    /* copy contents of statInst to avoid clearing */
    WEBRTC_SPL_MEMCPY_W16(&saveStats, &(inst->statInst),
        sizeof(DSPStats_t)/sizeof(WebRtc_Word16));

#ifdef NETEQ_STEREO
    /* copy contents of msInfo to avoid clearing */
    /* NOTE(review): this copies sizeof(MasterSlaveInfo) words starting at the
     msInfo member; the header declares msInfo as a pointer, which would make
     this read/write past the field -- verify header and source agree. */
    WEBRTC_SPL_MEMCPY_W16(&saveMSinfo, &(inst->msInfo),
        sizeof(MasterSlaveInfo)/sizeof(WebRtc_Word16));
#endif

    /* check that the sample rate is valid */
    if ((fs != 8000)
#ifdef NETEQ_WIDEBAND
        &&(fs!=16000)
#endif
#ifdef NETEQ_32KHZ_WIDEBAND
        &&(fs!=32000)
#endif
#ifdef NETEQ_48KHZ_WIDEBAND
        &&(fs!=48000)
#endif
    )
    {
        /* invalid rate */
        return (CODEC_DB_UNSUPPORTED_FS);
    }

    /* calculate fs/8000 */
    fs_mult = WebRtcSpl_DivW32W16ResW16(fs, 8000);

    /* Set everything to zero since most variables should be zero at start */
    WebRtcSpl_MemSetW16((WebRtc_Word16 *) inst, 0, sizeof(DSPInst_t) / sizeof(WebRtc_Word16));

    /* Restore saved pointers */
#ifdef NETEQ_CNG_CODEC
    inst->CNG_Codec_inst = (CNG_dec_inst *)savedPtr1;
#endif
    inst->pw16_readAddress = (WebRtc_Word16 *) savedPtr2;
    inst->pw16_writeAddress = (WebRtc_Word16 *) savedPtr3;
    inst->main_inst = savedPtr4;
#ifdef NETEQ_VAD
    inst->VADInst.VADState = savedVADptr;
    inst->VADInst.initFunction = savedVADinit;
    inst->VADInst.setmodeFunction = savedVADsetmode;
    inst->VADInst.VADFunction = savedVADfunc;
    inst->VADInst.VADEnabled = savedVADEnabled;
    inst->VADInst.VADMode = savedVADMode;
#endif /* NETEQ_VAD */

    /* Initialize main part */
    inst->fs = fs;
    inst->millisecondsPerCall = saveMsPerCall;
    inst->timestampsPerCall = inst->millisecondsPerCall * 8 * fs_mult;
    inst->ExpandInst.w16_overlap = 5 * fs_mult;
    inst->endPosition = 565 * fs_mult;
    inst->curPosition = inst->endPosition - inst->ExpandInst.w16_overlap;
    inst->w16_seedInc = 1;
    inst->uw16_seed = 777;
    inst->w16_muteFactor = 16384; /* 1.0 in Q14 */
    inst->w16_frameLen = 3 * inst->timestampsPerCall; /* Dummy initialize to 30ms */
    inst->w16_speechHistoryLen = 256 * fs_mult;
    inst->pw16_speechHistory = &inst->speechBuffer[inst->endPosition
        - inst->w16_speechHistoryLen];
    inst->ExpandInst.pw16_overlapVec = &(inst->pw16_speechHistory[inst->w16_speechHistoryLen
        - inst->ExpandInst.w16_overlap]);

    /* Reusage of memory in speechBuffer inside Expand */
    inst->ExpandInst.pw16_expVecs[0] = &inst->speechBuffer[0];
    inst->ExpandInst.pw16_expVecs[1] = &inst->speechBuffer[126 * fs_mult];
    inst->ExpandInst.pw16_arState = &inst->speechBuffer[2 * 126 * fs_mult];
    inst->ExpandInst.pw16_arFilter = &inst->speechBuffer[2 * 126 * fs_mult
        + UNVOICED_LPC_ORDER];
    /* Ends at 2*126*fs_mult+UNVOICED_LPC_ORDER+(UNVOICED_LPC_ORDER+1) */

    inst->ExpandInst.w16_expandMuteFactor = 16384; /* 1.0 in Q14 */

    /* Initialize BGN part */
    inst->BGNInst.pw16_filter[0] = 4096;
    inst->BGNInst.w16_scale = 20000;
    inst->BGNInst.w16_scaleShift = 24;
    inst->BGNInst.w32_energyUpdate = 500000;
    inst->BGNInst.w32_energyUpdateLow = 0;
    inst->BGNInst.w32_energy = 2500;
    inst->BGNInst.w16_initialized = 0;
    inst->BGNInst.bgnMode = saveBgnMode;

    /* Recreate statistics counters */
    WEBRTC_SPL_MEMCPY_W16(&(inst->statInst), &saveStats,
        sizeof(DSPStats_t)/sizeof(WebRtc_Word16));

#ifdef NETEQ_STEREO
    /* Recreate MSinfo */
    WEBRTC_SPL_MEMCPY_W16(&(inst->msInfo), &saveMSinfo,
        sizeof(MasterSlaveInfo)/sizeof(WebRtc_Word16));
#endif

#ifdef NETEQ_CNG_CODEC
    if (inst->CNG_Codec_inst!=NULL)
    {
        /* initialize comfort noise generator */
        res |= WebRtcCng_InitDec(inst->CNG_Codec_inst);
    }
#endif

#ifdef NETEQ_VAD
    /* initialize PostDecode VAD instance
     (don't bother checking for NULL instance, this is done inside init function) */
    res |= WebRtcNetEQ_InitVAD(&inst->VADInst, fs);
#endif /* NETEQ_VAD */

    return (res);
}
/****************************************************************************
 * WebRtcNetEQ_AddressInit(...)
 *
 * Initializes the shared-memory communication on the DSP side.
 *
 * Input:
 *      - inst              : NetEQ DSP instance
 *      - data2McuAddress   : Pointer to memory where DSP writes / MCU reads
 *      - data2DspAddress   : Pointer to memory where MCU writes / DSP reads
 *      - mainInst          : NetEQ main instance
 *
 * Output:
 *      - inst              : Updated instance
 *
 * Return value             : 0 - ok
 */

int WebRtcNetEQ_AddressInit(DSPInst_t *inst, const void *data2McuAddress,
                            const void *data2DspAddress, const void *mainInst)
{
    /* Remember the owning NetEQ main instance. */
    inst->main_inst = (void *) mainInst;

    /* Shared-memory channels: the DSP reads MCU-to-DSP data and writes
     DSP-to-MCU data. */
    inst->pw16_readAddress = (WebRtc_Word16 *) data2DspAddress;
    inst->pw16_writeAddress = (WebRtc_Word16 *) data2McuAddress;

    /* Default output frame size: 10 ms = 80 samples in narrowband. */
    inst->millisecondsPerCall = 10;
    inst->timestampsPerCall = 80;

    return 0;
}
/****************************************************************************
 * WebRtcNetEQ_ClearInCallStats(...)
 *
 * Reset in-call statistics variables on DSP side.
 *
 * Input:
 *      - inst          : NetEQ DSP instance
 *
 * Output:
 *      - inst          : Updated instance
 *
 * Return value         : 0 - ok
 */

int WebRtcNetEQ_ClearInCallStats(DSPInst_t *inst)
{

    /* Reset statistics counters */
    inst->statInst.accelerateLength = 0;
    inst->statInst.expandLength = 0;
    inst->statInst.preemptiveLength = 0;

    return (0);
}
/****************************************************************************
 * WebRtcNetEQ_ClearPostCallStats(...)
 *
 * Reset post-call statistics variables on DSP side.
 *
 * Input:
 *      - inst          : NetEQ DSP instance
 *
 * Output:
 *      - inst          : Updated instance
 *
 * Return value         : 0 - ok
 */

int WebRtcNetEQ_ClearPostCallStats(DSPInst_t *inst)
{
    /* Zero the expanded-samples counters. */
    inst->statInst.expandedNoiseSamples = 0;
    inst->statInst.expandedVoiceSamples = 0;

    return 0;
}
#ifdef NETEQ_VAD
/****************************************************************************
 * WebRtcNetEQ_InitVAD(...)
 *
 * Initializes post-decode VAD instance.
 *
 * Input:
 *      - VADinst       : PostDecodeVAD instance
 *      - fs            : Initial sample rate
 *
 * Output:
 *      - VADinst       : Updated instance
 *
 * Return value         : 0 - Ok
 *                       -1 - Error
 */

int WebRtcNetEQ_InitVAD(PostDecodeVAD_t *VADInst, WebRtc_UWord16 fs)
{
    int res = 0;
    int vadComplete;

    /* Keep the VAD disabled until a complete configuration is verified. */
    VADInst->VADEnabled = 0;

    /* A usable VAD needs a state pointer plus all three callbacks. */
    vadComplete = (VADInst->VADState != NULL) && (VADInst->initFunction != NULL)
        && (VADInst->setmodeFunction != NULL) && (VADInst->VADFunction != NULL);

    if (vadComplete)
    {
        /* Initialize the VAD and apply the stored mode. */
        res = (int) VADInst->initFunction(VADInst->VADState);
        res |= WebRtcNetEQ_SetVADModeInternal(VADInst, VADInst->VADMode);

        if (res != 0)
        {
            /* Something failed; play it safe and detach the VAD state. */
            VADInst->VADState = NULL;
        }
        else if (fs <= 16000)
        {
            /* Enable only for NB/WB; the VAD cannot handle SWB rates. */
            VADInst->VADEnabled = 1;
        }
    }

    /* Reset the SID/CNG interval counter. */
    VADInst->SIDintervalCounter = 0;

    /* Start from an active-speaker decision. */
    VADInst->VADDecision = 1;

    return res;
}
/****************************************************************************
 * WebRtcNetEQ_SetVADModeInternal(...)
 *
 * Set the VAD mode in the VAD struct, and communicate it to the VAD instance
 * if it exists.
 *
 * Input:
 *      - VADinst       : PostDecodeVAD instance
 *      - mode          : Mode number passed on to the VAD function
 *
 * Output:
 *      - VADinst       : Updated instance
 *
 * Return value         : 0 - Ok
 *                       -1 - Error
 */

int WebRtcNetEQ_SetVADModeInternal(PostDecodeVAD_t *VADInst, WebRtc_Word16 mode)
{
    /* Always record the requested mode in the wrapper struct. */
    VADInst->VADMode = mode;

    if (VADInst->VADState == NULL)
    {
        /* No VAD instance attached; nothing more to do. */
        return 0;
    }

    /* Forward the mode to the attached VAD implementation. */
    return (int) VADInst->setmodeFunction(VADInst->VADState, mode);
}
#endif /* NETEQ_VAD */
/****************************************************************************
 * WebRtcNetEQ_FlushSpeechBuffer(...)
 *
 * Flush the speech buffer.
 *
 * Input:
 *      - inst          : NetEq DSP instance
 *
 * Output:
 *      - inst          : Updated instance
 *
 * Return value         : 0 - ok
 *                      : non-zero - error
 */

int WebRtcNetEQ_FlushSpeechBuffer(DSPInst_t *inst)
{
    /* Conversion factor fs/8000. */
    WebRtc_Word16 fs_mult = WebRtcSpl_DivW32W16ResW16(inst->fs, 8000);

    /* Zero the whole history/future speech buffer. */
    WebRtcSpl_MemSetW16(inst->speechBuffer, 0, SPEECH_BUF_SIZE);

    /* Reset playout positions to the same values DSPInit uses. */
    inst->endPosition = 565 * fs_mult;
    inst->curPosition = inst->endPosition - inst->ExpandInst.w16_overlap;

    return 0;
}

View File

@ -1,788 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* This file contains some DSP initialization functions,
* constant table definitions and other parameters.
* Also contains definitions of all DSP-side data structures.
*/
#ifndef DSP_H
#define DSP_H
#include "typedefs.h"
#include "webrtc_cng.h"
#include "codec_db_defines.h"
#include "neteq_defines.h"
#include "neteq_statistics.h"
#ifdef NETEQ_ATEVENT_DECODE
#include "dtmf_tonegen.h"
#endif
/*****************************/
/* Pre-processor definitions */
/*****************************/
/* FSMULT is the sample rate divided by 8000 */
#if defined(NETEQ_48KHZ_WIDEBAND)
#define FSMULT 6
#elif defined(NETEQ_32KHZ_WIDEBAND)
#define FSMULT 4
#elif defined(NETEQ_WIDEBAND)
#define FSMULT 2
#else
#define FSMULT 1
#endif
/* Size of the speech buffer (or synchronization buffer). */
/* 60 ms decoding + 10 ms syncbuff + 0.625ms lookahead */
#define SPEECH_BUF_SIZE (565 * FSMULT)
/* Misc definitions */
#define BGN_LPC_ORDER (4 + FSMULT) /* 5, 6, 8, or 10 */
#define UNVOICED_LPC_ORDER 6
#define RANDVEC_NO_OF_SAMPLES 256
/* Number of milliseconds to remove/add during accelerate/pre-emptive expand
under BGNonly operation */
#define DEFAULT_TIME_ADJUST 8
/* Number of RecOut calls without CNG/SID before re-enabling post-decode VAD */
#define POST_DECODE_VAD_AUTO_ENABLE 3000
/* 8kHz windowing in Q15 (over 5 samples) */
#define NETEQ_OVERLAP_WINMUTE_8KHZ_START 27307
#define NETEQ_OVERLAP_WINMUTE_8KHZ_INC -5461
#define NETEQ_OVERLAP_WINUNMUTE_8KHZ_START 5461
#define NETEQ_OVERLAP_WINUNMUTE_8KHZ_INC 5461
/* 16kHz windowing in Q15 (over 10 samples) */
#define NETEQ_OVERLAP_WINMUTE_16KHZ_START 29789
#define NETEQ_OVERLAP_WINMUTE_16KHZ_INC -2979
#define NETEQ_OVERLAP_WINUNMUTE_16KHZ_START 2979
#define NETEQ_OVERLAP_WINUNMUTE_16KHZ_INC 2979
/* 32kHz windowing in Q15 (over 20 samples) */
#define NETEQ_OVERLAP_WINMUTE_32KHZ_START 31208
#define NETEQ_OVERLAP_WINMUTE_32KHZ_INC -1560
#define NETEQ_OVERLAP_WINUNMUTE_32KHZ_START 1560
#define NETEQ_OVERLAP_WINUNMUTE_32KHZ_INC 1560
/* 48kHz windowing in Q15 (over 30 samples) */
#define NETEQ_OVERLAP_WINMUTE_48KHZ_START 31711
#define NETEQ_OVERLAP_WINMUTE_48KHZ_INC -1057
#define NETEQ_OVERLAP_WINUNMUTE_48KHZ_START 1057
#define NETEQ_OVERLAP_WINUNMUTE_48KHZ_INC 1057
/* Fade BGN towards zero after this many Expand calls */
#define FADE_BGN_TIME 200
/*******************/
/* Constant tables */
/*******************/
extern const WebRtc_Word16 WebRtcNetEQ_kDownsample8kHzTbl[];
extern const WebRtc_Word16 WebRtcNetEQ_kDownsample16kHzTbl[];
extern const WebRtc_Word16 WebRtcNetEQ_kDownsample32kHzTbl[];
extern const WebRtc_Word16 WebRtcNetEQ_kDownsample48kHzTbl[];
extern const WebRtc_Word16 WebRtcNetEQ_kRandnTbl[];
extern const WebRtc_Word16 WebRtcNetEQ_kMixFractionFuncTbl[];
extern const WebRtc_Word16 WebRtcNetEQ_k1049div[];
extern const WebRtc_Word16 WebRtcNetEQ_k2097div[];
extern const WebRtc_Word16 WebRtcNetEQ_k5243div[];
/************/
/* Typedefs */
/************/
/* Background-noise generation behavior during long expansions. */
enum BGNMode
{
    BGN_ON, /* default "normal" behavior with eternal noise */
    BGN_FADE, /* noise fades to zero after some time */
    BGN_OFF /* background noise is always zero */
};
#ifdef NETEQ_STEREO
/* Role of a NetEQ instance in a multi-channel (stereo/spatial) setup. */
enum MasterSlaveMode
{
    NETEQ_MONO, /* stand-alone instance */
    NETEQ_MASTER, /* master instance in a spatial/stereo configuration */
    NETEQ_SLAVE /* slave instance in a spatial/stereo configuration */
};

/* Extra event information passed between master and slave instances. */
enum MasterSlaveExtraInfo
{
    NO_INFO, /* no info to convey */
    ACC_FAIL, /* signal that accelerate failed */
    PE_EXP_FAIL, /* signal that pre-emptive expand failed */
    DTMF_OVERDUB, /* signal that DTMF overdub is generated */
    DTMF_ONLY /* signal that DTMF only is played */
};
#endif
/****************************/
/* DSP-side data structures */
/****************************/
/* Background noise (BGN) instance for storing BGN parameters
 (sub-instance of NETEQDSP_inst). Holds the energy estimate and the LPC
 model used when generating background noise (see WebRtcNetEQ_GenerateBGN). */
typedef struct BGNInst_t_
{
    /* Energy tracking state; w32_energyUpdateLow presumably carries a
     low-precision remainder of the update -- confirm against BGNUpdate. */
    WebRtc_Word32 w32_energy;
    WebRtc_Word32 w32_energyMax;
    WebRtc_Word32 w32_energyUpdate;
    WebRtc_Word32 w32_energyUpdateLow;
    /* LPC synthesis filter: state and coefficients. */
    WebRtc_Word16 pw16_filterState[BGN_LPC_ORDER];
    WebRtc_Word16 pw16_filter[BGN_LPC_ORDER + 1];
    WebRtc_Word16 w16_mutefactor;
    WebRtc_Word16 w16_scale;
    WebRtc_Word16 w16_scaleShift;
    WebRtc_Word16 w16_initialized; /* 0 until the BGN estimate has been set up */
    enum BGNMode bgnMode; /* ON / FADE / OFF; preserved across DSPInit */
} BGNInst_t;
/* Expansion instance (sub-instance of NETEQDSP_inst); state for the Expand
 operation, which produces PLC audio (see WebRtcNetEQ_Expand). */
typedef struct ExpandInst_t_
{
    WebRtc_Word16 w16_overlap; /* 5 * fs_mult: 5 for NB, 10 for WB, etc. */
    WebRtc_Word16 w16_consecExp; /* Number of consecutive expand calls */
    WebRtc_Word16 *pw16_arFilter; /* length [UNVOICED_LPC_ORDER+1] */
    WebRtc_Word16 *pw16_arState; /* length [UNVOICED_LPC_ORDER] */
    WebRtc_Word16 w16_arGain; /* AR (unvoiced) gain; Q-format not documented here */
    WebRtc_Word16 w16_arGainScale;
    WebRtc_Word16 w16_vFraction; /* Q14 */
    WebRtc_Word16 w16_currentVFraction; /* Q14 */
    WebRtc_Word16 *pw16_expVecs[2]; /* expansion vectors; aliased into speechBuffer (set in DSPInit) */
    WebRtc_Word16 w16_lags[3]; /* candidate lags; presumably pitch-related -- confirm */
    WebRtc_Word16 w16_maxLag;
    WebRtc_Word16 *pw16_overlapVec; /* last samples of speech history */
    WebRtc_Word16 w16_lagsDirection;
    WebRtc_Word16 w16_lagsPosition;
    WebRtc_Word16 w16_expandMuteFactor; /* Q14 */
    WebRtc_Word16 w16_stopMuting;
    WebRtc_Word16 w16_onset;
    WebRtc_Word16 w16_muteSlope; /* Q20 */
} ExpandInst_t;
#ifdef NETEQ_VAD
/*
 * VAD function pointer types, replicating the typedefs in webrtc_neteq_internal.h.
 * These function pointers match the definitions of WebRtc VAD functions WebRtcVad_Init,
 * WebRtcVad_set_mode and WebRtcVad_Process, respectively, all found in webrtc_vad.h.
 */
typedef WebRtc_Word16 (*VADInitFunction)(void *VAD_inst);
typedef WebRtc_Word16 (*VADSetmodeFunction)(void *VAD_inst, WebRtc_Word16 mode);
typedef WebRtc_Word16 (*VADFunction)(void *VAD_inst, WebRtc_Word16 fs, WebRtc_Word16 *frame,
                                     WebRtc_Word16 frameLen);

/* Post-decode VAD instance (sub-instance of NETEQDSP_inst). A VAD is usable
 only when VADState and all three function pointers are non-NULL (checked in
 WebRtcNetEQ_InitVAD). */
typedef struct PostDecodeVAD_t_
{
    void *VADState; /* pointer to a VAD instance */
    WebRtc_Word16 VADEnabled; /* 1 if enabled, 0 if disabled */
    WebRtc_Word16 VADMode; /* mode parameter to pass to the VAD function */
    WebRtc_Word16 VADDecision; /* 1 for active, 0 for passive */
    WebRtc_Word16 SIDintervalCounter; /* reset when decoding CNG/SID frame,
     increment for each recout call */
    /* Function pointers */
    VADInitFunction initFunction; /* VAD init function */
    VADSetmodeFunction setmodeFunction; /* VAD setmode function */
    VADFunction VADFunction; /* VAD function */
} PostDecodeVAD_t;
#endif /* NETEQ_VAD */
#ifdef NETEQ_STEREO
#define MAX_MS_DECODES 10
/* Shared info for coordinating master and slave instances; field semantics
 are defined by the master/slave protocol -- confirm against its users. */
typedef struct
{
    /* Stand-alone, master, or slave */
    enum MasterSlaveMode msMode;
    enum MasterSlaveExtraInfo extraInfo; /* extra event info (see enum above) */
    WebRtc_UWord16 instruction; /* instruction word passed master -> slave */
    WebRtc_Word16 distLag; /* presumably distortion lag from master -- confirm */
    WebRtc_Word16 corrLag; /* presumably correlation lag from master -- confirm */
    WebRtc_Word16 bestIndex;
    WebRtc_UWord32 endTimestamp;
    WebRtc_UWord16 samplesLeftWithOverlap;
} MasterSlaveInfo;
#endif
/* "Main" NetEQ DSP instance */
typedef struct DSPInst_t_
{
/* MCU/DSP Communication layer */
WebRtc_Word16 *pw16_readAddress;
WebRtc_Word16 *pw16_writeAddress;
void *main_inst;
/* Output frame size in ms and samples */
WebRtc_Word16 millisecondsPerCall;
WebRtc_Word16 timestampsPerCall;
/*
* Example of speech buffer
*
* -----------------------------------------------------------
* | History T-60 to T | Future |
* -----------------------------------------------------------
* ^ ^
* | |
* curPosition endPosition
*
* History is gradually shifted out to the left when inserting
* new data at the end.
*/
WebRtc_Word16 speechBuffer[SPEECH_BUF_SIZE]; /* History/future speech buffer */
int curPosition; /* Next sample to play */
int endPosition; /* Position that ends future data */
WebRtc_UWord32 endTimestamp; /* Timestamp value at end of future data */
WebRtc_UWord32 videoSyncTimestamp; /* (Estimated) timestamp of the last
played sample (usually same as
endTimestamp-(endPosition-curPosition)
except during Expand and CNG) */
WebRtc_UWord16 fs; /* sample rate in Hz */
WebRtc_Word16 w16_frameLen; /* decoder frame length in samples */
WebRtc_Word16 w16_mode; /* operation used during last RecOut call */
WebRtc_Word16 w16_muteFactor; /* speech mute factor in Q14 */
WebRtc_Word16 *pw16_speechHistory; /* beginning of speech history during Expand */
WebRtc_Word16 w16_speechHistoryLen; /* 256 for NB and 512 for WB */
/* random noise seed parameters */
WebRtc_Word16 w16_seedInc;
WebRtc_UWord32 uw16_seed;
/* VQmon related variable */
WebRtc_Word16 w16_concealedTS;
/*****************/
/* Sub-instances */
/*****************/
/* Decoder data */
CodecFuncInst_t codec_ptr_inst;
#ifdef NETEQ_CNG_CODEC
/* CNG "decoder" instance */
CNG_dec_inst *CNG_Codec_inst;
#endif /* NETEQ_CNG_CODEC */
#ifdef NETEQ_ATEVENT_DECODE
/* DTMF generator instance */
dtmf_tone_inst_t DTMFInst;
#endif /* NETEQ_CNG_CODEC */
#ifdef NETEQ_VAD
/* Post-decode VAD instance */
PostDecodeVAD_t VADInst;
#endif /* NETEQ_VAD */
/* Expand instance (defined above) */
ExpandInst_t ExpandInst;
/* Background noise instance (defined above) */
BGNInst_t BGNInst;
/* Internal statistics instance */
DSPStats_t statInst;
#ifdef NETEQ_STEREO
/* Pointer to Master/Slave info */
MasterSlaveInfo *msInfo;
#endif
} DSPInst_t;
/*************************/
/* Function declarations */
/*************************/
/****************************************************************************
* WebRtcNetEQ_DSPInit(...)
*
* Initializes DSP side of NetEQ.
*
* Input:
* - inst : NetEq DSP instance
* - fs : Initial sample rate (may change when decoding data)
*
* Output:
* - inst : Updated instance
*
* Return value : 0 - ok
* : non-zero - error
*/
int WebRtcNetEQ_DSPInit(DSPInst_t *inst, WebRtc_UWord16 fs);
/****************************************************************************
* WebRtcNetEQ_AddressInit(...)
*
* Initializes the shared-memory communication on the DSP side.
*
* Input:
* - inst : NetEQ DSP instance
* - data2McuAddress : Pointer to memory where DSP writes / MCU reads
* - data2DspAddress : Pointer to memory where MCU writes / DSP reads
* - mainInst : NetEQ main instance
*
* Output:
* - inst : Updated instance
*
* Return value : 0 - ok
*/
int WebRtcNetEQ_AddressInit(DSPInst_t *inst, const void *data2McuAddress,
const void *data2DspAddress, const void *mainInst);
/****************************************************************************
* WebRtcNetEQ_ClearInCallStats(...)
*
* Reset in-call statistics variables on DSP side.
*
* Input:
* - inst : NetEQ DSP instance
*
* Output:
* - inst : Updated instance
*
* Return value : 0 - ok
*/
int WebRtcNetEQ_ClearInCallStats(DSPInst_t *inst);
/****************************************************************************
* WebRtcNetEQ_ClearPostCallStats(...)
*
* Reset post-call statistics variables on DSP side.
*
* Input:
* - inst : NetEQ DSP instance
*
* Output:
* - inst : Updated instance
*
* Return value : 0 - ok
*/
int WebRtcNetEQ_ClearPostCallStats(DSPInst_t *inst);
/****************************************************************************
* WebRtcNetEQ_RecOutInternal(...)
*
* This function asks NetEQ for more speech/audio data.
*
* Input:
* - inst : NetEQ instance, i.e. the user that requests more
* speech/audio data.
* - outdata : Pointer to a memory space where the output data
* should be stored.
* - BGNonly : If non-zero, RecOut will only produce background
* noise. It will still draw packets from the packet
* buffer, but they will never be decoded.
*
* Output:
* - inst : Updated user information
* - len : Number of samples that were outputted from NetEq
*
* Return value : 0 - Ok
* -1 - Error
*/
int WebRtcNetEQ_RecOutInternal(DSPInst_t *inst, WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len,
WebRtc_Word16 BGNonly);
/****************************************************************************
* WebRtcNetEQ_Normal(...)
*
* This function has the possibility to modify data that is played out in Normal
* mode, for example adjust the gain of the signal. The length of the signal
* can not be changed.
*
* Input:
* - inst : NetEQ DSP instance
* - scratchPtr : Pointer to scratch vector
* - decoded : Pointer to vector of new data from decoder
* - len : Number of input samples
*
* Output:
* - inst : Updated user information
 * - pw16_len : Pointer to variable where the number of samples
* produced will be written
*
* Return value : >=0 - Number of samples written to outData
* -1 - Error
*/
int WebRtcNetEQ_Normal(DSPInst_t *inst,
#ifdef SCRATCH
WebRtc_Word16 *pw16_scratchPtr,
#endif
WebRtc_Word16 *pw16_decoded, WebRtc_Word16 len,
WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len);
/****************************************************************************
* WebRtcNetEQ_Expand(...)
*
* This function produces one "chunk" of expansion data (PLC audio). The
 * length of the produced audio depends on the speech history.
*
* Input:
* - inst : NetEQ DSP instance
* - scratchPtr : Pointer to scratch vector
* - BGNonly : If non-zero, Expand will only produce background
* noise.
* - pw16_len : Desired number of samples (only for BGN mode).
*
* Output:
* - inst : Updated user information
* - outdata : Pointer to a memory space where the output data
* should be stored
* - pw16_len : Number of samples that were outputted from NetEq
*
* Return value : 0 - Ok
* <0 - Error
*/
int WebRtcNetEQ_Expand(DSPInst_t *inst,
#ifdef SCRATCH
WebRtc_Word16 *pw16_scratchPtr,
#endif
WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len,
WebRtc_Word16 BGNonly);
/****************************************************************************
* WebRtcNetEQ_GenerateBGN(...)
*
* This function generates and writes len samples of background noise to the
 * output vector. The Expand function will be called repeatedly until the
* correct number of samples is produced.
*
* Input:
* - inst : NetEQ DSP instance
* - scratchPtr : Pointer to scratch vector
* - len : Desired length of produced BGN.
*
*
* Output:
* - pw16_outData : Pointer to a memory space where the output data
* should be stored
*
* Return value : >=0 - Number of noise samples produced and written
* to output
* -1 - Error
*/
int WebRtcNetEQ_GenerateBGN(DSPInst_t *inst,
#ifdef SCRATCH
WebRtc_Word16 *pw16_scratchPtr,
#endif
WebRtc_Word16 *pw16_outData, WebRtc_Word16 len);
/****************************************************************************
* WebRtcNetEQ_PreEmptiveExpand(...)
*
* This function tries to extend the audio data by repeating one or several
* pitch periods. The operation is only carried out if the correlation is
* strong or if the signal energy is very low. The algorithm is the
* reciprocal of the Accelerate algorithm.
*
* Input:
* - inst : NetEQ DSP instance
* - scratchPtr : Pointer to scratch vector.
* - decoded : Pointer to newly decoded speech.
* - len : Length of decoded speech.
* - oldDataLen : Length of the part of decoded that has already been played out.
* - BGNonly : If non-zero, Pre-emptive Expand will only copy
* the first DEFAULT_TIME_ADJUST seconds of the
* input and append to the end. No signal matching is
* done.
*
* Output:
* - inst : Updated instance
* - outData : Pointer to a memory space where the output data
* should be stored. The vector must be at least
* min(len + 120*fs/8000, NETEQ_MAX_OUTPUT_SIZE)
* elements long.
* - pw16_len : Number of samples written to outData.
*
* Return value : 0 - Ok
* <0 - Error
*/
int WebRtcNetEQ_PreEmptiveExpand(DSPInst_t *inst,
#ifdef SCRATCH
WebRtc_Word16 *pw16_scratchPtr,
#endif
const WebRtc_Word16 *pw16_decoded, int len, int oldDataLen,
WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len,
WebRtc_Word16 BGNonly);
/****************************************************************************
* WebRtcNetEQ_Accelerate(...)
*
* This function tries to shorten the audio data by removing one or several
* pitch periods. The operation is only carried out if the correlation is
* strong or if the signal energy is very low.
*
* Input:
* - inst : NetEQ DSP instance
* - scratchPtr : Pointer to scratch vector.
* - decoded : Pointer to newly decoded speech.
* - len : Length of decoded speech.
* - BGNonly : If non-zero, Accelerate will only remove the last
 * DEFAULT_TIME_ADJUST seconds of the input.
* No signal matching is done.
*
*
* Output:
* - inst : Updated instance
* - outData : Pointer to a memory space where the output data
* should be stored
* - pw16_len : Number of samples written to outData.
*
* Return value : 0 - Ok
* <0 - Error
*/
int WebRtcNetEQ_Accelerate(DSPInst_t *inst,
#ifdef SCRATCH
WebRtc_Word16 *pw16_scratchPtr,
#endif
const WebRtc_Word16 *pw16_decoded, int len,
WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len,
WebRtc_Word16 BGNonly);
/****************************************************************************
* WebRtcNetEQ_Merge(...)
*
 * This function is used to merge new data from the decoder to the existing
* stream in the synchronization buffer. The merge operation is typically
* done after a packet loss, where the end of the expanded data does not
* fit naturally with the new decoded data.
*
* Input:
* - inst : NetEQ DSP instance
* - scratchPtr : Pointer to scratch vector.
* - decoded : Pointer to new decoded speech.
* - len : Number of samples in pw16_decoded.
*
*
* Output:
* - inst : Updated user information
* - outData : Pointer to a memory space where the output data
* should be stored
* - pw16_len : Number of samples written to pw16_outData
*
* Return value : 0 - Ok
* <0 - Error
*/
int WebRtcNetEQ_Merge(DSPInst_t *inst,
#ifdef SCRATCH
WebRtc_Word16 *pw16_scratchPtr,
#endif
WebRtc_Word16 *pw16_decoded, int len, WebRtc_Word16 *pw16_outData,
WebRtc_Word16 *pw16_len);
/****************************************************************************
* WebRtcNetEQ_Cng(...)
*
* This function produces CNG according to RFC 3389
*
* Input:
* - inst : NetEQ DSP instance
* - len : Number of samples to produce
*
* Output:
* - pw16_outData : Output CNG
*
* Return value : 0 - Ok
* <0 - Error
*/
#ifdef NETEQ_CNG_CODEC
/* Must compile NetEQ with CNG support to enable this function */
int WebRtcNetEQ_Cng(DSPInst_t *inst, WebRtc_Word16 *pw16_outData, int len);
#endif /* NETEQ_CNG_CODEC */
/****************************************************************************
* WebRtcNetEQ_BGNUpdate(...)
*
* This function updates the background noise parameter estimates.
*
* Input:
* - inst : NetEQ instance, where the speech history is stored.
* - scratchPtr : Pointer to scratch vector.
*
* Output:
* - inst : Updated information about the BGN characteristics.
*
* Return value : No return value
*/
void WebRtcNetEQ_BGNUpdate(
#ifdef SCRATCH
DSPInst_t *inst, WebRtc_Word16 *pw16_scratchPtr
#else
DSPInst_t *inst
#endif
);
#ifdef NETEQ_VAD
/* Functions used by post-decode VAD */
/****************************************************************************
* WebRtcNetEQ_InitVAD(...)
*
* Initializes post-decode VAD instance.
*
* Input:
* - VADinst : PostDecodeVAD instance
* - fs : Initial sample rate
*
* Output:
* - VADinst : Updated instance
*
* Return value : 0 - Ok
* -1 - Error
*/
int WebRtcNetEQ_InitVAD(PostDecodeVAD_t *VADInst, WebRtc_UWord16 fs);
/****************************************************************************
* WebRtcNetEQ_SetVADModeInternal(...)
*
* Set the VAD mode in the VAD struct, and communicate it to the VAD instance
* if it exists.
*
* Input:
* - VADinst : PostDecodeVAD instance
* - mode : Mode number passed on to the VAD function
*
* Output:
* - VADinst : Updated instance
*
* Return value : 0 - Ok
* -1 - Error
*/
int WebRtcNetEQ_SetVADModeInternal(PostDecodeVAD_t *VADInst, WebRtc_Word16 mode);
#endif /* NETEQ_VAD */
/****************************************************************************
* WebRtcNetEQ_FlushSpeechBuffer(...)
*
* Flush the speech buffer.
*
* Input:
* - inst : NetEq DSP instance
*
* Output:
* - inst : Updated instance
*
* Return value : 0 - ok
* : non-zero - error
*/
int WebRtcNetEQ_FlushSpeechBuffer(DSPInst_t *inst);
#ifndef WEBRTC_NETEQ_40BITACC_TEST
#include "signal_processing_library.h"
/* Map to regular SPL functions */
#define WebRtcNetEQ_CrossCorr WebRtcSpl_CrossCorrelation
#define WebRtcNetEQ_DotW16W16 WebRtcSpl_DotProductWithScale
#else /* WEBRTC_NETEQ_40BITACC_TEST defined */
/* Run NetEQ with simulated 40-bit accumulator to run bit-exact to a DSP
implementation where the main (splib and NetEQ) functions have been
40-bit optimized. */
/* Map to special 40-bit optimized functions, defined below */
#define WebRtcNetEQ_CrossCorr WebRtcNetEQ_40BitAccCrossCorr
#define WebRtcNetEQ_DotW16W16 WebRtcNetEQ_40BitAccDotW16W16
/****************************************************************************
* WebRtcNetEQ_40BitAccCrossCorr(...)
*
* Calculates the Cross correlation between two sequences seq1 and seq2. Seq1
* is fixed and seq2 slides as the pointer is increased with step
*
* Input:
* - seq1 : First sequence (fixed throughout the correlation)
* - seq2 : Second sequence (slided step_seq2 for each
* new correlation)
* - dimSeq : Number of samples to use in the cross correlation.
* Should be no larger than 1024 to avoid overflow.
* - dimCrossCorr : Number of CrossCorrelations to calculate (start
* position for seq2 is updated for each new one)
* - rShift : Number of right shifts to use
* - step_seq2 : How many (positive or negative) steps the seq2
* pointer should be updated for each new cross
* correlation value
*
* Output:
* - crossCorr : The cross correlation in Q-rShift
*/
void WebRtcNetEQ_40BitAccCrossCorr(WebRtc_Word32 *crossCorr, WebRtc_Word16 *seq1,
WebRtc_Word16 *seq2, WebRtc_Word16 dimSeq,
WebRtc_Word16 dimCrossCorr, WebRtc_Word16 rShift,
WebRtc_Word16 step_seq2);
/****************************************************************************
* WebRtcNetEQ_40BitAccDotW16W16(...)
*
* Calculates the dot product between two vectors (WebRtc_Word16)
*
* Input:
* - vector1 : Vector 1
* - vector2 : Vector 2
* - len : Number of samples in vector
* Should be no larger than 1024 to avoid overflow.
* - scaling : The number of right shifts (after multiplication)
* required to avoid overflow in the dot product.
* Return value : The dot product
*/
WebRtc_Word32 WebRtcNetEQ_40BitAccDotW16W16(WebRtc_Word16 *vector1, WebRtc_Word16 *vector2,
int len, int scaling);
#endif /* WEBRTC_NETEQ_40BITACC_TEST */
#endif /* DSP_H */

View File

@ -1,120 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* This file contains some help functions that did not fit elsewhere.
*/
#include "dsp_helpfunctions.h"
/* Map a sample rate in Hz to the fs/8000 multiplier used throughout NetEQ.
 Unsupported rates fall back to 1, the same value as 8000 Hz. */
WebRtc_Word16 WebRtcNetEQ_CalcFsMult(WebRtc_UWord16 fsHz)
{
    if (fsHz == 16000)
    {
        return 2;
    }
    if (fsHz == 32000)
    {
        return 4;
    }
    if (fsHz == 48000)
    {
        return 6;
    }

    /* 8000 Hz, and the default for any unrecognized rate. */
    return 1;
}
/****************************************************************************
 * WebRtcNetEQ_DownSampleTo4kHz(...)
 *
 * Down-sample the signal in |in| (sampled at inFsHz) to 4 kHz into |out|,
 * using WebRtcSpl_DownsampleFast with a rate-dependent FIR filter.
 *
 * Input:
 *      - in              : Input signal samples.
 *      - inLen           : Number of input samples.
 *      - inFsHz          : Input sample rate (8000/16000/32000/48000,
 *                          subject to build flags).
 *      - outLen          : Number of output samples to produce.
 *      - compensateDelay : If non-zero, compensate for the filter phase delay.
 *
 * Output:
 *      - out             : Down-sampled signal.
 *
 * Return value           : 0 - ok; -1 - error (unsupported rate, or input
 *                          signal too short)
 */
int WebRtcNetEQ_DownSampleTo4kHz(const WebRtc_Word16 *in, int inLen, WebRtc_UWord16 inFsHz,
                                 WebRtc_Word16 *out, int outLen, int compensateDelay)
{
    WebRtc_Word16 *B; /* filter coefficients */
    WebRtc_Word16 Blen; /* number of coefficients */
    WebRtc_Word16 filterDelay; /* phase delay in samples */
    WebRtc_Word16 factor; /* conversion rate (inFsHz/4000) */
    int ok;

    /* Set constants depending on frequency used */
    /* NOTE: The phase delay values are wrong compared to the true phase delay
     of the filters. However, the error is preserved (through the +1 term)
     for consistency. */
    switch (inFsHz)
    {
        case 8000:
        {
            Blen = 3;
            factor = 2;
            B = (WebRtc_Word16*) WebRtcNetEQ_kDownsample8kHzTbl;
            filterDelay = 1 + 1;
            break;
        }
#ifdef NETEQ_WIDEBAND
        case 16000:
        {
            Blen = 5;
            factor = 4;
            B = (WebRtc_Word16*) WebRtcNetEQ_kDownsample16kHzTbl;
            filterDelay = 2 + 1;
            break;
        }
#endif
#ifdef NETEQ_32KHZ_WIDEBAND
        case 32000:
        {
            Blen = 7;
            factor = 8;
            B = (WebRtc_Word16*) WebRtcNetEQ_kDownsample32kHzTbl;
            filterDelay = 3 + 1;
            break;
        }
#endif
#ifdef NETEQ_48KHZ_WIDEBAND
        case 48000:
        {
            Blen = 7;
            factor = 12;
            B = (WebRtc_Word16*) WebRtcNetEQ_kDownsample48kHzTbl;
            filterDelay = 3 + 1;
            break;
        }
#endif
        default:
        {
            /* unsupported or wrong sample rate */
            return -1;
        }
    }

    if (!compensateDelay)
    {
        /* disregard delay compensation */
        filterDelay = 0;
    }

    /* Skip the first Blen-1 samples to give the filter a full history. */
    ok = WebRtcSpl_DownsampleFast((WebRtc_Word16*) &in[Blen - 1],
        (WebRtc_Word16) (inLen - (Blen - 1)), /* number of input samples */
        out, (WebRtc_Word16) outLen, /* number of output samples to produce */
        B, Blen, factor, filterDelay); /* filter parameters */

    return ok; /* return value is -1 if input signal is too short */
}

View File

@ -1,220 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* Various help functions used by the DSP functions.
*/
#ifndef DSP_HELPFUNCTIONS_H
#define DSP_HELPFUNCTIONS_H
#include "typedefs.h"
#include "dsp.h"
/****************************************************************************
* WebRtcNetEQ_Correlator(...)
*
* Calculate signal correlation.
*
* Input:
* - inst : DSP instance
* - data : Speech history to do expand from (older history in data[-4..-1])
* - dataLen : Length of data
*
* Output:
* - corrOut : CC of downsampled signal
* - corrScale : Scale factor for correlation (-Qdomain)
*
* Return value : Length of correlated data
*/
WebRtc_Word16 WebRtcNetEQ_Correlator(DSPInst_t *inst,
#ifdef SCRATCH
WebRtc_Word16 *pw16_scratchPtr,
#endif
WebRtc_Word16 *pw16_data, WebRtc_Word16 w16_dataLen,
WebRtc_Word16 *pw16_corrOut,
WebRtc_Word16 *pw16_corrScale);
/****************************************************************************
* WebRtcNetEQ_PeakDetection(...)
*
* Peak detection with parabolic fit.
*
* Input:
* - data : Data sequence for peak detection
* - dataLen : Length of data
* - nmbPeaks : Number of peaks to detect
* - fs_mult : Sample rate multiplier
*
* Output:
* - corrIndex : Index of the peak
* - winner : Value of the peak
*
* Return value : 0 for ok
*/
WebRtc_Word16 WebRtcNetEQ_PeakDetection(WebRtc_Word16 *pw16_data, WebRtc_Word16 w16_dataLen,
WebRtc_Word16 w16_nmbPeaks, WebRtc_Word16 fs_mult,
WebRtc_Word16 *pw16_corrIndex,
WebRtc_Word16 *pw16_winners);
/****************************************************************************
* WebRtcNetEQ_PrblFit(...)
*
 * Three-point parabola fit.
*
* Input:
* - 3pts : Three input samples
* - fs_mult : Sample rate multiplier
*
* Output:
* - Ind : Index of the peak
* - outVal : Value of the peak
*
* Return value : 0 for ok
*/
WebRtc_Word16 WebRtcNetEQ_PrblFit(WebRtc_Word16 *pw16_3pts, WebRtc_Word16 *pw16_Ind,
WebRtc_Word16 *pw16_outVal, WebRtc_Word16 fs_mult);
/****************************************************************************
* WebRtcNetEQ_MinDistortion(...)
*
* Find the lag that results in minimum distortion.
*
* Input:
* - data : Start of speech to perform distortion on, second vector is assumed
* to be data[-Lag]
* - minLag : Start lag
* - maxLag : End lag
* - len : Length to correlate
*
* Output:
 *      - dist          : Distortion value
*
* Return value : Lag for minimum distortion
*/
WebRtc_Word16 WebRtcNetEQ_MinDistortion(const WebRtc_Word16 *pw16_data,
WebRtc_Word16 w16_minLag, WebRtc_Word16 w16_maxLag,
WebRtc_Word16 len, WebRtc_Word32 *pw16_dist);
/****************************************************************************
* WebRtcNetEQ_RandomVec(...)
*
* Generate random vector.
*
* Input:
* - seed : Current seed (input/output)
* - len : Number of samples to generate
* - incVal : Jump step
*
* Output:
* - randVec : Generated random vector
*/
void WebRtcNetEQ_RandomVec(WebRtc_UWord32 *w32_seed, WebRtc_Word16 *pw16_randVec,
WebRtc_Word16 w16_len, WebRtc_Word16 w16_incval);
/****************************************************************************
* WebRtcNetEQ_MixVoiceUnvoice(...)
*
* Mix voiced and unvoiced signal.
*
* Input:
* - voicedVec : Voiced input signal
* - unvoicedVec : Unvoiced input signal
* - current_vfraction : Current mixing factor
* - vfraction_change : Mixing factor change per sample
* - N : Number of samples
*
* Output:
* - outData : Mixed signal
*/
void WebRtcNetEQ_MixVoiceUnvoice(WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_voicedVec,
WebRtc_Word16 *pw16_unvoicedVec,
WebRtc_Word16 *w16_current_vfraction,
WebRtc_Word16 w16_vfraction_change, WebRtc_Word16 N);
/****************************************************************************
* WebRtcNetEQ_UnmuteSignal(...)
*
* Gradually reduce attenuation.
*
* Input:
* - inVec : Input signal
* - startMuteFact : Starting attenuation
* - unmuteFact : Factor to "unmute" with (Q20)
* - N : Number of samples
*
* Output:
* - outVec : Output signal
*/
void WebRtcNetEQ_UnmuteSignal(WebRtc_Word16 *pw16_inVec, WebRtc_Word16 *startMuteFact,
WebRtc_Word16 *pw16_outVec, WebRtc_Word16 unmuteFact,
WebRtc_Word16 N);
/****************************************************************************
* WebRtcNetEQ_MuteSignal(...)
*
* Gradually increase attenuation.
*
* Input:
* - inout : Input/output signal
* - muteSlope : Slope of muting
* - N : Number of samples
*/
void WebRtcNetEQ_MuteSignal(WebRtc_Word16 *pw16_inout, WebRtc_Word16 muteSlope,
WebRtc_Word16 N);
/****************************************************************************
* WebRtcNetEQ_CalcFsMult(...)
*
* Calculate the sample rate divided by 8000.
*
* Input:
* - fsHz : Sample rate in Hz in {8000, 16000, 32000, 48000}.
*
* Return value : fsHz/8000 for the valid values, 1 for other inputs
*/
WebRtc_Word16 WebRtcNetEQ_CalcFsMult(WebRtc_UWord16 fsHz);
/****************************************************************************
* WebRtcNetEQ_DownSampleTo4kHz(...)
*
* Lowpass filter and downsample a signal to 4 kHz sample rate.
*
* Input:
* - in : Input signal samples.
* - inLen : Number of input samples.
* - inFsHz : Input sample rate in Hz.
* - outLen : Desired number of samples in decimated signal.
* - compensateDelay : If non-zero, compensate for the phase delay of
* of the anti-alias filter.
*
* Output:
* - out : Output signal samples.
*
* Return value : 0 - Ok
* -1 - Error
*
*/
int WebRtcNetEQ_DownSampleTo4kHz(const WebRtc_Word16 *in, int inLen, WebRtc_UWord16 inFsHz,
WebRtc_Word16 *out, int outLen, int compensateDelay);
#endif

View File

@ -1,232 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* Implementation of packet buffer for DTMF messages.
*/
#include "dtmf_buffer.h"
#include "typedefs.h" /* to define endianness */
#include "signal_processing_library.h"
#include "neteq_error_codes.h"
#ifdef NETEQ_ATEVENT_DECODE
/*
 * Remove the first (oldest) event from the DTMF event queue by shifting the
 * remaining events one step towards the head and clearing the freed slot.
 *
 * Input:
 *      - DTMFdec_inst  : DTMF instance
 *
 * Return value         : 0 - Ok
 *
 * Improvement: the original hardcoded the magic numbers 3 and 4, silently
 * duplicating MAX_DTMF_QUEUE_SIZE; use the macro so the queue size can be
 * changed in one place.
 */
WebRtc_Word16 WebRtcNetEQ_DtmfRemoveEvent(dtmf_inst_t *DTMFdec_inst)
{
    int i;

    /* Shift all queued events one position towards the head. */
    for (i = 0; i < MAX_DTMF_QUEUE_SIZE - 1; i++)
    {
        DTMFdec_inst->EventQueue[i] = DTMFdec_inst->EventQueue[i + 1];
        DTMFdec_inst->EventQueueVolume[i] = DTMFdec_inst->EventQueueVolume[i + 1];
        DTMFdec_inst->EventQueueEnded[i] = DTMFdec_inst->EventQueueEnded[i + 1];
        DTMFdec_inst->EventQueueStartTime[i] = DTMFdec_inst->EventQueueStartTime[i + 1];
        DTMFdec_inst->EventQueueEndTime[i] = DTMFdec_inst->EventQueueEndTime[i + 1];
    }
    DTMFdec_inst->EventBufferSize--;

    /* Mark the vacated last slot as empty (-1 == no event). */
    DTMFdec_inst->EventQueue[MAX_DTMF_QUEUE_SIZE - 1] = -1;
    DTMFdec_inst->EventQueueVolume[MAX_DTMF_QUEUE_SIZE - 1] = 0;
    DTMFdec_inst->EventQueueEnded[MAX_DTMF_QUEUE_SIZE - 1] = 0;
    DTMFdec_inst->EventQueueStartTime[MAX_DTMF_QUEUE_SIZE - 1] = 0;
    DTMFdec_inst->EventQueueEndTime[MAX_DTMF_QUEUE_SIZE - 1] = 0;
    return 0;
}
/*
 * Initialize a DTMF decoder instance: validate the parameters, derive the
 * 10 ms frame length from the sample rate, and empty the event queue.
 *
 * Input:
 *      - DTMFdec_inst  : DTMF instance
 *      - fs            : Sample rate in {8000, 16000, 32000, 48000} Hz
 *      - MaxPLCtime    : Maximum PLC duration; must be non-negative
 *
 * Return value         : 0 - Ok
 *                        DTMF_DEC_PARAMETER_ERROR on invalid input
 */
WebRtc_Word16 WebRtcNetEQ_DtmfDecoderInit(dtmf_inst_t *DTMFdec_inst, WebRtc_UWord16 fs,
                                          WebRtc_Word16 MaxPLCtime)
{
    int i;

    /* A negative PLC limit is invalid. */
    if (MaxPLCtime < 0)
    {
        return DTMF_DEC_PARAMETER_ERROR;
    }

    /* One frame is 10 ms worth of samples at the given rate. */
    switch (fs)
    {
        case 8000:
            DTMFdec_inst->framelen = 80;
            break;
        case 16000:
            DTMFdec_inst->framelen = 160;
            break;
        case 32000:
            DTMFdec_inst->framelen = 320;
            break;
        case 48000:
            DTMFdec_inst->framelen = 480;
            break;
        default:
            /* unsupported sample rate */
            return DTMF_DEC_PARAMETER_ERROR;
    }

    DTMFdec_inst->MaxPLCtime = MaxPLCtime;
    DTMFdec_inst->CurrentPLCtime = 0;
    DTMFdec_inst->EventBufferSize = 0;

    /* Mark every queue slot as empty (-1 == no event). */
    for (i = 0; i < MAX_DTMF_QUEUE_SIZE; i++)
    {
        DTMFdec_inst->EventQueue[i] = -1;
        DTMFdec_inst->EventQueueVolume[i] = 0;
        DTMFdec_inst->EventQueueEnded[i] = 0;
        DTMFdec_inst->EventQueueStartTime[i] = 0;
        DTMFdec_inst->EventQueueEndTime[i] = 0;
    }
    return 0;
}
/*
 * Parse a 4-byte telephone-event payload (RFC 4733-style: event number,
 * end bit, volume, duration -- presumably; verify against the sender) and
 * insert it into the event queue, or merge it with an already queued,
 * unended event of the same type.
 *
 * Input:
 *      - DTMFdec_inst  : DTMF instance
 *      - encoded       : Payload bytes, read as two 16-bit words
 *      - len           : Payload length in bytes; must be 4
 *      - timeStamp     : RTP timestamp of the packet
 *
 * Return value         : 0 - Ok (including silently discarded events)
 *                        DTMF_INSERT_ERROR if len != 4
 */
WebRtc_Word16 WebRtcNetEQ_DtmfInsertEvent(dtmf_inst_t *DTMFdec_inst,
                                          const WebRtc_Word16 *encoded, WebRtc_Word16 len,
                                          WebRtc_UWord32 timeStamp)
{
    int i;
    WebRtc_Word16 value;    /* event number (0-15) */
    const WebRtc_Word16 *EventStart;
    WebRtc_Word16 endEvent; /* the "E" (end-of-event) bit */
    WebRtc_Word16 Volume;   /* negative dBm0, 0..63 on the wire */
    WebRtc_Word16 Duration; /* duration in timestamp units, from event start */
    WebRtc_Word16 position = -1;

    /* Extract event */
    if (len == 4)
    {
        EventStart = encoded;
#ifdef WEBRTC_BIG_ENDIAN
        value=((*EventStart)>>8);
        endEvent=((*EventStart)&0x80)>>7;
        Volume=((*EventStart)&0x3F);
        Duration=EventStart[1];
#else
        /* Little-endian host: fields sit in swapped byte positions, and the
           16-bit duration must be byte-swapped explicitly. */
        value = ((*EventStart) & 0xFF);
        endEvent = ((*EventStart) & 0x8000) >> 15;
        Volume = ((*EventStart) & 0x3F00) >> 8;
        Duration = (((((WebRtc_UWord16) EventStart[1]) >> 8) & 0xFF)
            | (((WebRtc_UWord16) (EventStart[1] & 0xFF)) << 8));
#endif
        /* Only events between 0-15 are supported (DTMF tones) */
        if ((value < 0) || (value > 15))
        {
            return 0;
        }
        /* Discard all DTMF tones with really low volume (<-36dbm0) */
        if (Volume > 36)
        {
            return 0;
        }
        /* Are there any unended events of the same type? */
        for (i = 0; i < DTMFdec_inst->EventBufferSize; i++)
        {
            /* Going through the whole queue even when we have found a match will
             ensure that we add to the latest applicable event */
            if ((DTMFdec_inst->EventQueue[i] == value) && (!DTMFdec_inst->EventQueueEnded[i]
                || endEvent)) position = i;
        }
        if (position > -1)
        {
            /* Update the matching queued event in place. */
            DTMFdec_inst->EventQueueVolume[position] = Volume;
            /* NOTE(review): the guard compares against timeStamp + Duration but
               the assignment uses EventQueueStartTime + Duration; these agree
               only when timeStamp equals the event's start timestamp (as for
               RFC 4733 updates of a running event) -- confirm intended. */
            if ((timeStamp + Duration) > DTMFdec_inst->EventQueueEndTime[position]) DTMFdec_inst->EventQueueEndTime[position]
                = DTMFdec_inst->EventQueueStartTime[position] + Duration;
            if (endEvent) DTMFdec_inst->EventQueueEnded[position] = 1;
        }
        else
        {
            if (DTMFdec_inst->EventBufferSize == MAX_DTMF_QUEUE_SIZE)
            { /* Buffer full */
                /* Remove one event */
                DTMFdec_inst->EventBufferSize--;
            }
            /* Store data in the instance on a new position */
            DTMFdec_inst->EventQueue[DTMFdec_inst->EventBufferSize] = value;
            DTMFdec_inst->EventQueueVolume[DTMFdec_inst->EventBufferSize] = Volume;
            DTMFdec_inst->EventQueueEnded[DTMFdec_inst->EventBufferSize] = endEvent;
            DTMFdec_inst->EventQueueStartTime[DTMFdec_inst->EventBufferSize] = timeStamp;
            DTMFdec_inst->EventQueueEndTime[DTMFdec_inst->EventBufferSize] = timeStamp
                + Duration;
            DTMFdec_inst->EventBufferSize++;
        }
        return 0;
    }
    return DTMF_INSERT_ERROR;
}
/*
 * Decide whether a queued DTMF event should be played at currTimeStamp and,
 * if so, report its event number and volume through the output pointers.
 *
 * Input:
 *      - DTMFdec_inst  : DTMF instance
 *      - currTimeStamp : Current playout timestamp
 *
 * Output:
 *      - event         : Event number to play
 *      - volume        : Event volume to play
 *
 * Return value         : framelen (> 0) if an event should be played
 *                        0 if there is nothing to play
 */
WebRtc_Word16 WebRtcNetEQ_DtmfDecode(dtmf_inst_t *DTMFdec_inst, WebRtc_Word16 *event,
                                     WebRtc_Word16 *volume, WebRtc_UWord32 currTimeStamp)
{
    if (DTMFdec_inst->EventBufferSize < 1) return 0; /* No events to play */

    /* We have events, is it time to play them? */
    if (currTimeStamp < DTMFdec_inst->EventQueueStartTime[0])
    {
        /* No, just return zero */
        return 0;
    }

    /* Continue on the event that is currently ongoing */
    *event = DTMFdec_inst->EventQueue[0];
    *volume = DTMFdec_inst->EventQueueVolume[0];

    if (DTMFdec_inst->EventQueueEndTime[0] >= (currTimeStamp + DTMFdec_inst->framelen))
    {
        /* Still at least frameLen samples of the event left to play */
        DTMFdec_inst->CurrentPLCtime = 0;
        if ((DTMFdec_inst->EventQueueEndTime[0] == (currTimeStamp + DTMFdec_inst->framelen))
            && (DTMFdec_inst->EventQueueEnded[0]))
        { /* We are done */
            /* Remove the event from queue */
            WebRtcNetEQ_DtmfRemoveEvent(DTMFdec_inst);
        }
        return DTMFdec_inst->framelen;
    }
    else
    {
        if ((DTMFdec_inst->EventQueueEnded[0]) || (DTMFdec_inst->EventQueue[1] > -1))
        {
            /*
             * Less than frameLen to play and end of event or already received next event.
             * Give out a whole frame size of audio to simplify things.
             */
            /* Remove the event from queue */
            WebRtcNetEQ_DtmfRemoveEvent(DTMFdec_inst);
            DTMFdec_inst->CurrentPLCtime = 0;
            return DTMFdec_inst->framelen;
        }
        else
        {
            /* Less than frameLen to play and not end of event. */
            /* Track how far we have played past the reported end time. */
            DTMFdec_inst->CurrentPLCtime = (WebRtc_Word16) (currTimeStamp
                - DTMFdec_inst->EventQueueEndTime[0]);
            if ((DTMFdec_inst->CurrentPLCtime > DTMFdec_inst->MaxPLCtime)
                || (DTMFdec_inst->CurrentPLCtime < -DTMFdec_inst->MaxPLCtime))
            {
                /* PLC limit exceeded; remove the event from queue */
                WebRtcNetEQ_DtmfRemoveEvent(DTMFdec_inst);
                DTMFdec_inst->CurrentPLCtime = 0;
            }
            /* If we have a new event that it's time to play */
            /* NOTE(review): this condition fires when the NEXT event starts at
               or AFTER the end of the current frame, which looks inverted
               relative to the comment above -- confirm intended behavior. */
            if ((DTMFdec_inst->EventQueue[1] > -1) && (DTMFdec_inst->EventQueueStartTime[1]
                >= (currTimeStamp + DTMFdec_inst->framelen)))
            {
                /* Remove the event from queue */
                WebRtcNetEQ_DtmfRemoveEvent(DTMFdec_inst);
                DTMFdec_inst->CurrentPLCtime = 0;
            }
            return DTMFdec_inst->framelen;
        }
    }
}
#endif

View File

@ -1,101 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* Packet buffer for DTMF messages.
*/
#ifndef DTMF_BUFFER_H
#define DTMF_BUFFER_H
#include "typedefs.h"
#include "neteq_defines.h"
/* Include this code only if ATEVENT (DTMF) is defined in */
#ifdef NETEQ_ATEVENT_DECODE
#define MAX_DTMF_QUEUE_SIZE 4
/* State for the DTMF packet buffer: a small FIFO of pending telephone events. */
typedef struct dtmf_inst_t_
{
    WebRtc_Word16 MaxPLCtime;     /* max time to keep playing past an event's end */
    WebRtc_Word16 CurrentPLCtime; /* time played past the current event's end */
    WebRtc_Word16 EventQueue[MAX_DTMF_QUEUE_SIZE];       /* event numbers (0-15); -1 marks an empty slot */
    WebRtc_Word16 EventQueueVolume[MAX_DTMF_QUEUE_SIZE]; /* volume (negative dBm0) per event */
    WebRtc_Word16 EventQueueEnded[MAX_DTMF_QUEUE_SIZE];  /* non-zero once the end-of-event bit was seen */
    WebRtc_UWord32 EventQueueStartTime[MAX_DTMF_QUEUE_SIZE]; /* RTP timestamp where the event starts */
    WebRtc_UWord32 EventQueueEndTime[MAX_DTMF_QUEUE_SIZE];   /* RTP timestamp where the event ends */
    WebRtc_Word16 EventBufferSize; /* number of events currently queued */
    WebRtc_Word16 framelen;        /* samples per 10 ms frame at the configured rate */
} dtmf_inst_t;
/****************************************************************************
* WebRtcNetEQ_DtmfDecoderInit(...)
*
* This function initializes a DTMF instance.
*
* Input:
* - DTMF_decinst_t : DTMF instance
* - fs : The sample rate used for the DTMF
* - MaxPLCtime : Maximum length for a PLC before zeros should be inserted
*
* Return value : 0 - Ok
* -1 - Error
*/
WebRtc_Word16 WebRtcNetEQ_DtmfDecoderInit(dtmf_inst_t *DTMFdec_inst, WebRtc_UWord16 fs,
WebRtc_Word16 MaxPLCtime);
/****************************************************************************
* WebRtcNetEQ_DtmfInsertEvent(...)
*
* This function decodes a packet with DTMF frames.
*
* Input:
* - DTMFdec_inst : DTMF instance
* - encoded : Encoded DTMF frame(s)
* - len : Bytes in encoded vector
*
*
* Return value : 0 - Ok
* -1 - Error
*/
WebRtc_Word16 WebRtcNetEQ_DtmfInsertEvent(dtmf_inst_t *DTMFdec_inst,
const WebRtc_Word16 *encoded, WebRtc_Word16 len,
WebRtc_UWord32 timeStamp);
/****************************************************************************
* WebRtcNetEQ_DtmfDecode(...)
*
* This function decodes a packet with DTMF frame(s). Output will be the
* event that should be played for next 10 ms.
*
* Input:
* - DTMFdec_inst : DTMF instance
* - currTimeStamp : The current playout timestamp
*
* Output:
* - event : Event number to be played
* - volume : Event volume to be played
*
* Return value : >0 - There is a event to be played
* 0 - No event to be played
* -1 - Error
*/
WebRtc_Word16 WebRtcNetEQ_DtmfDecode(dtmf_inst_t *DTMFdec_inst, WebRtc_Word16 *event,
WebRtc_Word16 *volume, WebRtc_UWord32 currTimeStamp);
#endif /* NETEQ_ATEVENT_DECODE */
#endif /* DTMF_BUFFER_H */

View File

@ -1,371 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* This file contains the DTMF tone generator and its parameters.
*
* A sinusoid is generated using the recursive oscillator model
*
* y[n] = sin(w*n + phi) = 2*cos(w) * y[n-1] - y[n-2]
* = a * y[n-1] - y[n-2]
*
* initialized with
* y[-2] = 0
* y[-1] = sin(w)
*
* A DTMF signal is a combination of two sinusoids, depending
* on which event is sent (i.e, which key is pressed). The following
* table maps each key (event codes in parentheses) into two tones:
*
* 1209 Hz 1336 Hz 1477 Hz 1633 Hz
* 697 Hz 1 (ev. 1) 2 (ev. 2) 3 (ev. 3) A (ev. 12)
* 770 Hz 4 (ev. 4) 5 (ev. 5) 6 (ev. 6) B (ev. 13)
* 852 Hz 7 (ev. 7) 8 (ev. 8) 9 (ev. 9) C (ev. 14)
* 941 Hz * (ev. 10) 0 (ev. 0) # (ev. 11) D (ev. 15)
*
* The two tones are added to form the DTMF signal.
*
*/
#include "dtmf_tonegen.h"
#include "signal_processing_library.h"
#include "neteq_error_codes.h"
#ifdef NETEQ_ATEVENT_DECODE
/* Must compile NetEQ with DTMF support to enable the functionality */
/*******************/
/* Constant tables */
/*******************/
/*
* All tables corresponding to the oscillator model are organized so that
* the coefficients for a specific frequency is found in the same position
* in every table. The positions for the tones follow this layout:
*
* dummyVector[8] =
* {
* 697 Hz, 770 Hz, 852 Hz, 941 Hz,
* 1209 Hz, 1336 Hz, 1477 Hz, 1633 Hz
* };
*/
/*
* Tables for the constant a = 2*cos(w) = 2*cos(2*pi*f/fs)
* in the oscillator model, for 8, 16, 32 and 48 kHz sample rate.
* Table values in Q14.
*/
/* a = 2*cos(2*pi*f/fs) in Q14, fs = 8 kHz */
const WebRtc_Word16 WebRtcNetEQ_dtfm_aTbl8Khz[8] =
{
    27980, 26956, 25701, 24219,
    19073, 16325, 13085, 9315
};

#ifdef NETEQ_WIDEBAND
/* a = 2*cos(2*pi*f/fs) in Q14, fs = 16 kHz */
const WebRtc_Word16 WebRtcNetEQ_dtfm_aTbl16Khz[8]=
{
    31548, 31281, 30951, 30556,
    29144, 28361, 27409, 26258
};
#endif

#ifdef NETEQ_32KHZ_WIDEBAND
/* a = 2*cos(2*pi*f/fs) in Q14, fs = 32 kHz */
const WebRtc_Word16 WebRtcNetEQ_dtfm_aTbl32Khz[8]=
{
    32462, 32394, 32311, 32210,
    31849, 31647, 31400, 31098
};
#endif

#ifdef NETEQ_48KHZ_WIDEBAND
/* a = 2*cos(2*pi*f/fs) in Q14, fs = 48 kHz */
const WebRtc_Word16 WebRtcNetEQ_dtfm_aTbl48Khz[8]=
{
    32632, 32602, 32564, 32520,
    32359, 32268, 32157, 32022
};
#endif

/*
 * Initialization values y[-1] = sin(w) = sin(2*pi*f/fs), for 8, 16, 32 and 48 kHz sample rate.
 * Table values in Q14.
 */
const WebRtc_Word16 WebRtcNetEQ_dtfm_yInitTab8Khz[8] =
{
    8528, 9315, 10163, 11036,
    13323, 14206, 15021, 15708
};

#ifdef NETEQ_WIDEBAND
const WebRtc_Word16 WebRtcNetEQ_dtfm_yInitTab16Khz[8]=
{
    4429, 4879, 5380, 5918,
    7490, 8207, 8979, 9801
};
#endif

#ifdef NETEQ_32KHZ_WIDEBAND
const WebRtc_Word16 WebRtcNetEQ_dtfm_yInitTab32Khz[8]=
{
    2235, 2468, 2728, 3010,
    3853, 4249, 4685, 5164
};
#endif

#ifdef NETEQ_48KHZ_WIDEBAND
const WebRtc_Word16 WebRtcNetEQ_dtfm_yInitTab48Khz[8]=
{
    1493, 1649, 1823, 2013,
    2582, 2851, 3148, 3476
};
#endif

/* Volume in dBm0 from 0 to -63, where 0 is the first table entry.
 Everything below -36 is discarded, wherefore the table stops at -36.
 Table entries are in Q14. (Indexed by the negated volume value.)
 */
const WebRtc_Word16 WebRtcNetEQ_dtfm_dBm0[37] = { 16141, 14386, 12821, 11427, 10184, 9077, 8090,
                                                  7210, 6426, 5727, 5104, 4549, 4054, 3614,
                                                  3221, 2870, 2558, 2280, 2032, 1811, 1614,
                                                  1439, 1282, 1143, 1018, 908, 809, 721, 643,
                                                  573, 510, 455, 405, 361, 322, 287, 256 };
/****************************************************************************
* WebRtcNetEQ_DTMFGenerate(...)
*
* Generate 10 ms DTMF signal according to input parameters.
*
* Input:
* - DTMFdecInst : DTMF instance
* - value : DTMF event number (0-15)
* - volume : Volume of generated signal (0-36)
* Volume is given in negative dBm0, i.e., volume == 0
* means 0 dBm0 while volume == 36 mean -36 dBm0.
* - sampFreq : Sample rate in Hz
*
* Output:
* - signal : Pointer to vector where DTMF signal is stored;
* Vector must be at least sampFreq/100 samples long.
* - DTMFdecInst : Updated DTMF instance
*
* Return value : >0 - Number of samples written to signal
* : <0 - error
*/
WebRtc_Word16 WebRtcNetEQ_DTMFGenerate(dtmf_tone_inst_t *DTMFdecInst, WebRtc_Word16 value,
                                       WebRtc_Word16 volume, WebRtc_Word16 *signal,
                                       WebRtc_UWord16 sampFreq, WebRtc_Word16 extFrameLen)
{
    const WebRtc_Word16 *aTbl; /* pointer to a-coefficient table */
    const WebRtc_Word16 *yInitTable; /* pointer to initialization value table */
    WebRtc_Word16 a1 = 0; /* a-coefficient for first tone (low tone) */
    WebRtc_Word16 a2 = 0; /* a-coefficient for second tone (high tone) */
    int i;
    int frameLen; /* number of samples to generate */
    int lowIndex;  /* row (low-tone) index into the coefficient tables */
    int highIndex; /* column (high-tone) index into the coefficient tables */
    WebRtc_Word32 tempVal;
    WebRtc_Word16 tempValLow;
    WebRtc_Word16 tempValHigh;

    /* Sanity check for volume (negative dBm0; 0..36 accepted) */
    if ((volume < 0) || (volume > 36))
    {
        return DTMF_DEC_PARAMETER_ERROR;
    }

    /* Sanity check for extFrameLen; -1 (or any negative... only -1 accepted)
       means "use the default 10 ms frame for the sample rate" */
    if (extFrameLen < -1)
    {
        return DTMF_DEC_PARAMETER_ERROR;
    }

    /* Select oscillator coefficient tables based on sample rate.
       Note the closing braces live inside the #ifdef regions so the chain
       stays syntactically valid for every build configuration. */
    if (sampFreq == 8000)
    {
        aTbl = WebRtcNetEQ_dtfm_aTbl8Khz;
        yInitTable = WebRtcNetEQ_dtfm_yInitTab8Khz;
        frameLen = 80;
#ifdef NETEQ_WIDEBAND
    }
    else if (sampFreq == 16000)
    {
        aTbl = WebRtcNetEQ_dtfm_aTbl16Khz;
        yInitTable = WebRtcNetEQ_dtfm_yInitTab16Khz;
        frameLen = 160;
#endif
#ifdef NETEQ_32KHZ_WIDEBAND
    }
    else if (sampFreq == 32000)
    {
        aTbl = WebRtcNetEQ_dtfm_aTbl32Khz;
        yInitTable = WebRtcNetEQ_dtfm_yInitTab32Khz;
        frameLen = 320;
#endif
#ifdef NETEQ_48KHZ_WIDEBAND
    }
    else if (sampFreq == 48000)
    {
        aTbl = WebRtcNetEQ_dtfm_aTbl48Khz;
        yInitTable = WebRtcNetEQ_dtfm_yInitTab48Khz;
        frameLen = 480;
#endif
    }
    else
    {
        /* unsupported sample rate */
        return DTMF_GEN_UNKNOWN_SAMP_FREQ;
    }

    /* A non-negative extFrameLen overrides the default frame length. */
    if (extFrameLen >= 0)
    {
        frameLen = extFrameLen;
    }

    /* select low frequency based on event value */
    switch (value)
    {
        case 1:
        case 2:
        case 3:
        case 12: /* first row on keypad */
        {
            lowIndex = 0; /* low frequency: 697 Hz */
            break;
        }
        case 4:
        case 5:
        case 6:
        case 13: /* second row on keypad */
        {
            lowIndex = 1; /* low frequency: 770 Hz */
            break;
        }
        case 7:
        case 8:
        case 9:
        case 14: /* third row on keypad */
        {
            lowIndex = 2; /* low frequency: 852 Hz */
            break;
        }
        case 0:
        case 10:
        case 11:
        case 15: /* fourth row on keypad */
        {
            lowIndex = 3; /* low frequency: 941 Hz */
            break;
        }
        default:
        {
            return DTMF_DEC_PARAMETER_ERROR;
        }
    } /* end switch */

    /* select high frequency based on event value */
    switch (value)
    {
        case 1:
        case 4:
        case 7:
        case 10: /* first column on keypad */
        {
            highIndex = 4; /* high frequency: 1209 Hz */
            break;
        }
        case 2:
        case 5:
        case 8:
        case 0: /* second column on keypad */
        {
            highIndex = 5;/* high frequency: 1336 Hz */
            break;
        }
        case 3:
        case 6:
        case 9:
        case 11: /* third column on keypad */
        {
            highIndex = 6;/* high frequency: 1477 Hz */
            break;
        }
        case 12:
        case 13:
        case 14:
        case 15: /* fourth column on keypad (special) */
        {
            highIndex = 7;/* high frequency: 1633 Hz */
            break;
        }
        default:
        {
            return DTMF_DEC_PARAMETER_ERROR;
        }
    } /* end switch */

    /* select coefficients based on results from switches above */
    a1 = aTbl[lowIndex]; /* coefficient for first (low) tone */
    a2 = aTbl[highIndex]; /* coefficient for second (high) tone */

    if (DTMFdecInst->reinit)
    {
        /* set initial values for the recursive model */
        DTMFdecInst->oldOutputLow[0] = yInitTable[lowIndex];
        DTMFdecInst->oldOutputLow[1] = 0;
        DTMFdecInst->oldOutputHigh[0] = yInitTable[highIndex];
        DTMFdecInst->oldOutputHigh[1] = 0;

        /* reset reinit flag */
        DTMFdecInst->reinit = 0;
    }

    /* generate signal sample by sample */
    for (i = 0; i < frameLen; i++)
    {
        /* Use recursion formula y[n] = a*y[n-1] - y[n-2]
           (a is Q14, hence the +8192 rounding and >>14) */
        tempValLow
            = (WebRtc_Word16) (((WEBRTC_SPL_MUL_16_16(a1, DTMFdecInst->oldOutputLow[1])
                + 8192) >> 14) - DTMFdecInst->oldOutputLow[0]);
        tempValHigh
            = (WebRtc_Word16) (((WEBRTC_SPL_MUL_16_16(a2, DTMFdecInst->oldOutputHigh[1])
                + 8192) >> 14) - DTMFdecInst->oldOutputHigh[0]);

        /* Update recursion memory: [0] holds y[n-2], [1] holds y[n-1] */
        DTMFdecInst->oldOutputLow[0] = DTMFdecInst->oldOutputLow[1];
        DTMFdecInst->oldOutputLow[1] = tempValLow;
        DTMFdecInst->oldOutputHigh[0] = DTMFdecInst->oldOutputHigh[1];
        DTMFdecInst->oldOutputHigh[1] = tempValHigh;

        /* scale high tone with 32768 (15 left shifts)
         and low tone with 23171 (3dB lower than high tone) */
        tempVal = WEBRTC_SPL_MUL_16_16(DTMF_AMP_LOW, tempValLow)
            + WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)tempValHigh, 15);

        /* Norm the signal to Q14 (with proper rounding) */
        tempVal = (tempVal + 16384) >> 15;

        /* Scale the signal to correct dbM0 value */
        signal[i] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(
            (WEBRTC_SPL_MUL_16_16(tempVal, WebRtcNetEQ_dtfm_dBm0[volume])
                + 8192), 14); /* volume value is in Q14; use proper rounding */
    }

    return frameLen;
}
#endif /* NETEQ_ATEVENT_DECODE */

View File

@ -1,73 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* This file contains the DTMF tone generator function.
*/
#ifndef DTMF_TONEGEN_H
#define DTMF_TONEGEN_H
#include "typedefs.h"
#include "neteq_defines.h"
#ifdef NETEQ_ATEVENT_DECODE
/* Must compile NetEQ with DTMF support to enable the functionality */
#define DTMF_AMP_LOW 23171 /* 3 dB lower than the high frequency */
/* The DTMF generator struct (part of DSP main struct DSPInst_t) */
typedef struct dtmf_tone_inst_t_
{
    WebRtc_Word16 reinit; /* non-zero if the oscillator model should
     be reinitialized for the next event */
    WebRtc_Word16 oldOutputLow[2]; /* oscillator recursion history (low tone);
     [0] is y[n-2], [1] is y[n-1] */
    WebRtc_Word16 oldOutputHigh[2]; /* oscillator recursion history (high tone);
     same layout as oldOutputLow */
    int lastDtmfSample; /* index to the first non-DTMF sample in the
     speech history, if non-negative */
}dtmf_tone_inst_t;
/****************************************************************************
* WebRtcNetEQ_DTMFGenerate(...)
*
* Generate 10 ms DTMF signal according to input parameters.
*
* Input:
* - DTMFdecInst : DTMF instance
* - value : DTMF event number (0-15)
* - volume : Volume of generated signal (0-36)
* Volume is given in negative dBm0, i.e., volume == 0
* means 0 dBm0 while volume == 36 mean -36 dBm0.
* - sampFreq : Sample rate in Hz
*
* Output:
* - signal : Pointer to vector where DTMF signal is stored;
* Vector must be at least sampFreq/100 samples long.
* - DTMFdecInst : Updated DTMF instance
*
* Return value : >0 - Number of samples written to signal
* : <0 - Error
*/
WebRtc_Word16 WebRtcNetEQ_DTMFGenerate(dtmf_tone_inst_t *DTMFdecInst,
WebRtc_Word16 value,
WebRtc_Word16 volume,
WebRtc_Word16 *signal,
WebRtc_UWord16 sampFreq,
WebRtc_Word16 frameLen
);
#endif /* NETEQ_ATEVENT_DECODE */
#endif /* DTMF_TONEGEN_H */

File diff suppressed because it is too large Load Diff

View File

@ -1,255 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* MCU struct and functions related to the MCU side operations.
*/
#ifndef MCU_H
#define MCU_H
#include "typedefs.h"
#include "codec_db.h"
#include "rtcp.h"
#include "packet_buffer.h"
#include "buffer_stats.h"
#include "neteq_statistics.h"
#ifdef NETEQ_ATEVENT_DECODE
#include "dtmf_buffer.h"
#endif
#define MAX_ONE_DESC 5 /* cannot do more than this many consecutive one-descriptor decodings */
#define MAX_LOSS_REPORT_PERIOD 60 /* number of seconds between auto-reset */
/* Scaling factors between the external (RTP) and internal timestamp domains;
   used by WebRtcNetEQ_ScaleTimestampExternalToInternal/InternalToExternal.
   (Exact scaling direction per enumerator not visible here -- see mcu.c.) */
enum TsScaling
{
    kTSnoScaling = 0,
    kTSscalingTwo,
    kTSscalingTwoThirds,
    kTSscalingFourThirds
};
/* Main MCU-side instance struct. */
typedef struct
{
    WebRtc_Word16 current_Codec;   /* presumably the codec-database index in use -- confirm in mcu.c */
    WebRtc_Word16 current_Payload; /* presumably the RTP payload type in use -- confirm in mcu.c */
    WebRtc_UWord32 timeStamp; /* Next timestamp that should be played */
    WebRtc_Word16 millisecondsPerCall;
    WebRtc_UWord16 timestampsPerCall; /* Output chunk size */
    WebRtc_UWord16 fs; /* sample rate in Hz */
    WebRtc_UWord32 ssrc; /* Current ssrc */
    WebRtc_Word16 new_codec;
    WebRtc_Word16 first_packet;

    /* MCU/DSP Communication layer */
    WebRtc_Word16 *pw16_readAddress;
    WebRtc_Word16 *pw16_writeAddress;
    void *main_inst; /* pointer back to the NetEQ main instance */

    CodecDbInst_t codec_DB_inst; /* Information about all the codecs, i.e. which
     functions to use and which codpoints that
     have been assigned */
    SplitInfo_t PayloadSplit_inst; /* Information about how the current codec
     payload should be split */
    WebRtcNetEQ_RTCP_t RTCP_inst; /* RTCP statistics */
    PacketBuf_t PacketBuffer_inst; /* The packet buffer */
    BufstatsInst_t BufferStat_inst; /* Statistics that are used to make decision
     for what the DSP should perform */
#ifdef NETEQ_ATEVENT_DECODE
    dtmf_inst_t DTMF_inst; /* DTMF event queue (compiled in only with AVT support) */
#endif
    int NoOfExpandCalls;
    WebRtc_Word16 AVT_PlayoutOn;
    enum WebRtcNetEQPlayoutMode NetEqPlayoutMode;

    WebRtc_Word16 one_desc; /* Number of times running on one desc */
    WebRtc_UWord32 lostTS; /* Number of timestamps lost */
    WebRtc_UWord32 lastReportTS; /* Timestamp elapsed since last report was given */

    /* Timestamp scaling state (external RTP domain vs. internal domain) */
    WebRtc_UWord32 externalTS;
    WebRtc_UWord32 internalTS;
    WebRtc_Word16 TSscalingInitialized;
    enum TsScaling scalingFactor;

    MCUStats_t statInst; /* MCU-side statistics */

#ifdef NETEQ_STEREO
    int usingStereo;
#endif

} MCUInst_t;
/****************************************************************************
* WebRtcNetEQ_McuReset(...)
*
* Reset the MCU instance.
*
* Input:
* - inst : MCU instance
*
* Return value : 0 - Ok
* <0 - Error
*/
int WebRtcNetEQ_McuReset(MCUInst_t *inst);
/****************************************************************************
* WebRtcNetEQ_ResetMcuInCallStats(...)
*
* Reset MCU-side statistics variables for the in-call statistics.
*
* Input:
* - inst : MCU instance
*
* Return value : 0 - Ok
* <0 - Error
*/
int WebRtcNetEQ_ResetMcuInCallStats(MCUInst_t *inst);
/****************************************************************************
* WebRtcNetEQ_ResetMcuJitterStat(...)
*
* Reset MCU-side statistics variables for the post-call statistics.
*
* Input:
* - inst : MCU instance
*
* Return value : 0 - Ok
* <0 - Error
*/
int WebRtcNetEQ_ResetMcuJitterStat(MCUInst_t *inst);
/****************************************************************************
* WebRtcNetEQ_McuAddressInit(...)
*
* Initializes MCU with read address and write address.
*
* Input:
* - inst : MCU instance
* - Data2McuAddress : Pointer to MCU address
* - Data2DspAddress : Pointer to DSP address
* - main_inst : Pointer to NetEQ main instance
*
* Return value : 0 - Ok
* <0 - Error
*/
int WebRtcNetEQ_McuAddressInit(MCUInst_t *inst, void * Data2McuAddress,
void * Data2DspAddress, void *main_inst);
/****************************************************************************
* WebRtcNetEQ_McuSetFs(...)
*
 * Sets the sample rate of the MCU instance.
*
* Input:
* - inst : MCU instance
* - fs_hz : Sample rate in Hz -- 8000, 16000, 32000, (48000)
*
* Return value : 0 - Ok
* <0 - Error
*/
int WebRtcNetEQ_McuSetFs(MCUInst_t *inst, WebRtc_UWord16 fs_hz);
/****************************************************************************
* WebRtcNetEQ_SignalMcu(...)
*
* Signal the MCU that data is available and ask for a RecOut decision.
*
* Input:
* - inst : MCU instance
*
* Return value : 0 - Ok
* <0 - Error
*/
int WebRtcNetEQ_SignalMcu(MCUInst_t *inst);
/****************************************************************************
* WebRtcNetEQ_RecInInternal(...)
*
* This function inserts a packet into the jitter buffer.
*
* Input:
* - MCU_inst : MCU instance
* - RTPpacket : The RTP packet, parsed into NetEQ's internal RTP struct
* - uw32_timeRec : Time stamp for the arrival of the packet (not RTP timestamp)
*
* Return value : 0 - Ok
* -1 - Error
*/
int WebRtcNetEQ_RecInInternal(MCUInst_t *MCU_inst, RTPPacket_t *RTPpacket,
WebRtc_UWord32 uw32_timeRec);
/****************************************************************************
 * WebRtcNetEQ_SplitAndInsertPayload(...)
 *
 * Split the packet according to split_inst and insert the parts into
 * Buffer_inst.
 *
 * Input:
 *      - packet        : The RTP packet, parsed into NetEQ's internal RTP struct
 *      - Buffer_inst   : The packet buffer to insert into
 *      - split_inst    : Payload split information for the packet's codec
 *
 * Output:
 *      - flushed       : Flush indicator; set if the buffer was flushed
 *                        during insertion (confirm against implementation)
 *
 * Return value         : 0 - Ok
 *                        -1 - Error
 */
int WebRtcNetEQ_SplitAndInsertPayload(RTPPacket_t *packet, PacketBuf_t *Buffer_inst,
SplitInfo_t *split_inst, WebRtc_Word16 *flushed);
/****************************************************************************
* WebRtcNetEQ_GetTimestampScaling(...)
*
* Update information about timestamp scaling for a payload type
* in MCU_inst->scalingFactor.
*
* Input:
* - MCU_inst : MCU instance
* - rtpPayloadType : RTP payload number
*
* Return value : 0 - Ok
* -1 - Error
*/
int WebRtcNetEQ_GetTimestampScaling(MCUInst_t *MCU_inst, int rtpPayloadType);
/****************************************************************************
* WebRtcNetEQ_ScaleTimestampExternalToInternal(...)
*
* Convert from external to internal timestamp using current scaling info.
*
* Input:
* - MCU_inst : MCU instance
* - externalTS : External timestamp
*
* Return value : Internal timestamp
*/
WebRtc_UWord32 WebRtcNetEQ_ScaleTimestampExternalToInternal(const MCUInst_t *MCU_inst,
WebRtc_UWord32 externalTS);
/****************************************************************************
 * WebRtcNetEQ_ScaleTimestampInternalToExternal(...)
 *
 * Convert from internal to external timestamp using current scaling info.
 *
 * Input:
 *      - MCU_inst      : MCU instance
 *      - internalTS    : Internal timestamp
 *
 * Return value         : External timestamp
*/
WebRtc_UWord32 WebRtcNetEQ_ScaleTimestampInternalToExternal(const MCUInst_t *MCU_inst,
WebRtc_UWord32 internalTS);
#endif

View File

@ -1,33 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "mcu.h"
#include <string.h> /* to define NULL */
/*
* Initializes MCU with read address and write address
*/
/*
 * Hook the MCU instance up to its communication addresses and queue an
 * initial expand instruction for the DSP side.
 */
int WebRtcNetEQ_McuAddressInit(MCUInst_t *inst, void * Data2McuAddress,
                               void * Data2DspAddress, void *main_inst)
{
    WebRtc_Word16 *pw16_write = (WebRtc_Word16 *) Data2DspAddress;

    inst->main_inst = main_inst;
    inst->pw16_readAddress = (WebRtc_Word16 *) Data2McuAddress;
    inst->pw16_writeAddress = pw16_write;

    /* The MCU is called once every 10 ms. */
    inst->millisecondsPerCall = 10;

    /* Do expansions in the beginning. */
    if (pw16_write != NULL)
    {
        pw16_write[0] = DSP_INSTR_EXPAND;
    }

    return 0;
}

View File

@ -1,37 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* Communication between MCU and DSP sides.
*/
#include "mcu_dsp_common.h"
#include <string.h>
/* Initialize instances with read and write address */
/*
 * Initialize both halves of the instance with (empty) read and write
 * addresses; returns non-zero if either initialization reported an error.
 */
int WebRtcNetEQ_DSPinit(MainInst_t *inst)
{
    int dspRes = WebRtcNetEQ_AddressInit(&inst->DSPinst, NULL, NULL, inst);
    int mcuRes = WebRtcNetEQ_McuAddressInit(&inst->MCUinst, NULL, NULL, inst);

    return dspRes | mcuRes;
}
/* The DSP side will call this function to interrupt the MCU side */
/* Called by the DSP side to interrupt the MCU side via the shared memory. */
int WebRtcNetEQ_DSP2MCUinterrupt(MainInst_t *inst, WebRtc_Word16 *pw16_shared_mem)
{
    MCUInst_t *mcu = &inst->MCUinst;

    /* Point both communication channels at the shared memory area. */
    mcu->pw16_readAddress = pw16_shared_mem;
    mcu->pw16_writeAddress = pw16_shared_mem;

    return WebRtcNetEQ_SignalMcu(mcu);
}

View File

@ -1,61 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* The main NetEQ instance, which is where the DSP and MCU sides join.
*/
#ifndef MCU_DSP_COMMON_H
#define MCU_DSP_COMMON_H
#include "typedefs.h"
#include "dsp.h"
#include "mcu.h"
/* Define size of shared memory area. */
#if defined(NETEQ_48KHZ_WIDEBAND)
#define SHARED_MEM_SIZE (6*640)
#elif defined(NETEQ_32KHZ_WIDEBAND)
#define SHARED_MEM_SIZE (4*640)
#elif defined(NETEQ_WIDEBAND)
#define SHARED_MEM_SIZE (2*640)
#else
#define SHARED_MEM_SIZE 640
#endif
/* Struct to hold the NetEQ instance; this is where the DSP and MCU sides join. */
typedef struct
{
    DSPInst_t DSPinst; /* DSP part of the NetEQ instance */
    MCUInst_t MCUinst; /* MCU part of the NetEQ instance */
    WebRtc_Word16 ErrorCode; /* Store last error code */
#ifdef NETEQ_STEREO
    WebRtc_Word16 masterSlave; /* 0 = not set, 1 = master, 2 = slave */
#endif /* NETEQ_STEREO */
} MainInst_t;

/* Struct used for communication between DSP and MCU sides of NetEQ */
typedef struct
{
    WebRtc_UWord32 playedOutTS; /* Timestamp position at end of DSP data */
    WebRtc_UWord16 samplesLeft; /* Number of samples stored */
    WebRtc_Word16 MD; /* Multiple description codec information */
    WebRtc_Word16 lastMode; /* Latest mode of NetEQ playout */
    WebRtc_Word16 frameLen; /* Frame length of previously decoded packet */
} DSP2MCU_info_t;
/* Initialize instances with read and write address */
int WebRtcNetEQ_DSPinit(MainInst_t *inst);
/* The DSP side will call this function to interrupt the MCU side */
int WebRtcNetEQ_DSP2MCUinterrupt(MainInst_t *inst, WebRtc_Word16 *pw16_shared_mem);
#endif

View File

@ -1,118 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* Reset MCU side data.
*/
#include "mcu.h"
#include <string.h>
#include "automode.h"
/*
 * Full reset of the MCU side of a NetEQ instance: clears the DSP
 * communication addresses, codec database, packet buffer and decision
 * statistics, and restores the 8 kHz / 10 ms defaults.
 * Returns 0 on success, or the DTMF decoder's error code on failure.
 */
int WebRtcNetEQ_McuReset(MCUInst_t *inst)
{
#ifdef NETEQ_ATEVENT_DECODE
    int ok;
#endif

    /* MCU/DSP Communication layer */
    inst->pw16_readAddress = NULL;
    inst->pw16_writeAddress = NULL;
    inst->main_inst = NULL;
    inst->one_desc = 0;
    /* NOTE(review): this field is zeroed again by the memset of
       BufferStat_inst below, so this assignment is redundant. */
    inst->BufferStat_inst.Automode_inst.extraDelayMs = 0;
    inst->NetEqPlayoutMode = kPlayoutOn;

    WebRtcNetEQ_DbReset(&inst->codec_DB_inst);
    memset(&inst->PayloadSplit_inst, 0, sizeof(SplitInfo_t));

    /* Clear the Packet buffer and the pointer to memory storage */
    WebRtcNetEQ_PacketBufferFlush(&inst->PacketBuffer_inst);
    inst->PacketBuffer_inst.memorySizeW16 = 0;
    inst->PacketBuffer_inst.maxInsertPositions = 0;

    /* Clear the decision and delay history */
    memset(&inst->BufferStat_inst, 0, sizeof(BufstatsInst_t));
#ifdef NETEQ_ATEVENT_DECODE
    /* Re-initialize the DTMF decoder at 8 kHz; propagate any error. */
    ok = WebRtcNetEQ_DtmfDecoderInit(&inst->DTMF_inst, 8000, 560);
    if (ok != 0)
    {
        return ok;
    }
#endif
    inst->NoOfExpandCalls = 0;
    inst->current_Codec = -1;
    inst->current_Payload = -1;

    /* Default timing: 10 ms per call at 8 kHz => 80 timestamps per call. */
    inst->millisecondsPerCall = 10;
    inst->timestampsPerCall = inst->millisecondsPerCall * 8;
    inst->fs = 8000;
    inst->first_packet = 1;

    /* Reset in-call and post-call statistics counters. */
    WebRtcNetEQ_ResetMcuInCallStats(inst);
    WebRtcNetEQ_ResetMcuJitterStat(inst);

    WebRtcNetEQ_ResetAutomode(&(inst->BufferStat_inst.Automode_inst),
        inst->PacketBuffer_inst.maxInsertPositions);

    return 0;
}
/*
* Reset MCU-side statistics variables for the in-call statistics.
*/
/*
 * Reset the MCU-side counters that feed the in-call statistics report.
 */
int WebRtcNetEQ_ResetMcuInCallStats(MCUInst_t *inst)
{
    /* Restart the loss and reporting timestamp counters. */
    inst->lastReportTS = 0;
    inst->lostTS = 0;

    /* Forget any packets discarded so far. */
    inst->PacketBuffer_inst.discardedPackets = 0;

    return 0;
}
/*
* Reset all MCU-side statistics variables for the post-call statistics.
*/
/*
 * Reset all MCU-side variables that feed the post-call (jitter) statistics.
 */
int WebRtcNetEQ_ResetMcuJitterStat(MCUInst_t *inst)
{
    /* Jitter-buffer size tracking; the min tracker starts at the sentinel max. */
    inst->statInst.jbMinSize = 0xFFFFFFFF;
    inst->statInst.jbMaxSize = 0;
    inst->statInst.jbAvgSizeQ16 = 0;
    inst->statInst.jbAvgCount = 0;
    inst->statInst.jbChangeCount = 0;

    /* Per-packet delay tracking; min tracker starts at the sentinel max. */
    inst->statInst.minPacketDelayMs = 0xFFFFFFFF;
    inst->statInst.maxPacketDelayMs = 0;
    inst->statInst.avgPacketDelayMs = 0;
    inst->statInst.avgPacketCount = 0;

    /* Concealment (expand) duration counters. */
    inst->statInst.generatedSilentMs = 0;
    inst->statInst.countExpandMoreThan120ms = 0;
    inst->statInst.countExpandMoreThan250ms = 0;
    inst->statInst.countExpandMoreThan500ms = 0;
    inst->statInst.countExpandMoreThan2000ms = 0;
    inst->statInst.longestExpandDurationMs = 0;
    inst->statInst.accelerateMs = 0;

    /* Packet buffer lifetime totals. */
    inst->PacketBuffer_inst.totalDiscardedPackets = 0;
    inst->PacketBuffer_inst.totalFlushedPackets = 0;

    /* Automode inter-arrival-time counters. */
    inst->BufferStat_inst.Automode_inst.countIAT500ms = 0;
    inst->BufferStat_inst.Automode_inst.countIAT1000ms = 0;
    inst->BufferStat_inst.Automode_inst.countIAT2000ms = 0;
    inst->BufferStat_inst.Automode_inst.longestIATms = 0;

    return 0;
}

View File

@ -1,548 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* This is the function to merge a new packet with expanded data after a packet loss.
*/
#include "dsp.h"
#include "signal_processing_library.h"
#include "dsp_helpfunctions.h"
#include "neteq_error_codes.h"
/****************************************************************************
* WebRtcNetEQ_Merge(...)
*
* This function...
*
* Input:
* - inst : NetEQ DSP instance
* - scratchPtr : Pointer to scratch vector.
* - decoded : Pointer to new decoded speech.
* - len : Number of samples in pw16_decoded.
*
*
* Output:
* - inst : Updated user information
* - outData : Pointer to a memory space where the output data
* should be stored
* - pw16_len : Number of samples written to pw16_outData
*
* Return value : 0 - Ok
* <0 - Error
*/
/* Scratch usage:
Type Name size startpos endpos
WebRtc_Word16 pw16_expanded 210*fs/8000 0 209*fs/8000
WebRtc_Word16 pw16_expandedLB 100 210*fs/8000 99+210*fs/8000
WebRtc_Word16 pw16_decodedLB 40 100+210*fs/8000 139+210*fs/8000
WebRtc_Word32 pw32_corr 2*60 140+210*fs/8000 260+210*fs/8000
WebRtc_Word16 pw16_corrVec 68 210*fs/8000 67+210*fs/8000
[gap in scratch vector]
func WebRtcNetEQ_Expand 40+370*fs/8000 126*fs/8000 39+496*fs/8000
Total: 40+496*fs/8000
*/
#define SCRATCH_pw16_expanded 0
#if (defined(NETEQ_48KHZ_WIDEBAND))
#define SCRATCH_pw16_expandedLB 1260
#define SCRATCH_pw16_decodedLB 1360
#define SCRATCH_pw32_corr 1400
#define SCRATCH_pw16_corrVec 1260
#define SCRATCH_NETEQ_EXPAND 756
#elif (defined(NETEQ_32KHZ_WIDEBAND))
#define SCRATCH_pw16_expandedLB 840
#define SCRATCH_pw16_decodedLB 940
#define SCRATCH_pw32_corr 980
#define SCRATCH_pw16_corrVec 840
#define SCRATCH_NETEQ_EXPAND 504
#elif (defined(NETEQ_WIDEBAND))
#define SCRATCH_pw16_expandedLB 420
#define SCRATCH_pw16_decodedLB 520
#define SCRATCH_pw32_corr 560
#define SCRATCH_pw16_corrVec 420
#define SCRATCH_NETEQ_EXPAND 252
#else /* NB */
#define SCRATCH_pw16_expandedLB 210
#define SCRATCH_pw16_decodedLB 310
#define SCRATCH_pw32_corr 350
#define SCRATCH_pw16_corrVec 210
#define SCRATCH_NETEQ_EXPAND 126
#endif
/*
 * Merge newly decoded speech with expanded (concealment) data after a
 * packet loss: generate an expansion, find the best overlap position by
 * cross-correlation in the 4 kHz domain, energy-match the new frame, and
 * cross-fade (overlap-add) the two signals into pw16_outData.
 */
int WebRtcNetEQ_Merge(DSPInst_t *inst,
#ifdef SCRATCH
WebRtc_Word16 *pw16_scratchPtr,
#endif
WebRtc_Word16 *pw16_decoded, int len, WebRtc_Word16 *pw16_outData,
WebRtc_Word16 *pw16_len)
{
WebRtc_Word16 fs_mult;
WebRtc_Word16 fs_shift;
WebRtc_Word32 w32_En_new_frame, w32_En_old_frame;
WebRtc_Word16 w16_expmax, w16_newmax;
WebRtc_Word16 w16_tmp, w16_tmp2;
WebRtc_Word32 w32_tmp;
#ifdef SCRATCH
/* Work buffers carved out of the caller-provided scratch area
   (offsets documented in the scratch-usage table above). */
WebRtc_Word16 *pw16_expanded = pw16_scratchPtr + SCRATCH_pw16_expanded;
WebRtc_Word16 *pw16_expandedLB = pw16_scratchPtr + SCRATCH_pw16_expandedLB;
WebRtc_Word16 *pw16_decodedLB = pw16_scratchPtr + SCRATCH_pw16_decodedLB;
WebRtc_Word32 *pw32_corr = (WebRtc_Word32*) (pw16_scratchPtr + SCRATCH_pw32_corr);
WebRtc_Word16 *pw16_corrVec = pw16_scratchPtr + SCRATCH_pw16_corrVec;
#else
/* Stack-allocated work buffers when no scratch memory is used. */
WebRtc_Word16 pw16_expanded[(125+80+5)*FSMULT];
WebRtc_Word16 pw16_expandedLB[100];
WebRtc_Word16 pw16_decodedLB[40];
WebRtc_Word32 pw32_corr[60];
WebRtc_Word16 pw16_corrVec[4+60+4];
#endif
/* pw16_corr points past the 4-sample guard band at the start of pw16_corrVec. */
WebRtc_Word16 *pw16_corr = &pw16_corrVec[4];
WebRtc_Word16 w16_stopPos, w16_bestIndex, w16_interpLen;
WebRtc_Word16 w16_bestVal; /* bestVal is dummy */
WebRtc_Word16 w16_startfact, w16_inc;
WebRtc_Word16 w16_expandedLen;
WebRtc_Word16 w16_startPos;
WebRtc_Word16 w16_expLen, w16_newLen = 0;
WebRtc_Word16 *pw16_decodedOut;
WebRtc_Word16 w16_muted;
int w16_decodedLen = len;
#ifdef NETEQ_STEREO
MasterSlaveInfo *msInfo = inst->msInfo;
#endif
/* Sampling-rate dependent constants: fs_mult = fs / 8000. */
fs_mult = WebRtcSpl_DivW32W16ResW16(inst->fs, 8000);
fs_shift = 30 - WebRtcSpl_NormW32(fs_mult); /* Note that this is not "exact" for 48kHz */
/*************************************
* Generate data to merge with
*************************************/
/*
* Check how much data that is left since earlier
* (at least there should be the overlap)...
*/
w16_startPos = inst->endPosition - inst->curPosition;
/* Get one extra expansion to merge and overlap with */
inst->ExpandInst.w16_stopMuting = 1;
inst->ExpandInst.w16_lagsDirection = 1; /* make sure we get the "optimal" lag */
inst->ExpandInst.w16_lagsPosition = -1; /* out of the 3 possible ones */
w16_expandedLen = 0; /* Does not fill any function currently */
if (w16_startPos >= 210 * FSMULT)
{
/*
* The number of samples available in the sync buffer is more than what fits in
* pw16_expanded. Keep the first 210*FSMULT samples, but shift them towards the end of
* the buffer. This is ok, since all of the buffer will be expand data anyway, so as
* long as the beginning is left untouched, we're fine.
*/
w16_tmp = w16_startPos - 210 * FSMULT; /* length difference */
WEBRTC_SPL_MEMMOVE_W16(&inst->speechBuffer[inst->curPosition+w16_tmp] ,
&inst->speechBuffer[inst->curPosition], 210*FSMULT);
inst->curPosition += w16_tmp; /* move start position of sync buffer accordingly */
w16_startPos = 210 * FSMULT; /* this is the truncated length */
}
WebRtcNetEQ_Expand(inst,
#ifdef SCRATCH
pw16_scratchPtr + SCRATCH_NETEQ_EXPAND,
#endif
pw16_expanded, /* let Expand write to beginning of pw16_expanded to avoid overflow */
&w16_newLen, 0);
/*
* Now shift the data in pw16_expanded to where it belongs.
* Truncate all that ends up outside the vector.
*/
WEBRTC_SPL_MEMMOVE_W16(&pw16_expanded[w16_startPos], pw16_expanded,
WEBRTC_SPL_MIN(w16_newLen,
WEBRTC_SPL_MAX(210*FSMULT - w16_startPos, 0) ) );
inst->ExpandInst.w16_stopMuting = 0;
/* Copy what is left since earlier into the expanded vector */
WEBRTC_SPL_MEMCPY_W16(pw16_expanded, &inst->speechBuffer[inst->curPosition], w16_startPos);
/*
* Do "ugly" copy and paste from the expanded in order to generate more data
* to correlate (but not interpolate) with.
*/
w16_expandedLen = (120 + 80 + 2) * fs_mult;
w16_expLen = w16_startPos + w16_newLen;
if (w16_expLen < w16_expandedLen)
{
/* Repeat the last expansion lag until the buffer is full. */
while ((w16_expLen + w16_newLen) < w16_expandedLen)
{
WEBRTC_SPL_MEMCPY_W16(&pw16_expanded[w16_expLen], &pw16_expanded[w16_startPos],
w16_newLen);
w16_expLen += w16_newLen;
}
/* Copy last part (fraction of a whole expansion) */
WEBRTC_SPL_MEMCPY_W16(&pw16_expanded[w16_expLen], &pw16_expanded[w16_startPos],
(w16_expandedLen-w16_expLen));
}
w16_expLen = w16_expandedLen;
/* Adjust muting factor (main muting factor times expand muting factor) */
inst->w16_muteFactor
= (WebRtc_Word16) WEBRTC_SPL_MUL_16_16_RSFT(inst->w16_muteFactor,
inst->ExpandInst.w16_expandMuteFactor, 14);
/* Adjust muting factor if new vector is more or less of the BGN energy */
len = WEBRTC_SPL_MIN(64*fs_mult, w16_decodedLen);
w16_expmax = WebRtcSpl_MaxAbsValueW16(pw16_expanded, (WebRtc_Word16) len);
w16_newmax = WebRtcSpl_MaxAbsValueW16(pw16_decoded, (WebRtc_Word16) len);
/* Calculate energy of old data */
w16_tmp = 6 + fs_shift - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_expmax, w16_expmax));
w16_tmp = WEBRTC_SPL_MAX(w16_tmp,0);
w32_En_old_frame = WebRtcNetEQ_DotW16W16(pw16_expanded, pw16_expanded, len, w16_tmp);
/* Calculate energy of new data */
w16_tmp2 = 6 + fs_shift - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_newmax, w16_newmax));
w16_tmp2 = WEBRTC_SPL_MAX(w16_tmp2,0);
w32_En_new_frame = WebRtcNetEQ_DotW16W16(pw16_decoded, pw16_decoded, len, w16_tmp2);
/* Align to same Q-domain */
if (w16_tmp2 > w16_tmp)
{
w32_En_old_frame = WEBRTC_SPL_RSHIFT_W32(w32_En_old_frame, (w16_tmp2-w16_tmp));
}
else
{
w32_En_new_frame = WEBRTC_SPL_RSHIFT_W32(w32_En_new_frame, (w16_tmp-w16_tmp2));
}
/* Calculate muting factor to use for new frame */
if (w32_En_new_frame > w32_En_old_frame)
{
/* Normalize w32_En_new_frame to 14 bits */
w16_tmp = WebRtcSpl_NormW32(w32_En_new_frame) - 17;
w32_En_new_frame = WEBRTC_SPL_SHIFT_W32(w32_En_new_frame, w16_tmp);
/*
* Put w32_En_old_frame in a domain 14 higher, so that
* w32_En_old_frame/w32_En_new_frame is in Q14
*/
w16_tmp = w16_tmp + 14;
w32_En_old_frame = WEBRTC_SPL_SHIFT_W32(w32_En_old_frame, w16_tmp);
w16_tmp
= WebRtcSpl_DivW32W16ResW16(w32_En_old_frame, (WebRtc_Word16) w32_En_new_frame);
/* Calculate sqrt(w32_En_old_frame/w32_En_new_frame) in Q14 */
w16_muted = (WebRtc_Word16) WebRtcSpl_Sqrt(
WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)w16_tmp,14));
}
else
{
w16_muted = 16384; /* Set = 1.0 when old frame has higher energy than new */
}
/* Raise the continued muting factor to w16_muted if w16_muteFactor is lower */
if (w16_muted > inst->w16_muteFactor)
{
inst->w16_muteFactor = WEBRTC_SPL_MIN(w16_muted, 16384);
}
#ifdef NETEQ_STEREO
/* Sanity for msInfo */
if (msInfo == NULL)
{
/* this should not happen here */
return MASTER_SLAVE_ERROR;
}
/* do not downsample and calculate correlations for slave instance(s) */
if ((msInfo->msMode == NETEQ_MASTER) || (msInfo->msMode == NETEQ_MONO))
{
#endif
/*********************************************
* Downsample to 4kHz and find best overlap
*********************************************/
/* Downsample to 4 kHz */
if (inst->fs == 8000)
{
WebRtcSpl_DownsampleFast(&pw16_expanded[2], (WebRtc_Word16) (w16_expandedLen - 2),
pw16_expandedLB, (WebRtc_Word16) (100),
(WebRtc_Word16*) WebRtcNetEQ_kDownsample8kHzTbl, (WebRtc_Word16) 3,
(WebRtc_Word16) 2, (WebRtc_Word16) 0);
if (w16_decodedLen <= 80)
{
/* Not quite long enough, so we have to cheat a bit... */
WebRtcSpl_DownsampleFast(&pw16_decoded[2], (WebRtc_Word16) 80, pw16_decodedLB,
(WebRtc_Word16) (40), (WebRtc_Word16*) WebRtcNetEQ_kDownsample8kHzTbl,
(WebRtc_Word16) 3, (WebRtc_Word16) 2, (WebRtc_Word16) 0);
/* Zero the tail beyond the valid downsampled samples. */
w16_tmp = ((w16_decodedLen - 2) >> 1);
WebRtcSpl_MemSetW16(&pw16_decodedLB[w16_tmp], 0, (40 - w16_tmp));
}
else
{
WebRtcSpl_DownsampleFast(&pw16_decoded[2],
(WebRtc_Word16) (w16_decodedLen - 2), pw16_decodedLB,
(WebRtc_Word16) (40), (WebRtc_Word16*) WebRtcNetEQ_kDownsample8kHzTbl,
(WebRtc_Word16) 3, (WebRtc_Word16) 2, (WebRtc_Word16) 0);
}
#ifdef NETEQ_WIDEBAND
}
else if (inst->fs==16000)
{
WebRtcSpl_DownsampleFast(
&pw16_expanded[4], (WebRtc_Word16)(w16_expandedLen-4),
pw16_expandedLB, (WebRtc_Word16)(100),
(WebRtc_Word16*)WebRtcNetEQ_kDownsample16kHzTbl, (WebRtc_Word16)5,
(WebRtc_Word16)4, (WebRtc_Word16)0);
if (w16_decodedLen<=160)
{
/* Not quite long enough, so we have to cheat a bit... */
WebRtcSpl_DownsampleFast(
&pw16_decoded[4], (WebRtc_Word16)160,
pw16_decodedLB, (WebRtc_Word16)(40),
(WebRtc_Word16*)WebRtcNetEQ_kDownsample16kHzTbl, (WebRtc_Word16)5,
(WebRtc_Word16)4, (WebRtc_Word16)0);
w16_tmp = ((w16_decodedLen-4)>>2);
WebRtcSpl_MemSetW16(&pw16_decodedLB[w16_tmp], 0, (40-w16_tmp));
}
else
{
WebRtcSpl_DownsampleFast(
&pw16_decoded[4], (WebRtc_Word16)(w16_decodedLen-4),
pw16_decodedLB, (WebRtc_Word16)(40),
(WebRtc_Word16*)WebRtcNetEQ_kDownsample16kHzTbl, (WebRtc_Word16)5,
(WebRtc_Word16)4, (WebRtc_Word16)0);
}
#endif
#ifdef NETEQ_32KHZ_WIDEBAND
}
else if (inst->fs==32000)
{
WebRtcSpl_DownsampleFast(
&pw16_expanded[6], (WebRtc_Word16)(w16_expandedLen-6),
pw16_expandedLB, (WebRtc_Word16)(100),
(WebRtc_Word16*)WebRtcNetEQ_kDownsample32kHzTbl, (WebRtc_Word16)7,
(WebRtc_Word16)8, (WebRtc_Word16)0);
if (w16_decodedLen<=320)
{
/* Not quite long enough, so we have to cheat a bit... */
WebRtcSpl_DownsampleFast(
&pw16_decoded[6], (WebRtc_Word16)320,
pw16_decodedLB, (WebRtc_Word16)(40),
(WebRtc_Word16*)WebRtcNetEQ_kDownsample32kHzTbl, (WebRtc_Word16)7,
(WebRtc_Word16)8, (WebRtc_Word16)0);
w16_tmp = ((w16_decodedLen-6)>>3);
WebRtcSpl_MemSetW16(&pw16_decodedLB[w16_tmp], 0, (40-w16_tmp));
}
else
{
WebRtcSpl_DownsampleFast(
&pw16_decoded[6], (WebRtc_Word16)(w16_decodedLen-6),
pw16_decodedLB, (WebRtc_Word16)(40),
(WebRtc_Word16*)WebRtcNetEQ_kDownsample32kHzTbl, (WebRtc_Word16)7,
(WebRtc_Word16)8, (WebRtc_Word16)0);
}
#endif
#ifdef NETEQ_48KHZ_WIDEBAND
}
else /* if (inst->fs==48000) */
{
WebRtcSpl_DownsampleFast(
&pw16_expanded[6], (WebRtc_Word16)(w16_expandedLen-6),
pw16_expandedLB, (WebRtc_Word16)(100),
(WebRtc_Word16*)WebRtcNetEQ_kDownsample48kHzTbl, (WebRtc_Word16)7,
(WebRtc_Word16)12, (WebRtc_Word16)0);
if (w16_decodedLen<=320)
{
/* Not quite long enough, so we have to cheat a bit... */
WebRtcSpl_DownsampleFast(
&pw16_decoded[6], (WebRtc_Word16)320,
pw16_decodedLB, (WebRtc_Word16)(40),
(WebRtc_Word16*)WebRtcNetEQ_kDownsample48kHzTbl, (WebRtc_Word16)7,
(WebRtc_Word16)12, (WebRtc_Word16)0);
w16_tmp = ((w16_decodedLen-6)>>3);
WebRtcSpl_MemSetW16(&pw16_decodedLB[w16_tmp], 0, (40-w16_tmp));
}
else
{
WebRtcSpl_DownsampleFast(
&pw16_decoded[6], (WebRtc_Word16)(w16_decodedLen-6),
pw16_decodedLB, (WebRtc_Word16)(40),
(WebRtc_Word16*)WebRtcNetEQ_kDownsample48kHzTbl, (WebRtc_Word16)7,
(WebRtc_Word16)12, (WebRtc_Word16)0);
}
#endif
}
/* Calculate correlation without any normalization (40 samples) */
w16_tmp = WebRtcSpl_DivW32W16ResW16((WebRtc_Word32) inst->ExpandInst.w16_maxLag,
(WebRtc_Word16) (fs_mult * 2)) + 1;
w16_stopPos = WEBRTC_SPL_MIN(60, w16_tmp);
/* Choose a pre-scaling shift if the product of maxima could overflow. */
w32_tmp = WEBRTC_SPL_MUL_16_16(w16_expmax, w16_newmax);
if (w32_tmp > 26843546)
{
w16_tmp = 3;
}
else
{
w16_tmp = 0;
}
WebRtcNetEQ_CrossCorr(pw32_corr, pw16_decodedLB, pw16_expandedLB, 40,
(WebRtc_Word16) w16_stopPos, w16_tmp, 1);
/* Normalize correlation to 14 bits and put in a WebRtc_Word16 vector */
WebRtcSpl_MemSetW16(pw16_corrVec, 0, (4 + 60 + 4));
w32_tmp = WebRtcSpl_MaxAbsValueW32(pw32_corr, w16_stopPos);
w16_tmp = 17 - WebRtcSpl_NormW32(w32_tmp);
w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);
WebRtcSpl_VectorBitShiftW32ToW16(pw16_corr, w16_stopPos, pw32_corr, w16_tmp);
/* Calculate allowed starting point for peak finding.
The peak location bestIndex must fulfill two criteria:
(1) w16_bestIndex+w16_decodedLen < inst->timestampsPerCall+inst->ExpandInst.w16_overlap
(2) w16_bestIndex+w16_decodedLen < w16_startPos */
w16_tmp = WEBRTC_SPL_MAX(0, WEBRTC_SPL_MAX(w16_startPos,
inst->timestampsPerCall+inst->ExpandInst.w16_overlap) - w16_decodedLen);
/* Downscale starting index to 4kHz domain */
w16_tmp2 = WebRtcSpl_DivW32W16ResW16((WebRtc_Word32) w16_tmp,
(WebRtc_Word16) (fs_mult << 1));
#ifdef NETEQ_STEREO
} /* end if (msInfo->msMode != NETEQ_SLAVE) */
if ((msInfo->msMode == NETEQ_MASTER) || (msInfo->msMode == NETEQ_MONO))
{
/* This is master or mono instance; find peak */
WebRtcNetEQ_PeakDetection(&pw16_corr[w16_tmp2], w16_stopPos, 1, fs_mult, &w16_bestIndex,
&w16_bestVal);
w16_bestIndex += w16_tmp; /* compensate for modified starting index */
msInfo->bestIndex = w16_bestIndex;
}
else if (msInfo->msMode == NETEQ_SLAVE)
{
/* Get peak location from master instance */
w16_bestIndex = msInfo->bestIndex;
}
else
{
/* Invalid mode */
return MASTER_SLAVE_ERROR;
}
#else /* NETEQ_STEREO */
/* Find peak */
WebRtcNetEQ_PeakDetection(&pw16_corr[w16_tmp2], w16_stopPos, 1, fs_mult, &w16_bestIndex,
&w16_bestVal);
w16_bestIndex += w16_tmp; /* compensate for modified starting index */
#endif /* NETEQ_STEREO */
/*
* Ensure that underrun does not occur for 10ms case => we have to get at least
* 10ms + overlap . (This should never happen thanks to the above modification of
* peak-finding starting point.)
* */
while ((w16_bestIndex + w16_decodedLen) < (inst->timestampsPerCall
+ inst->ExpandInst.w16_overlap) || w16_bestIndex + w16_decodedLen < w16_startPos)
{
w16_bestIndex += w16_newLen; /* Jump one lag ahead */
}
pw16_decodedOut = pw16_outData + w16_bestIndex;
/* Mute the new decoded data if needed (and unmute it linearly) */
w16_interpLen = WEBRTC_SPL_MIN(60*fs_mult,
w16_expandedLen-w16_bestIndex); /* this is the overlapping part of pw16_expanded */
w16_interpLen = WEBRTC_SPL_MIN(w16_interpLen, w16_decodedLen);
w16_inc = WebRtcSpl_DivW32W16ResW16(4194,
fs_mult); /* in Q20, 0.004 for NB and 0.002 for WB */
if (inst->w16_muteFactor < 16384)
{
/* Ramp the decoded data up from the current mute factor. */
WebRtcNetEQ_UnmuteSignal(pw16_decoded, &inst->w16_muteFactor, pw16_decoded, w16_inc,
(WebRtc_Word16) w16_interpLen);
WebRtcNetEQ_UnmuteSignal(&pw16_decoded[w16_interpLen], &inst->w16_muteFactor,
&pw16_decodedOut[w16_interpLen], w16_inc,
(WebRtc_Word16) (w16_decodedLen - w16_interpLen));
}
else
{
/* No muting needed */
WEBRTC_SPL_MEMMOVE_W16(&pw16_decodedOut[w16_interpLen], &pw16_decoded[w16_interpLen],
(w16_decodedLen-w16_interpLen));
}
/* Do overlap and interpolate linearly */
w16_inc = WebRtcSpl_DivW32W16ResW16(16384, (WebRtc_Word16) (w16_interpLen + 1)); /* Q14 */
w16_startfact = (16384 - w16_inc);
WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_expanded, w16_bestIndex);
/* Cross-fade from the expanded signal into the decoded signal. */
WebRtcNetEQ_MixVoiceUnvoice(pw16_decodedOut, &pw16_expanded[w16_bestIndex], pw16_decoded,
&w16_startfact, w16_inc, w16_interpLen);
inst->w16_mode = MODE_MERGE;
inst->ExpandInst.w16_consecExp = 0; /* Last was not expand any more */
/* New added length (w16_startPos samples were borrowed) */
*pw16_len = w16_bestIndex + w16_decodedLen - w16_startPos;
/* Update VQmon parameter */
inst->w16_concealedTS += (*pw16_len - w16_decodedLen);
inst->w16_concealedTS = WEBRTC_SPL_MAX(0, inst->w16_concealedTS);
/* Update in-call and post-call statistics */
if (inst->ExpandInst.w16_expandMuteFactor == 0)
{
/* expansion generates noise only */
inst->statInst.expandedNoiseSamples += (*pw16_len - w16_decodedLen);
}
else
{
/* expansion generates more than only noise */
inst->statInst.expandedVoiceSamples += (*pw16_len - w16_decodedLen);
}
inst->statInst.expandLength += (*pw16_len - w16_decodedLen);
/* Copy back the first part of the data to the speechHistory */
WEBRTC_SPL_MEMCPY_W16(&inst->speechBuffer[inst->curPosition], pw16_outData, w16_startPos);
/* Move data to within outData */
WEBRTC_SPL_MEMMOVE_W16(pw16_outData, &pw16_outData[w16_startPos], (*pw16_len));
return 0;
}
#undef SCRATCH_pw16_expanded
#undef SCRATCH_pw16_expandedLB
#undef SCRATCH_pw16_decodedLB
#undef SCRATCH_pw32_corr
#undef SCRATCH_pw16_corrVec
#undef SCRATCH_NETEQ_EXPAND

View File

@ -1,55 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* Calculate best overlap fit according to distortion measure.
*/
#include "dsp_helpfunctions.h"
#include "signal_processing_library.h"
/*
 * Search lags in [w16_minLag, w16_maxLag] for the one whose sum of
 * absolute differences between pw16_data and the lagged signal is
 * smallest over len samples. Returns the best lag and writes the
 * corresponding distortion to *pw16_dist.
 */
WebRtc_Word16 WebRtcNetEQ_MinDistortion(const WebRtc_Word16 *pw16_data,
                                        WebRtc_Word16 w16_minLag, WebRtc_Word16 w16_maxLag,
                                        WebRtc_Word16 len, WebRtc_Word32 *pw16_dist)
{
    int lag;
    int k;
    WebRtc_Word16 w16_bestLag = -1;
    WebRtc_Word32 w32_bestDist = WEBRTC_SPL_WORD32_MAX;

    for (lag = w16_minLag; lag <= w16_maxLag; lag++)
    {
        const WebRtc_Word16 *pw16_ref = pw16_data;
        const WebRtc_Word16 *pw16_lagged = pw16_data - lag;
        WebRtc_Word32 w32_dist = 0;

        /* Sum of absolute differences for this lag. */
        for (k = 0; k < len; k++)
        {
            WebRtc_Word32 w32_d = pw16_ref[k] - pw16_lagged[k];
            w32_dist += WEBRTC_SPL_ABS_W32(w32_d);
        }

        /* Keep the lag with the smallest distortion seen so far. */
        if (w32_dist < w32_bestDist)
        {
            w32_bestDist = w32_dist;
            w16_bestLag = (WebRtc_Word16) lag;
        }
    }

    *pw16_dist = w32_bestDist;
    return w16_bestLag;
}

View File

@ -1,41 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* This function mixes a voiced signal with an unvoiced signal and
* updates the weight on a sample by sample basis.
*/
#include "dsp_helpfunctions.h"
#include "signal_processing_library.h"
/*
 * Sample-by-sample weighted mix of a voiced and an unvoiced vector.
 * The voiced weight (Q14) starts at *w16_current_vfraction and is
 * decreased by w16_vfraction_change per sample; the unvoiced weight is
 * its Q14 complement. The final weight is written back on exit.
 */
void WebRtcNetEQ_MixVoiceUnvoice(WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_voicedVec,
                                 WebRtc_Word16 *pw16_unvoicedVec,
                                 WebRtc_Word16 *w16_current_vfraction,
                                 WebRtc_Word16 w16_vfraction_change, WebRtc_Word16 N)
{
    WebRtc_Word16 w16_voicedWgt = *w16_current_vfraction; /* Q14 */
    WebRtc_Word16 w16_unvoicedWgt = 16384 - w16_voicedWgt; /* Q14 complement */
    int k;

    for (k = 0; k < N; k++)
    {
        /* Round (add 8192) and bring the Q14 product back to Q0. */
        pw16_outData[k] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(
            WEBRTC_SPL_MUL_16_16(w16_voicedWgt, pw16_voicedVec[k]) +
            WEBRTC_SPL_MUL_16_16(w16_unvoicedWgt, pw16_unvoicedVec[k]) + 8192,
            14);
        w16_voicedWgt -= w16_vfraction_change;
        w16_unvoicedWgt += w16_vfraction_change;
    }

    *w16_current_vfraction = w16_voicedWgt;
}

View File

@ -1,33 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* This function mutes a signal linearly on a sample by sample basis.
*/
#include "dsp_helpfunctions.h"
#include "signal_processing_library.h"
/*
 * In-place linear mute: the gain starts at 1.0 (16384 in Q14, held in a
 * Q20 accumulator with a rounding offset of 32) and is decreased by
 * muteSlope (Q20) for every one of the N samples.
 */
void WebRtcNetEQ_MuteSignal(WebRtc_Word16 *pw16_inout, WebRtc_Word16 muteSlope,
                            WebRtc_Word16 N)
{
    WebRtc_Word32 w32_gainQ20 = 1048608; /* (16384 << 6) + 32 */
    int k;

    for (k = 0; k < N; k++)
    {
        WebRtc_Word16 w16_gainQ14 = (WebRtc_Word16) (w32_gainQ20 >> 6);

        /* Apply the Q14 gain with rounding. */
        pw16_inout[k] = (WebRtc_Word16)
            ((WEBRTC_SPL_MUL_16_16(w16_gainQ14, pw16_inout[k]) + 8192) >> 14);
        w32_gainQ20 -= muteSlope;
    }
}

View File

@ -1,301 +0,0 @@
# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
{
  'includes': [
    '../../../../../common_settings.gypi', # Common settings
  ],
  'targets': [
    # The NetEQ library itself (static or shared depending on <(library)).
    {
      'target_name': 'NetEq',
      'type': '<(library)',
      'dependencies': [
        '../../../codecs/CNG/main/source/cng.gyp:CNG',
        '../../../../../common_audio/signal_processing_library/main/source/spl.gyp:spl',
      ],
      'defines': [
        'NETEQ_VOICEENGINE_CODECS', # TODO: Should create a Chrome define which specifies a subset of codecs to support
        'SCRATCH',
      ],
      'include_dirs': [
        '../interface',
      ],
      'direct_dependent_settings': {
        'include_dirs': [
          '../interface',
        ],
      },
      'sources': [
        '../interface/webrtc_neteq.h',
        '../interface/webrtc_neteq_help_macros.h',
        '../interface/webrtc_neteq_internal.h',
        'accelerate.c',
        'automode.c',
        'automode.h',
        'bgn_update.c',
        'buffer_stats.h',
        'bufstats_decision.c',
        'cng_internal.c',
        'codec_db.c',
        'codec_db.h',
        'codec_db_defines.h',
        'correlator.c',
        'delay_logging.h',
        'dsp.c',
        'dsp.h',
        'dsp_helpfunctions.c',
        'dsp_helpfunctions.h',
        'dtmf_buffer.c',
        'dtmf_buffer.h',
        'dtmf_tonegen.c',
        'dtmf_tonegen.h',
        'expand.c',
        'mcu.h',
        'mcu_address_init.c',
        'mcu_dsp_common.c',
        'mcu_dsp_common.h',
        'mcu_reset.c',
        'merge.c',
        'min_distortion.c',
        'mix_voice_unvoice.c',
        'mute_signal.c',
        'neteq_defines.h',
        'neteq_error_codes.h',
        'neteq_statistics.h',
        'normal.c',
        'packet_buffer.c',
        'packet_buffer.h',
        'peak_detection.c',
        'preemptive_expand.c',
        'random_vector.c',
        'recin.c',
        'recout.c',
        'rtcp.c',
        'rtcp.h',
        'rtp.c',
        'rtp.h',
        'set_fs.c',
        'signal_mcu.c',
        'split_and_insert.c',
        'unmute_signal.c',
        'webrtc_neteq.c',
      ],
    },
    # Test executable that plays a recorded RTP stream through NetEQ.
    {
      'target_name': 'NetEqRTPplay',
      'type': 'executable',
      'dependencies': [
        'NetEq',          # NetEQ library defined above
        'NetEqTestTools', # Test helpers
        '../../../codecs/G711/main/source/g711.gyp:G711',
        '../../../codecs/G722/main/source/g722.gyp:G722',
        '../../../codecs/PCM16B/main/source/pcm16b.gyp:PCM16B',
        '../../../codecs/iLBC/main/source/ilbc.gyp:iLBC',
        '../../../codecs/iSAC/main/source/isac.gyp:iSAC',
        '../../../codecs/CNG/main/source/cng.gyp:CNG',
      ],
      'defines': [
        # TODO: Make codec selection conditional on definitions in target NetEq
        'CODEC_ILBC',
        'CODEC_PCM16B',
        'CODEC_G711',
        'CODEC_G722',
        'CODEC_ISAC',
        'CODEC_PCM16B_WB',
        'CODEC_ISAC_SWB',
        'CODEC_PCM16B_32KHZ',
        'CODEC_CNGCODEC8',
        'CODEC_CNGCODEC16',
        'CODEC_CNGCODEC32',
        'CODEC_ATEVENT_DECODE',
        'CODEC_RED',
      ],
      'include_dirs': [
        '../source',
        '../test',
      ],
      'sources': [
        '../test/NetEqRTPplay.cc',
      ],
    },
    # Tool that encodes audio into an RTP stream for the tests above.
    {
      'target_name': 'RTPencode',
      'type': 'executable',
      'dependencies': [
        'NetEqTestTools', # Test helpers
        '../../../codecs/G711/main/source/g711.gyp:G711',
        '../../../codecs/G722/main/source/g722.gyp:G722',
        '../../../codecs/PCM16B/main/source/pcm16b.gyp:PCM16B',
        '../../../codecs/iLBC/main/source/ilbc.gyp:iLBC',
        '../../../codecs/iSAC/main/source/isac.gyp:iSAC',
        '../../../codecs/CNG/main/source/cng.gyp:CNG',
        '../../../../../common_audio/vad/main/source/vad.gyp:vad',
      ],
      'defines': [
        # TODO: Make codec selection conditional on definitions in target NetEq
        'CODEC_ILBC',
        'CODEC_PCM16B',
        'CODEC_G711',
        'CODEC_G722',
        'CODEC_ISAC',
        'CODEC_PCM16B_WB',
        'CODEC_ISAC_SWB',
        'CODEC_PCM16B_32KHZ',
        'CODEC_CNGCODEC8',
        'CODEC_CNGCODEC16',
        'CODEC_CNGCODEC32',
        'CODEC_ATEVENT_DECODE',
        'CODEC_RED',
      ],
      'include_dirs': [
        '../interface',
        '../test',
      ],
      'sources': [
        '../test/RTPencode.cc',
      ],
    },
    # Tool that adds artificial jitter to a recorded RTP stream.
    {
      'target_name': 'RTPjitter',
      'type': 'executable',
      'dependencies': [
      ],
      'defines': [
      ],
      'include_dirs': [
      ],
      'sources': [
        '../test/RTPjitter.cc',
      ],
    },
    # Tool that inspects/analyzes a recorded RTP stream.
    {
      'target_name': 'RTPanalyze',
      'type': 'executable',
      'dependencies': [
        'NetEqTestTools',
      ],
      'defines': [
      ],
      'include_dirs': [
      ],
      'sources': [
        '../test/RTPanalyze.cc',
      ],
    },
    # Tool that rewrites fields of a recorded RTP stream.
    {
      'target_name': 'RTPchange',
      'type': 'executable',
      'dependencies': [
        'NetEqTestTools',
      ],
      'defines': [
      ],
      'include_dirs': [
      ],
      'sources': [
        '../test/RTPchange.cc',
      ],
    },
    # Tool that shifts timestamps in a recorded RTP stream.
    {
      'target_name': 'RTPtimeshift',
      'type': 'executable',
      'dependencies': [
        'NetEqTestTools',
      ],
      'defines': [
      ],
      'include_dirs': [
      ],
      'sources': [
        '../test/RTPtimeshift.cc',
      ],
    },
    # Tool that concatenates recorded RTP streams.
    {
      'target_name': 'RTPcat',
      'type': 'executable',
      'dependencies': [
        'NetEqTestTools',
      ],
      'defines': [
      ],
      'include_dirs': [
      ],
      'sources': [
        '../test/RTPcat.cc',
      ],
    },
    {
      'target_name': 'NetEqTestTools',
      # Collection of useful functions used in other tests
      'type': '<(library)',
      'dependencies': [
        '../../../codecs/G711/main/source/g711.gyp:G711',
        '../../../codecs/G722/main/source/g722.gyp:G722',
        '../../../codecs/PCM16B/main/source/pcm16b.gyp:PCM16B',
        '../../../codecs/iLBC/main/source/ilbc.gyp:iLBC',
        '../../../codecs/iSAC/main/source/isac.gyp:iSAC',
        '../../../codecs/CNG/main/source/cng.gyp:CNG',
      ],
      'direct_dependent_settings': {
        'include_dirs': [
          '../test',
          '../interface',
        ],
      },
      'defines': [
        # TODO: Make codec selection conditional on definitions in target NetEq
        'CODEC_ILBC',
        'CODEC_PCM16B',
        'CODEC_G711',
        'CODEC_G722',
        'CODEC_ISAC',
        'CODEC_PCM16B_WB',
        'CODEC_ISAC_SWB',
        'CODEC_PCM16B_32KHZ',
        'CODEC_CNGCODEC8',
        'CODEC_CNGCODEC16',
        'CODEC_CNGCODEC32',
        'CODEC_ATEVENT_DECODE',
        'CODEC_RED',
      ],
      'include_dirs': [
        '../source',
        '../interface',
        '../test',
      ],
      'sources': [
        '../test/NETEQTEST_NetEQClass.cc',
        '../test/NETEQTEST_RTPpacket.cc',
        '../test/NETEQTEST_CodecClass.cc',
        '../test/NETEQTEST_NetEQClass.h',
        '../test/NETEQTEST_RTPpacket.h',
        '../test/NETEQTEST_CodecClass.h',
      ],
      'conditions': [
        ['OS=="linux"', {
          'cflags': [
            '-fexceptions', # enable exceptions
          ],
        }],
      ],
    },
  ],
}
# Local Variables:
# tab-width:2
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=2 shiftwidth=2:

View File

@ -1,343 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*****************************************************************************************
*
* Compilation flags in NetEQ:
*
*****************************************************************************************
*
***** Platform flags ******
*
* SCRATCH Run NetEQ with "Scratch memory" to save some stack memory.
* Definition can be used on all platforms
*
***** Summary flags ******
*
* NETEQ_ALL_SPECIAL_CODECS Add support for special codecs (CN/RED/DTMF)
*
* NETEQ_ALL_NB_CODECS Add support for all NB codecs (except CN/RED/DTMF)
*
* NETEQ_ALL_WB_CODECS Add support for all WB codecs (except CN/RED/DTMF)
*
* NETEQ_VOICEENGINE_CODECS Support for all NB, WB and SWB32 codecs and CN, RED and DTMF
*
* NETEQ_ALL_CODECS Support for all NB, WB, SWB 32kHz and SWB 48kHz as well as
* CN, RED and DTMF
*
***** Sampling frequency ******
* (Note: usually not needed when Summary flags are used)
*
* NETEQ_WIDEBAND Wideband enabled
*
* NETEQ_32KHZ_WIDEBAND Super wideband @ 32kHz enabled
*
* NETEQ_48KHZ_WIDEBAND Super wideband @ 48kHz enabled
*
***** Special Codec ******
* (Note: not needed if NETEQ_ALL_CODECS is used)
*
* NETEQ_RED_CODEC With this flag you enable NetEQ to understand redundancy in
* the RTP. NetEQ will use the redundancy if it's the same
* codec
*
* NETEQ_CNG_CODEC Enable DTX with the CN payload
*
* NETEQ_ATEVENT_DECODE Enable AVT event and play out the corresponding DTMF tone
*
***** Speech Codecs *****
* (Note: Not needed if Summary flags are used)
*
* NETEQ_G711_CODEC Enable G.711 u- and A-law
*
* NETEQ_PCM16B_CODEC Enable uncompressed 16-bit
*
* NETEQ_ILBC_CODEC Enable iLBC
*
* NETEQ_ISAC_CODEC Enable iSAC
*
* NETEQ_ISAC_SWB_CODEC Enable iSAC-SWB
*
* NETEQ_G722_CODEC Enable G.722
*
* NETEQ_G729_CODEC Enable G.729
*
* NETEQ_G729_1_CODEC Enable G.729.1
*
* NETEQ_G726_CODEC Enable G.726
*
* NETEQ_G722_1_CODEC Enable G722.1
*
* NETEQ_G722_1C_CODEC Enable G722.1 Annex C
*
* NETEQ_SPEEX_CODEC Enable Speex (at 8 and 16 kHz sample rate)
*
* NETEQ_GSMFR_CODEC Enable GSM-FR
*
* NETEQ_AMR_CODEC Enable AMR (narrowband)
*
* NETEQ_AMRWB_CODEC Enable AMR-WB
*
* NETEQ_CNG_CODEC Enable DTX with the CNG payload
*
* NETEQ_ATEVENT_DECODE Enable AVT event and play out the corresponding DTMF tone
*
***** Test flags ******
*
* WEBRTC_NETEQ_40BITACC_TEST Run NetEQ with simulated 40-bit accumulator to run
* bit-exact to a DSP implementation where the main (splib
* and NetEQ) functions have been 40-bit optimized
*
*****************************************************************************************
*/
#if !defined NETEQ_DEFINES_H
#define NETEQ_DEFINES_H
/* Data block structure for MCU to DSP communication:
*
*
* First 3 16-bit words are pre-header that contains instructions and timestamp update
* Fourth 16-bit word is length of data block 1
* Rest is payload data
*
* 0 48 64 80
* -------------...----------------------------------------------------------------------
 * | PreHeader ... | Length 1 | Payload data 1 ...... | Length 2| Data block 2.... | ...
* -------------...----------------------------------------------------------------------
*
*
* Preheader:
* 4 MSB can be either of:
*/
#define DSP_INSTR_NORMAL 0x1000
/* Payload data will contain the encoded frames */
#define DSP_INSTR_MERGE 0x2000
/* Payload data block 1 will contain the encoded frame */
/* Info block will contain the number of missing samples */
#define DSP_INSTR_EXPAND 0x3000
/* Payload data will be empty */
#define DSP_INSTR_ACCELERATE 0x4000
/* Payload data will contain the encoded frame */
#define DSP_INSTR_DO_RFC3389CNG 0x5000
/* Payload data will contain the SID frame if there is one*/
#define DSP_INSTR_DTMF_GENERATE 0x6000
/* Payload data will be one WebRtc_Word16 with the current DTMF value and one
* WebRtc_Word16 with the current volume value
*/
#define DSP_INSTR_NORMAL_ONE_DESC 0x7000
/* No encoded frames */
#define DSP_INSTR_DO_CODEC_INTERNAL_CNG 0x8000
/* Codec has a built-in VAD/DTX scheme (use the above for "no transmission") */
#define DSP_INSTR_PREEMPTIVE_EXPAND 0x9000
/* Payload data will contain the encoded frames, if any */
#define DSP_INSTR_DO_ALTERNATIVE_PLC 0xB000
/* NetEQ switched off and packet missing... */
#define DSP_INSTR_DO_ALTERNATIVE_PLC_INC_TS 0xC000
/* NetEQ switched off and packet missing... */
#define DSP_INSTR_DO_AUDIO_REPETITION 0xD000
/* NetEQ switched off and packet missing... */
#define DSP_INSTR_DO_AUDIO_REPETITION_INC_TS 0xE000
/* NetEQ switched off and packet missing... */
#define DSP_INSTR_FADE_TO_BGN 0xF000
/* Exception handling: fade out to BGN (expand) */
/*
* Next 4 bits signal additional data that needs to be transmitted
*/
#define DSP_CODEC_NO_CHANGE 0x0100
#define DSP_CODEC_NEW_CODEC 0x0200
#define DSP_CODEC_ADD_LATE_PKT 0x0300
#define DSP_CODEC_RESET 0x0400
#define DSP_DTMF_PAYLOAD 0x0010
/*
* The most significant bit of the payload-length
* is used to flag whether the associated payload
* is redundant payload. This currently useful only for
* iSAC, where redundant payloads have to be treated
* differently. Every time the length is read it must be
* masked by DSP_CODEC_MASK_RED_FLAG to ignore the flag.
* Use DSP_CODEC_RED_FLAG to set or retrieve the flag.
*/
#define DSP_CODEC_MASK_RED_FLAG 0x7FFF
#define DSP_CODEC_RED_FLAG 0x8000
/*
* The first block of payload data consist of decode function pointers,
* and then the speech blocks.
*
*/
/*
* The playout modes that NetEq produced (i.e. gives more info about if the
* Accelerate was successful or not)
*/
#define MODE_NORMAL 0x0000
#define MODE_EXPAND 0x0001
#define MODE_MERGE 0x0002
#define MODE_SUCCESS_ACCELERATE 0x0003
#define MODE_UNSUCCESS_ACCELERATE 0x0004
#define MODE_RFC3389CNG 0x0005
#define MODE_LOWEN_ACCELERATE 0x0006
#define MODE_DTMF 0x0007
#define MODE_ONE_DESCRIPTOR 0x0008
#define MODE_CODEC_INTERNAL_CNG 0x0009
#define MODE_SUCCESS_PREEMPTIVE 0x000A
#define MODE_UNSUCCESS_PREEMPTIVE 0x000B
#define MODE_LOWEN_PREEMPTIVE 0x000C
#define MODE_FADE_TO_BGN 0x000D
#define MODE_ERROR 0x0010
#define MODE_AWAITING_CODEC_PTR 0x0100
#define MODE_BGN_ONLY 0x0200
#define MODE_MASTER_DTMF_SIGNAL 0x0400
#define MODE_USING_STEREO 0x0800
/***********************/
/* Group codec defines */
/***********************/
#if (defined(NETEQ_ALL_SPECIAL_CODECS))
#define NETEQ_CNG_CODEC
#define NETEQ_ATEVENT_DECODE
#define NETEQ_RED_CODEC
#define NETEQ_VAD
#define NETEQ_ARBITRARY_CODEC
#endif
#if (defined(NETEQ_ALL_NB_CODECS)) /* Except RED, DTMF and CNG */
#define NETEQ_PCM16B_CODEC
#define NETEQ_G711_CODEC
#define NETEQ_ILBC_CODEC
#define NETEQ_G729_CODEC
#define NETEQ_G726_CODEC
#define NETEQ_GSMFR_CODEC
#define NETEQ_AMR_CODEC
#endif
#if (defined(NETEQ_ALL_WB_CODECS)) /* Except RED, DTMF and CNG */
#define NETEQ_ISAC_CODEC
#define NETEQ_G722_CODEC
#define NETEQ_G722_1_CODEC
#define NETEQ_G729_1_CODEC
#define NETEQ_SPEEX_CODEC
#define NETEQ_AMRWB_CODEC
#define NETEQ_WIDEBAND
#endif
#if (defined(NETEQ_ALL_WB32_CODECS)) /* Super wideband 32 kHz codecs; except RED, DTMF and CNG */
#define NETEQ_ISAC_SWB_CODEC
#define NETEQ_32KHZ_WIDEBAND
#define NETEQ_G722_1C_CODEC
#endif
#if (defined(NETEQ_VOICEENGINE_CODECS))
/* Special codecs */
#define NETEQ_CNG_CODEC
#define NETEQ_ATEVENT_DECODE
#define NETEQ_RED_CODEC
#define NETEQ_VAD
#define NETEQ_ARBITRARY_CODEC
/* Narrowband codecs */
#define NETEQ_PCM16B_CODEC
#define NETEQ_G711_CODEC
#define NETEQ_ILBC_CODEC
/* Wideband codecs */
#define NETEQ_WIDEBAND
#define NETEQ_ISAC_CODEC
#define NETEQ_G722_CODEC
/* Super wideband 32kHz codecs */
#define NETEQ_ISAC_SWB_CODEC
#define NETEQ_32KHZ_WIDEBAND
#endif
#if (defined(NETEQ_ALL_CODECS))
/* Special codecs */
#define NETEQ_CNG_CODEC
#define NETEQ_ATEVENT_DECODE
#define NETEQ_RED_CODEC
#define NETEQ_VAD
#define NETEQ_ARBITRARY_CODEC
/* Narrowband codecs */
#define NETEQ_PCM16B_CODEC
#define NETEQ_G711_CODEC
#define NETEQ_ILBC_CODEC
#define NETEQ_G729_CODEC
#define NETEQ_G726_CODEC
#define NETEQ_GSMFR_CODEC
#define NETEQ_AMR_CODEC
/* Wideband codecs */
#define NETEQ_WIDEBAND
#define NETEQ_ISAC_CODEC
#define NETEQ_G722_CODEC
#define NETEQ_G722_1_CODEC
#define NETEQ_G729_1_CODEC
#define NETEQ_SPEEX_CODEC
#define NETEQ_AMRWB_CODEC
/* Super wideband 32kHz codecs */
#define NETEQ_ISAC_SWB_CODEC
#define NETEQ_32KHZ_WIDEBAND
#define NETEQ_G722_1C_CODEC
/* Super wideband 48kHz codecs */
#define NETEQ_48KHZ_WIDEBAND
#endif
/* Max output size from decoding one frame.
 * NETEQ_MAX_OUTPUT_SIZE is always 75 ms worth of samples: 60 ms of decoded
 * audio plus 15 ms of extra room for the merge overlap. */
#if defined(NETEQ_48KHZ_WIDEBAND)
#define NETEQ_MAX_FRAME_SIZE 2880       /* 60 ms super wideband */
#define NETEQ_MAX_OUTPUT_SIZE 3600      /* 60+15 ms super wideband (60 ms decoded + 15 ms for merge overlap) */
#elif defined(NETEQ_32KHZ_WIDEBAND)
#define NETEQ_MAX_FRAME_SIZE 1920       /* 60 ms super wideband */
#define NETEQ_MAX_OUTPUT_SIZE 2400      /* 60+15 ms super wideband (60 ms decoded + 15 ms for merge overlap) */
#elif defined(NETEQ_WIDEBAND)
#define NETEQ_MAX_FRAME_SIZE 960        /* 60 ms wideband */
#define NETEQ_MAX_OUTPUT_SIZE 1200      /* 60+15 ms wideband (60 ms decoded + 15 ms for merge overlap) */
#else
#define NETEQ_MAX_FRAME_SIZE 480        /* 60 ms narrowband */
#define NETEQ_MAX_OUTPUT_SIZE 600       /* 60+15 ms narrowband (60 ms decoded + 15 ms for merge overlap) */
#endif
/* Enable stereo */
#define NETEQ_STEREO
#endif /* #if !defined NETEQ_DEFINES_H */

View File

@ -1,79 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* Definition of error codes.
*
* NOTE: When modifying the error codes,
* also modify the function WebRtcNetEQ_GetErrorCode!
*/
/*
 * All NetEQ error codes are negative; the API returns 0 on success.
 * Codes are grouped by subsystem: -1xxx misc, -2xxx RecOut, -3xxx RecIn,
 * -4xxx packet buffer, -5xxx codec database, -6xxx DTMF, -7xxx RTP parsing.
 */
#if !defined NETEQ_ERROR_CODES_H
#define NETEQ_ERROR_CODES_H

/* Misc Error */
#define NETEQ_OTHER_ERROR               -1000

/* Misc Recout Errors */
#define FAULTY_INSTRUCTION              -1001
#define FAULTY_NETWORK_TYPE             -1002
#define FAULTY_DELAYVALUE               -1003
#define FAULTY_PLAYOUTMODE              -1004
#define CORRUPT_INSTANCE                -1005
#define ILLEGAL_MASTER_SLAVE_SWITCH     -1006
#define MASTER_SLAVE_ERROR              -1007

/* Misc Recout problems */
#define UNKNOWN_BUFSTAT_DECISION        -2001
#define RECOUT_ERROR_DECODING           -2002
#define RECOUT_ERROR_SAMPLEUNDERRUN     -2003
#define RECOUT_ERROR_DECODED_TOO_MUCH   -2004

/* Misc RecIn problems */
#define RECIN_CNG_ERROR                 -3001
#define RECIN_UNKNOWNPAYLOAD            -3002
#define RECIN_BUFFERINSERT_ERROR        -3003

/* PBUFFER/BUFSTAT ERRORS */
#define PBUFFER_INIT_ERROR              -4001
#define PBUFFER_INSERT_ERROR1           -4002
#define PBUFFER_INSERT_ERROR2           -4003
#define PBUFFER_INSERT_ERROR3           -4004
#define PBUFFER_INSERT_ERROR4           -4005
#define PBUFFER_INSERT_ERROR5           -4006
#define UNKNOWN_G723_HEADER             -4007
#define PBUFFER_NONEXISTING_PACKET      -4008
#define PBUFFER_NOT_INITIALIZED         -4009
#define AMBIGUOUS_ILBC_FRAME_SIZE       -4010

/* CODEC DATABASE ERRORS */
#define CODEC_DB_FULL                   -5001
#define CODEC_DB_NOT_EXIST1             -5002
#define CODEC_DB_NOT_EXIST2             -5003
#define CODEC_DB_NOT_EXIST3             -5004
#define CODEC_DB_NOT_EXIST4             -5005
#define CODEC_DB_UNKNOWN_CODEC          -5006
#define CODEC_DB_PAYLOAD_TAKEN          -5007
#define CODEC_DB_UNSUPPORTED_CODEC      -5008
#define CODEC_DB_UNSUPPORTED_FS         -5009

/* DTMF ERRORS */
#define DTMF_DEC_PARAMETER_ERROR        -6001
#define DTMF_INSERT_ERROR               -6002
#define DTMF_GEN_UNKNOWN_SAMP_FREQ      -6003
#define DTMF_NOT_SUPPORTED              -6004

/* RTP/PACKET ERRORS */
#define RED_SPLIT_ERROR1                -7001
#define RED_SPLIT_ERROR2                -7002
#define RTP_TOO_SHORT_PACKET            -7003
#define RTP_CORRUPT_PACKET              -7004

#endif

View File

@ -1,67 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* Definitions of statistics data structures for MCU and DSP sides.
*/
#include "typedefs.h"
#ifndef NETEQ_STATISTICS_H
#define NETEQ_STATISTICS_H
/*
* Statistics struct on DSP side
*/
/* Statistics kept on the DSP side of NetEQ. All counters are in samples. */
typedef struct
{
    /* variables for in-call statistics; queried through WebRtcNetEQ_GetNetworkStatistics */
    WebRtc_UWord32 expandLength;        /* number of samples produced through expand */
    WebRtc_UWord32 preemptiveLength;    /* number of samples produced through pre-emptive
                                           expand */
    WebRtc_UWord32 accelerateLength;    /* number of samples removed through accelerate */

    /* variables for post-call statistics; queried through WebRtcNetEQ_GetJitterStatistics */
    WebRtc_UWord32 expandedVoiceSamples; /* number of voice samples produced through expand */
    WebRtc_UWord32 expandedNoiseSamples; /* number of noise (background) samples produced
                                            through expand */
} DSPStats_t;
/*
* Statistics struct on MCU side
* All variables are for post-call statistics; queried through WebRtcNetEQ_GetJitterStatistics.
*/
/* Statistics kept on the MCU side of NetEQ (see header comment above). */
typedef struct
{
    WebRtc_UWord32 jbMinSize;       /* smallest Jitter Buffer size during call in ms */
    WebRtc_UWord32 jbMaxSize;       /* largest Jitter Buffer size during call in ms */
    WebRtc_UWord32 jbAvgSizeQ16;    /* the average JB size, measured over time in ms (Q16) */
    WebRtc_UWord16 jbAvgCount;      /* help counter for jbAvgSizeQ16 */
    WebRtc_UWord32 minPacketDelayMs; /* min time incoming packet "waited" to be played in ms */
    WebRtc_UWord32 maxPacketDelayMs; /* max time incoming packet "waited" to be played in ms */
    WebRtc_UWord16 avgPacketDelayMs; /* avg time incoming packet "waited" to be played in ms */
    WebRtc_UWord16 avgPacketCount;  /* help counter for avgPacketDelayMs */
    WebRtc_UWord32 jbChangeCount;   /* count number of successful accelerate and pre-emptive
                                       expand operations */
    WebRtc_UWord32 generatedSilentMs; /* generated silence in ms */
    WebRtc_UWord32 countExpandMoreThan120ms; /* count of tiny expansions */
    WebRtc_UWord32 countExpandMoreThan250ms; /* count of small expansions */
    WebRtc_UWord32 countExpandMoreThan500ms; /* count of medium expansions */
    WebRtc_UWord32 countExpandMoreThan2000ms; /* count of large expansions */
    WebRtc_UWord32 longestExpandDurationMs; /* duration of longest expansions in ms */
    WebRtc_UWord32 accelerateMs;    /* audio data removed through accelerate in ms */
} MCUStats_t;
#endif

View File

@ -1,279 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* This file contains the function for handling "normal" speech operation.
*/
#include "dsp.h"
#include "signal_processing_library.h"
#include "dsp_helpfunctions.h"
/* Scratch usage:
Type Name size startpos endpos
WebRtc_Word16 pw16_expanded 125*fs/8000 0 125*fs/8000-1
func WebRtcNetEQ_Expand 40+370*fs/8000 125*fs/8000 39+495*fs/8000
Total: 40+495*fs/8000
*/
#define SCRATCH_PW16_EXPANDED 0
#if (defined(NETEQ_48KHZ_WIDEBAND))
#define SCRATCH_NETEQ_EXPAND 756
#elif (defined(NETEQ_32KHZ_WIDEBAND))
#define SCRATCH_NETEQ_EXPAND 504
#elif (defined(NETEQ_WIDEBAND))
#define SCRATCH_NETEQ_EXPAND 252
#else /* NB */
#define SCRATCH_NETEQ_EXPAND 126
#endif
/****************************************************************************
* WebRtcNetEQ_Normal(...)
*
* This function has the possibility to modify data that is played out in Normal
* mode, for example adjust the gain of the signal. The length of the signal
* can not be changed.
*
* Input:
* - inst : NetEq instance, i.e. the user that requests more
* speech/audio data
* - scratchPtr : Pointer to scratch vector
* - decoded : Pointer to vector of new data from decoder
* (Vector contents may be altered by the function)
* - len : Number of input samples
*
* Output:
* - inst : Updated user information
* - outData : Pointer to a memory space where the output data
* should be stored
* - pw16_len : Pointer to variable where the number of samples
* produced will be written
*
* Return value : >=0 - Number of samples written to outData
* -1 - Error
*/
int WebRtcNetEQ_Normal(DSPInst_t *inst,
#ifdef SCRATCH
                       WebRtc_Word16 *pw16_scratchPtr,
#endif
                       WebRtc_Word16 *pw16_decoded, WebRtc_Word16 len,
                       WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len)
{
    int i;
    WebRtc_Word16 fs_mult;      /* sample rate as a multiple of 8000 Hz */
    WebRtc_Word16 fs_shift;     /* log2(fs_mult), approximate for 48 kHz */
    WebRtc_Word32 w32_En_speech; /* energy of the start of the decoded frame */
    WebRtc_Word16 enLen;        /* number of samples used for the energy measure */
    WebRtc_Word16 w16_muted;    /* target mute factor in Q14 */
    WebRtc_Word16 w16_inc, w16_frac;
    WebRtc_Word16 w16_tmp;
    WebRtc_Word32 w32_tmp;

    /* Sanity check */
    if (len < 0)
    {
        /* Cannot have negative length of input vector */
        return (-1);
    }

    if (len == 0)
    {
        /* Still got some data to play => continue with the same mode */
        *pw16_len = len;
        return (len);
    }

    fs_mult = WebRtcSpl_DivW32W16ResW16(inst->fs, 8000);
    fs_shift = 30 - WebRtcSpl_NormW32(fs_mult); /* Note that this is not "exact" for 48kHz */

    /*
     * Check if last RecOut call resulted in an Expand or a FadeToBGN. If so, we have to take
     * care of some cross-fading and unmuting.
     */
    if (inst->w16_mode == MODE_EXPAND || inst->w16_mode == MODE_FADE_TO_BGN)
    {
        /* Define memory where temporary result from Expand algorithm can be stored. */
#ifdef SCRATCH
        WebRtc_Word16 *pw16_expanded = pw16_scratchPtr + SCRATCH_PW16_EXPANDED;
#else
        WebRtc_Word16 pw16_expanded[FSMULT * 125];
#endif
        WebRtc_Word16 expandedLen = 0;
        WebRtc_Word16 w16_decodedMax;

        /* Find largest value in new data */
        w16_decodedMax = WebRtcSpl_MaxAbsValueW16(pw16_decoded, (WebRtc_Word16) len);

        /* Generate interpolation data using Expand */
        /* First, set Expand parameters to appropriate values. */
        inst->ExpandInst.w16_lagsPosition = 0;
        inst->ExpandInst.w16_lagsDirection = 0;
        inst->ExpandInst.w16_stopMuting = 1; /* Do not mute signal any more */

        /* Call Expand */
        WebRtcNetEQ_Expand(inst,
#ifdef SCRATCH
            pw16_scratchPtr + SCRATCH_NETEQ_EXPAND,
#endif
            pw16_expanded, &expandedLen, (WebRtc_Word16) (inst->w16_mode == MODE_FADE_TO_BGN));

        inst->ExpandInst.w16_stopMuting = 0; /* Restore value */
        inst->ExpandInst.w16_consecExp = 0; /* Last was not Expand any more */

        /* Adjust muting factor (main muting factor times expand muting factor) */
        if (inst->w16_mode == MODE_FADE_TO_BGN)
        {
            /* If last mode was FadeToBGN, the mute factor should be zero. */
            inst->w16_muteFactor = 0;
        }
        else
        {
            /* w16_muteFactor * w16_expandMuteFactor (both Q14, product back to Q14) */
            inst->w16_muteFactor
                = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16_RSFT(inst->w16_muteFactor,
                    inst->ExpandInst.w16_expandMuteFactor, 14);
        }

        /* Adjust muting factor if needed (to BGN level) */
        enLen = WEBRTC_SPL_MIN(fs_mult<<6, len); /* min( fs_mult * 64, len ) */
        /* Choose a right-shift so the energy accumulation below cannot overflow. */
        w16_tmp = 6 + fs_shift - WebRtcSpl_NormW32(
            WEBRTC_SPL_MUL_16_16(w16_decodedMax, w16_decodedMax));
        w16_tmp = WEBRTC_SPL_MAX(w16_tmp, 0);
        w32_En_speech = WebRtcNetEQ_DotW16W16(pw16_decoded, pw16_decoded, enLen, w16_tmp);
        /* Convert energy sum to a per-sample average. */
        w32_En_speech = WebRtcSpl_DivW32W16(w32_En_speech, (WebRtc_Word16) (enLen >> w16_tmp));

        if ((w32_En_speech != 0) && (w32_En_speech > inst->BGNInst.w32_energy))
        {
            /* Normalize new frame energy to 15 bits */
            w16_tmp = WebRtcSpl_NormW32(w32_En_speech) - 16;
            /* we want inst->BGNInst.energy/En_speech in Q14 */
            w32_tmp = WEBRTC_SPL_SHIFT_W32(inst->BGNInst.w32_energy, (w16_tmp+14));
            w16_tmp = (WebRtc_Word16) WEBRTC_SPL_SHIFT_W32(w32_En_speech, w16_tmp);
            w16_tmp = (WebRtc_Word16) WebRtcSpl_DivW32W16(w32_tmp, w16_tmp);
            w16_muted = (WebRtc_Word16) WebRtcSpl_Sqrt(
                WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32) w16_tmp,
                    14)); /* w16_muted in Q14 (sqrt(Q28)) */
        }
        else
        {
            w16_muted = 16384; /* 1.0 in Q14 */
        }
        if (w16_muted > inst->w16_muteFactor)
        {
            inst->w16_muteFactor = WEBRTC_SPL_MIN(w16_muted, 16384);
        }

        /* If muted increase by 0.64 for every 20 ms (NB/WB 0.0040/0.0020 in Q14) */
        w16_inc = WebRtcSpl_DivW32W16ResW16(64, fs_mult);
        for (i = 0; i < len; i++)
        {
            /* scale with mute factor */
            w32_tmp = WEBRTC_SPL_MUL_16_16(pw16_decoded[i], inst->w16_muteFactor);
            /* shift 14 with proper rounding */
            pw16_decoded[i] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32((w32_tmp + 8192), 14);
            /* increase mute_factor towards 16384 */
            inst->w16_muteFactor = WEBRTC_SPL_MIN(16384, (inst->w16_muteFactor+w16_inc));
        }

        /*
         * Interpolate the expanded data into the new vector
         * (NB/WB/SWB32/SWB40 8/16/32/32 samples)
         */
        fs_shift = WEBRTC_SPL_MIN(3, fs_shift); /* Set to 3 for >32kHz */
        w16_inc = 4 >> fs_shift;
        w16_frac = w16_inc;
        for (i = 0; i < 8 * fs_mult; i++)
        {
            /* Linear cross-fade in Q5 between expanded and decoded data. */
            pw16_decoded[i] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(
                (WEBRTC_SPL_MUL_16_16(w16_frac, pw16_decoded[i]) +
                    WEBRTC_SPL_MUL_16_16((32 - w16_frac), pw16_expanded[i]) + 8),
                5);
            w16_frac += w16_inc;
        }

#ifdef NETEQ_CNG_CODEC
    }
    else if (inst->w16_mode==MODE_RFC3389CNG)
    { /* previous was RFC 3389 CNG...*/
        WebRtc_Word16 pw16_CngInterp[32];

        /* Reset mute factor and start up fresh */
        inst->w16_muteFactor = 16384;

        if (inst->CNG_Codec_inst != NULL)
        {
            /* Generate long enough for 32kHz */
            if(WebRtcCng_Generate(inst->CNG_Codec_inst,pw16_CngInterp, 32, 0)<0)
            {
                /* error returned; set return vector to all zeros */
                WebRtcSpl_MemSetW16(pw16_CngInterp, 0, 32);
            }
        }
        else
        {
            /*
             * If no CNG instance is defined, just copy from the decoded data.
             * (This will result in interpolating the decoded with itself.)
             */
            WEBRTC_SPL_MEMCPY_W16(pw16_CngInterp, pw16_decoded, fs_mult * 8);
        }

        /*
         * Interpolate the CNG into the new vector
         * (NB/WB/SWB32kHz/SWB48kHz 8/16/32/32 samples)
         */
        fs_shift = WEBRTC_SPL_MIN(3, fs_shift); /* Set to 3 for >32kHz */
        w16_inc = 4>>fs_shift;
        w16_frac = w16_inc;
        for (i = 0; i < 8 * fs_mult; i++)
        {
            /* Linear cross-fade in Q5 between CNG and decoded data. */
            pw16_decoded[i] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(
                (WEBRTC_SPL_MUL_16_16(w16_frac, pw16_decoded[i]) +
                    WEBRTC_SPL_MUL_16_16((32-w16_frac), pw16_CngInterp[i]) + 8),
                5);
            w16_frac += w16_inc;
        }
#endif
    }
    else if (inst->w16_muteFactor < 16384)
    {
        /*
         * Previous was neither of Expand, FadeToBGN or RFC3389_CNG, but we are still
         * ramping up from previous muting.
         * If muted increase by 0.64 for every 20 ms (NB/WB 0.0040/0.0020 in Q14)
         */
        w16_inc = WebRtcSpl_DivW32W16ResW16(64, fs_mult);
        for (i = 0; i < len; i++)
        {
            /* scale with mute factor */
            w32_tmp = WEBRTC_SPL_MUL_16_16(pw16_decoded[i], inst->w16_muteFactor);
            /* shift 14 with proper rounding */
            pw16_decoded[i] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32((w32_tmp + 8192), 14);
            /* increase mute_factor towards 16384 */
            inst->w16_muteFactor = WEBRTC_SPL_MIN(16384, (inst->w16_muteFactor+w16_inc));
        }
    }

    /* Copy data to other buffer */
    WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, len);

    inst->w16_mode = MODE_NORMAL;
    *pw16_len = len;
    return (len);
}
#undef SCRATCH_PW16_EXPANDED
#undef SCRATCH_NETEQ_EXPAND

View File

@ -1,707 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* Implementation of the actual packet buffer data structure.
*/
#include "packet_buffer.h"
#include <string.h> /* to define NULL */
#include "signal_processing_library.h"
#include "neteq_error_codes.h"
#ifdef NETEQ_DELAY_LOGGING
/* special code for offline delay logging */
#include "delay_logging.h"
#include <stdio.h>
extern FILE *delay_fid2; /* file pointer to delay log file */
extern WebRtc_UWord32 tot_received_packets;
#endif /* NETEQ_DELAY_LOGGING */
/*
 * Initializes a packet buffer inside the caller-supplied memory area
 * pw16_memory (memorySize in WebRtc_Word16 units). The front of the memory
 * is carved into per-slot bookkeeping arrays; the remainder holds payloads.
 * Returns 0 on success or PBUFFER_INIT_ERROR on invalid parameters.
 */
int WebRtcNetEQ_PacketBufferInit(PacketBuf_t *bufferInst, int maxNoOfPackets,
                                 WebRtc_Word16 *pw16_memory, int memorySize)
{
    int i;
    int pos = 0; /* running index (in WebRtc_Word16) into pw16_memory */

    /* Sanity check */
    if ((memorySize < PBUFFER_MIN_MEMORY_SIZE) || (pw16_memory == NULL)
        || (maxNoOfPackets < 2) || (maxNoOfPackets > 600))
    {
        /* Invalid parameters */
        return (PBUFFER_INIT_ERROR);
    }

    /* Clear the buffer instance */
    WebRtcSpl_MemSetW16((WebRtc_Word16*) bufferInst, 0,
        sizeof(PacketBuf_t) / sizeof(WebRtc_Word16));

    /* Clear the buffer memory */
    WebRtcSpl_MemSetW16((WebRtc_Word16*) pw16_memory, 0, memorySize);

    /* Set maximum number of packets */
    bufferInst->maxInsertPositions = maxNoOfPackets;

    /* Initialize array pointers */
    /* After each pointer has been set, the index pos is advanced to point immediately
     * after the recently allocated vector. Note that one step for the pos index
     * corresponds to a WebRtc_Word16.
     */
    bufferInst->timeStamp = (WebRtc_UWord32*) &pw16_memory[pos];
    pos += maxNoOfPackets << 1; /* advance maxNoOfPackets * WebRtc_UWord32 */
    bufferInst->payloadLocation = (WebRtc_Word16**) &pw16_memory[pos];
    pos += maxNoOfPackets * (sizeof(WebRtc_Word16*) / sizeof(WebRtc_Word16)); /* advance */
    bufferInst->seqNumber = (WebRtc_UWord16*) &pw16_memory[pos];
    pos += maxNoOfPackets; /* advance maxNoOfPackets * WebRtc_UWord16 */
    bufferInst->payloadType = &pw16_memory[pos];
    pos += maxNoOfPackets; /* advance maxNoOfPackets * WebRtc_Word16 */
    bufferInst->payloadLengthBytes = &pw16_memory[pos];
    pos += maxNoOfPackets; /* advance maxNoOfPackets * WebRtc_Word16 */
    bufferInst->rcuPlCntr = &pw16_memory[pos];
    pos += maxNoOfPackets; /* advance maxNoOfPackets * WebRtc_Word16 */

    /* The payload memory starts after the slot arrays */
    bufferInst->startPayloadMemory = &pw16_memory[pos];
    bufferInst->currentMemoryPos = bufferInst->startPayloadMemory;
    bufferInst->memorySizeW16 = (memorySize - pos); /* Remaining memory */

    /* Mark each payload slot as empty (payload type -1) */
    for (i = 0; i < bufferInst->maxInsertPositions; i++)
    {
        bufferInst->payloadType[i] = -1;
    }

    /* Reset buffer parameters */
    bufferInst->numPacketsInBuffer = 0;
    bufferInst->packSizeSamples = 0;
    bufferInst->insertPosition = 0;

    /* Reset buffer statistics */
    bufferInst->discardedPackets = 0;
    bufferInst->totalDiscardedPackets = 0;
    bufferInst->totalFlushedPackets = 0;

    return (0);
}
/*
 * Empties the packet buffer, discarding all stored packets while keeping the
 * memory layout set up by WebRtcNetEQ_PacketBufferInit intact.
 * Always returns 0.
 */
int WebRtcNetEQ_PacketBufferFlush(PacketBuf_t *bufferInst)
{
    int slot;

    /* A buffer that was never initialized has undefined member values;
     * flushing it would be unsafe, so just report success. */
    if (bufferInst->startPayloadMemory == NULL)
    {
        return (0);
    }

    /* Count everything currently stored as flushed. */
    bufferInst->totalFlushedPackets += bufferInst->numPacketsInBuffer;

    /* Zero the per-slot payload lengths in one sweep. */
    WebRtcSpl_MemSetW16(bufferInst->payloadLengthBytes, 0, bufferInst->maxInsertPositions);

    /* Rewind the buffer bookkeeping to its freshly-initialized state. */
    bufferInst->numPacketsInBuffer = 0;
    bufferInst->currentMemoryPos = bufferInst->startPayloadMemory;
    bufferInst->insertPosition = 0;

    /* Invalidate every slot, walking from the last one down to the first. */
    for (slot = (bufferInst->maxInsertPositions - 1); slot >= 0; slot--)
    {
        bufferInst->payloadType[slot] = -1;
        bufferInst->timeStamp[slot] = 0;
        bufferInst->seqNumber[slot] = 0;
    }

    return (0);
}
/*
 * Insert one RTP packet into the buffer.
 *
 * The payload bytes are copied (16-bit aligned) into the circular payload
 * memory and the packet meta data is written to the next slot. If either the
 * payload memory or the slot array is exhausted, the whole buffer is flushed
 * and *flushed is set to 1 (the new packet is still inserted afterwards).
 *
 * Returns 0 on success, -1 on error (uninitialized buffer, bad payload
 * length, or corrupt buffer state).
 *
 * FIX(review): the slot-scan loop below used to test
 * payloadLengthBytes[nextPos] BEFORE checking nextPos against
 * maxInsertPositions, causing an out-of-bounds array read on the final
 * iteration. The operands of && are now swapped so the bounds check
 * short-circuits the array access; the loop result is unchanged.
 */
int WebRtcNetEQ_PacketBufferInsert(PacketBuf_t *bufferInst, const RTPPacket_t *RTPpacket,
                                   WebRtc_Word16 *flushed)
{
    int nextPos;
    int i;

#ifdef NETEQ_DELAY_LOGGING
    /* special code for offline delay logging */
    int temp_var;
#endif /* NETEQ_DELAY_LOGGING */

    /* Initialize to "no flush" */
    *flushed = 0;

    /* Sanity check */
    if (bufferInst->startPayloadMemory == NULL)
    {
        /* packet buffer has not been initialized */
        return (-1);
    }

    /* Sanity check for payload length
     (payloadLen in bytes and memory size in WebRtc_Word16) */
    if ((RTPpacket->payloadLen > (bufferInst->memorySizeW16 << 1)) || (RTPpacket->payloadLen
        <= 0))
    {
        /* faulty or too long payload length */
        return (-1);
    }

    /* Find a position in the buffer for this packet */
    if (bufferInst->numPacketsInBuffer != 0)
    {
        /* Get the next slot */
        bufferInst->insertPosition++;
        if (bufferInst->insertPosition >= bufferInst->maxInsertPositions)
        {
            /* "Wrap around" and start from the beginning */
            bufferInst->insertPosition = 0;
        }

        /* Check if there is enough space for the new packet */
        if (bufferInst->currentMemoryPos + ((RTPpacket->payloadLen + 1) >> 1)
            >= &bufferInst->startPayloadMemory[bufferInst->memorySizeW16])
        {
            WebRtc_Word16 *tempMemAddress;

            /*
             * Payload does not fit at the end of the memory, put it in the beginning
             * instead
             */
            bufferInst->currentMemoryPos = bufferInst->startPayloadMemory;

            /*
             * Now, we must search for the next non-empty payload,
             * finding the one with the lowest start address for the payload
             */
            tempMemAddress = &bufferInst->startPayloadMemory[bufferInst->memorySizeW16];
            nextPos = -1;

            /* Loop through all slots again */
            for (i = 0; i < bufferInst->maxInsertPositions; i++)
            {
                /* Look for the non-empty slot with the lowest
                 payload location address */
                if (bufferInst->payloadLengthBytes[i] != 0 && bufferInst->payloadLocation[i]
                    < tempMemAddress)
                {
                    tempMemAddress = bufferInst->payloadLocation[i];
                    nextPos = i;
                }
            }

            /* Check that we did find a previous payload */
            if (nextPos == -1)
            {
                /* The buffer is corrupt => flush and return error */
                WebRtcNetEQ_PacketBufferFlush(bufferInst);
                *flushed = 1;
                return (-1);
            }
        }
        else
        {
            /* Payload fits at the end of memory. */

            /* Find the next non-empty slot. */
            nextPos = bufferInst->insertPosition + 1;

            /* Increase nextPos until a non-empty slot is found or end of array is
             encountered. The bounds check must be evaluated first; otherwise
             payloadLengthBytes[maxInsertPositions] would be read out of bounds. */
            while ((nextPos < bufferInst->maxInsertPositions)
                && (bufferInst->payloadLengthBytes[nextPos] == 0))
            {
                nextPos++;
            }

            if (nextPos == bufferInst->maxInsertPositions)
            {
                /*
                 * Reached the end of the array, so there must be a packet in the first
                 * position instead
                 */
                nextPos = 0;

                /* Increase nextPos until a non-empty slot is found */
                while (bufferInst->payloadLengthBytes[nextPos] == 0)
                {
                    nextPos++;
                }
            }
        } /* end if-else */

        /*
         * Check if the new payload will extend into a payload later in memory.
         * If so, the buffer is full.
         */
        if ((bufferInst->currentMemoryPos <= bufferInst->payloadLocation[nextPos])
            && ((&bufferInst->currentMemoryPos[(RTPpacket->payloadLen + 1) >> 1])
                > bufferInst->payloadLocation[nextPos]))
        {
            /* Buffer is full, so the buffer must be flushed */
            WebRtcNetEQ_PacketBufferFlush(bufferInst);
            *flushed = 1;
        }

        if (bufferInst->payloadLengthBytes[bufferInst->insertPosition] != 0)
        {
            /* All positions are already taken and entire buffer should be flushed */
            WebRtcNetEQ_PacketBufferFlush(bufferInst);
            *flushed = 1;
        }
    }
    else
    {
        /* Buffer is empty, just insert the packet at the beginning */
        bufferInst->currentMemoryPos = bufferInst->startPayloadMemory;
        bufferInst->insertPosition = 0;
    }

    /* Insert packet in the found position */
    if (RTPpacket->starts_byte1 == 0)
    {
        /* Payload is 16-bit aligned => just copy it */
        WEBRTC_SPL_MEMCPY_W16(bufferInst->currentMemoryPos,
            RTPpacket->payload, (RTPpacket->payloadLen + 1) >> 1);
    }
    else
    {
        /* Payload is not 16-bit aligned => align it during copy operation */
        for (i = 0; i < RTPpacket->payloadLen; i++)
        {
            /* copy the (i+1)-th byte to the i-th byte */
            WEBRTC_SPL_SET_BYTE(bufferInst->currentMemoryPos,
                (WEBRTC_SPL_GET_BYTE(RTPpacket->payload, (i + 1))), i);
        }
    }

    /* Copy the packet information */
    bufferInst->payloadLocation[bufferInst->insertPosition] = bufferInst->currentMemoryPos;
    bufferInst->payloadLengthBytes[bufferInst->insertPosition] = RTPpacket->payloadLen;
    bufferInst->payloadType[bufferInst->insertPosition] = RTPpacket->payloadType;
    bufferInst->seqNumber[bufferInst->insertPosition] = RTPpacket->seqNumber;
    bufferInst->timeStamp[bufferInst->insertPosition] = RTPpacket->timeStamp;
    bufferInst->rcuPlCntr[bufferInst->insertPosition] = RTPpacket->rcuPlCntr;

    /* Update buffer parameters */
    bufferInst->numPacketsInBuffer++;
    bufferInst->currentMemoryPos += (RTPpacket->payloadLen + 1) >> 1;

#ifdef NETEQ_DELAY_LOGGING
    /* special code for offline delay logging */
    if (*flushed)
    {
        temp_var = NETEQ_DELAY_LOGGING_SIGNAL_FLUSH;
        fwrite( &temp_var, sizeof(int), 1, delay_fid2 );
    }
    temp_var = NETEQ_DELAY_LOGGING_SIGNAL_RECIN;
    fwrite( &temp_var, sizeof(int), 1, delay_fid2 );
    fwrite( &RTPpacket->timeStamp, sizeof(WebRtc_UWord32), 1, delay_fid2 );
    fwrite( &RTPpacket->seqNumber, sizeof(WebRtc_UWord16), 1, delay_fid2 );
    fwrite( &RTPpacket->payloadType, sizeof(int), 1, delay_fid2 );
    fwrite( &RTPpacket->payloadLen, sizeof(WebRtc_Word16), 1, delay_fid2 );
    tot_received_packets++;
#endif /* NETEQ_DELAY_LOGGING */

    return (0);
}
/*
 * Extract the packet stored in slot bufferPosition: copy its payload and
 * meta data into RTPpacket, then release the slot and decrement the packet
 * count. Returns 0 on success or a negative NetEQ error code.
 */
int WebRtcNetEQ_PacketBufferExtract(PacketBuf_t *bufferInst, RTPPacket_t *RTPpacket,
                                    int bufferPosition)
{
    /* The buffer must have been initialized before anything can be read */
    if (bufferInst->startPayloadMemory == NULL)
    {
        return (PBUFFER_NOT_INITIALIZED);
    }

    /* Reject positions outside the slot array */
    if ((bufferPosition < 0) || (bufferPosition >= bufferInst->maxInsertPositions))
    {
        return (NETEQ_OTHER_ERROR);
    }

    /* A zero (or negative) length means the slot holds no packet */
    if (bufferInst->payloadLengthBytes[bufferPosition] <= 0)
    {
        RTPpacket->payloadLen = 0; /* signal "no payload" to the caller */
        return (PBUFFER_NONEXISTING_PACKET);
    }

    /* Copy the payload data; the byte length is converted to a WebRtc_Word16
     count, rounding up for odd byte counts. */
    WEBRTC_SPL_MEMCPY_W16((WebRtc_Word16*) RTPpacket->payload,
        bufferInst->payloadLocation[bufferPosition],
        (bufferInst->payloadLengthBytes[bufferPosition] + 1) >> 1);

    /* Fill in the packet meta data from the slot arrays */
    RTPpacket->payloadLen = bufferInst->payloadLengthBytes[bufferPosition];
    RTPpacket->payloadType = bufferInst->payloadType[bufferPosition];
    RTPpacket->seqNumber = bufferInst->seqNumber[bufferPosition];
    RTPpacket->timeStamp = bufferInst->timeStamp[bufferPosition];
    RTPpacket->rcuPlCntr = bufferInst->rcuPlCntr[bufferPosition];
    RTPpacket->starts_byte1 = 0; /* stored payloads are always 16-bit aligned */

    /* Release the slot */
    bufferInst->payloadType[bufferPosition] = -1;
    bufferInst->payloadLengthBytes[bufferPosition] = 0;
    bufferInst->seqNumber[bufferPosition] = 0;
    bufferInst->timeStamp[bufferPosition] = 0;
    bufferInst->payloadLocation[bufferPosition] = bufferInst->startPayloadMemory;

    /* One packet less in the buffer now */
    bufferInst->numPacketsInBuffer--;

    return (0);
}
/*
 * Find the stored packet whose timestamp is closest to (i.e. lowest relative
 * to) currentTS, optionally discarding packets that are older than currentTS
 * on the way. Ties on timestamp are broken in favor of the lower RCU payload
 * counter. On return, *bufferPosition is -1 if no packet was found; otherwise
 * it is the winning slot and *timestamp / *payloadType describe that packet.
 */
int WebRtcNetEQ_PacketBufferFindLowestTimestamp(PacketBuf_t *bufferInst,
                                                WebRtc_UWord32 currentTS,
                                                WebRtc_UWord32 *timestamp,
                                                int *bufferPosition, int eraseOldPkts,
                                                WebRtc_Word16 *payloadType)
{
    WebRtc_Word32 bestDiff = WEBRTC_SPL_WORD32_MAX; /* smallest timestamp diff so far */
    WebRtc_Word16 bestRcu = WEBRTC_SPL_WORD16_MAX; /* RCU counter of best candidate */
    WebRtc_Word32 diff;
    int slot;

    /* The buffer must have been initialized */
    if (bufferInst->startPayloadMemory == NULL)
    {
        return (PBUFFER_NOT_INITIALIZED);
    }

    /* Default outputs signal "nothing found" */
    *timestamp = 0;
    *payloadType = -1;
    *bufferPosition = -1;

    /* Nothing to search in an empty buffer */
    if (bufferInst->numPacketsInBuffer <= 0)
    {
        return (0);
    }

    /* Examine every slot */
    for (slot = 0; slot < bufferInst->maxInsertPositions; slot++)
    {
        /* Occupied slots have a positive payload length */
        int slotOccupied = (bufferInst->payloadLengthBytes[slot] > 0);

        /* Signed difference; negative means this packet is older than currentTS */
        diff = (WebRtc_Word32) (bufferInst->timeStamp[slot] - currentTS);

        if (slotOccupied && (eraseOldPkts) && (diff < 0) && (diff > -30000))
        {
            /* Packet is outdated (the -30000 bound guards against treating a
             timestamp wrap-around as "old"): discard it. */
            bufferInst->payloadType[slot] = -1;
            bufferInst->payloadLengthBytes[slot] = 0;
            bufferInst->numPacketsInBuffer--;
            /* record the discard in both in-call and lifetime statistics */
            bufferInst->discardedPackets++;
            bufferInst->totalDiscardedPackets++;
        }
        else if (slotOccupied && ((diff < bestDiff) || ((diff == bestDiff)
            && (bufferInst->rcuPlCntr[slot] < bestRcu))))
        {
            /* Better candidate: strictly smaller timestamp diff, or equal diff
             with a preferred (lower) RCU payload counter. */
            *bufferPosition = slot;
            *payloadType = bufferInst->payloadType[slot];
            bestDiff = diff;
            bestRcu = bufferInst->rcuPlCntr[slot];
        }
    }

    /* Report the timestamp of the winning slot, if any */
    if (*bufferPosition >= 0)
    {
        *timestamp = bufferInst->timeStamp[*bufferPosition];
    }

    return 0;
}
/*
 * Estimate the total audio content of the buffer, in samples: the number of
 * stored packets multiplied by the sample count of the most recently decoded
 * packet. Never returns a negative value.
 */
WebRtc_Word32 WebRtcNetEQ_PacketBufferGetSize(const PacketBuf_t *bufferInst)
{
    int slot;
    int numPackets = 0;
    WebRtc_Word32 sizeSamples;

    /* A slot holds a packet iff its payload length is non-zero */
    for (slot = 0; slot < bufferInst->maxInsertPositions; slot++)
    {
        if (bufferInst->payloadLengthBytes[slot] != 0)
        {
            numPackets++;
        }
    }

    /* Packet count times the duration (in samples) of the last decoded packet */
    sizeSamples = WEBRTC_SPL_MUL_16_16(bufferInst->packSizeSamples, numPackets);

    /* Clamp a negative product (sanity guard) to zero */
    if (sizeSamples < 0)
    {
        sizeSamples = 0;
    }

    return sizeSamples;
}
int WebRtcNetEQ_GetDefaultCodecSettings(const enum WebRtcNetEQDecoder *codecID,
int noOfCodecs, int *maxBytes, int *maxSlots)
{
int i;
int ok = 0;
WebRtc_Word16 w16_tmp;
WebRtc_Word16 codecBytes;
WebRtc_Word16 codecBuffers;
/* Initialize return variables to zero */
*maxBytes = 0;
*maxSlots = 0;
/* Loop through all codecs supplied to function */
for (i = 0; i < noOfCodecs; i++)
{
/* Find current codec and set parameters accordingly */
if (codecID[i] == kDecoderPCMu)
{
codecBytes = 1680; /* Up to 210ms @ 64kbps */
codecBuffers = 30; /* Down to 5ms frames */
}
else if (codecID[i] == kDecoderPCMa)
{
codecBytes = 1680; /* Up to 210ms @ 64kbps */
codecBuffers = 30; /* Down to 5ms frames */
}
else if (codecID[i] == kDecoderILBC)
{
codecBytes = 380; /* 200ms @ 15.2kbps (20ms frames) */
codecBuffers = 10;
}
else if (codecID[i] == kDecoderISAC)
{
codecBytes = 960; /* 240ms @ 32kbps (60ms frames) */
codecBuffers = 8;
}
else if (codecID[i] == kDecoderISACswb)
{
codecBytes = 1560; /* 240ms @ 52kbps (30ms frames) */
codecBuffers = 8;
}
else if (codecID[i] == kDecoderPCM16B)
{
codecBytes = 3360; /* 210ms */
codecBuffers = 15;
}
else if (codecID[i] == kDecoderPCM16Bwb)
{
codecBytes = 6720; /* 210ms */
codecBuffers = 15;
}
else if (codecID[i] == kDecoderPCM16Bswb32kHz)
{
codecBytes = 13440; /* 210ms */
codecBuffers = 15;
}
else if (codecID[i] == kDecoderPCM16Bswb48kHz)
{
codecBytes = 20160; /* 210ms */
codecBuffers = 15;
}
else if (codecID[i] == kDecoderG722)
{
codecBytes = 1680; /* 210ms @ 64kbps */
codecBuffers = 15;
}
else if (codecID[i] == kDecoderRED)
{
codecBytes = 0; /* Should not be max... */
codecBuffers = 0;
}
else if (codecID[i] == kDecoderAVT)
{
codecBytes = 0; /* Should not be max... */
codecBuffers = 0;
}
else if (codecID[i] == kDecoderCNG)
{
codecBytes = 0; /* Should not be max... */
codecBuffers = 0;
}
else if (codecID[i] == kDecoderG729)
{
codecBytes = 210; /* 210ms @ 8kbps */
codecBuffers = 20; /* max 200ms supported for 10ms frames */
}
else if (codecID[i] == kDecoderG729_1)
{
codecBytes = 840; /* 210ms @ 32kbps */
codecBuffers = 10; /* max 200ms supported for 20ms frames */
}
else if (codecID[i] == kDecoderG726_16)
{
codecBytes = 400; /* 200ms @ 16kbps */
codecBuffers = 10;
}
else if (codecID[i] == kDecoderG726_24)
{
codecBytes = 600; /* 200ms @ 24kbps */
codecBuffers = 10;
}
else if (codecID[i] == kDecoderG726_32)
{
codecBytes = 800; /* 200ms @ 32kbps */
codecBuffers = 10;
}
else if (codecID[i] == kDecoderG726_40)
{
codecBytes = 1000; /* 200ms @ 40kbps */
codecBuffers = 10;
}
else if (codecID[i] == kDecoderG722_1_16)
{
codecBytes = 420; /* 210ms @ 16kbps */
codecBuffers = 10;
}
else if (codecID[i] == kDecoderG722_1_24)
{
codecBytes = 630; /* 210ms @ 24kbps */
codecBuffers = 10;
}
else if (codecID[i] == kDecoderG722_1_32)
{
codecBytes = 840; /* 210ms @ 32kbps */
codecBuffers = 10;
}
else if (codecID[i] == kDecoderG722_1C_24)
{
codecBytes = 630; /* 210ms @ 24kbps */
codecBuffers = 10;
}
else if (codecID[i] == kDecoderG722_1C_32)
{
codecBytes = 840; /* 210ms @ 32kbps */
codecBuffers = 10;
}
else if (codecID[i] == kDecoderG722_1C_48)
{
codecBytes = 1260; /* 210ms @ 48kbps */
codecBuffers = 10;
}
else if (codecID[i] == kDecoderSPEEX_8)
{
codecBytes = 1250; /* 210ms @ 50kbps */
codecBuffers = 10;
}
else if (codecID[i] == kDecoderSPEEX_16)
{
codecBytes = 1250; /* 210ms @ 50kbps */
codecBuffers = 10;
}
else if (codecID[i] == kDecoderGSMFR)
{
codecBytes = 340; /* 200ms */
codecBuffers = 10;
}
else if (codecID[i] == kDecoderAMR)
{
codecBytes = 384; /* 240ms @ 12.2kbps+headers (60ms frames) */
codecBuffers = 10;
}
else if (codecID[i] == kDecoderAMRWB)
{
codecBytes = 744;
codecBuffers = 10;
}
else if (codecID[i] == kDecoderArbitrary)
{
codecBytes = 6720; /* Assume worst case uncompressed WB 210ms */
codecBuffers = 15;
}
else
{
/*Unknow codec */
codecBytes = 0;
codecBuffers = 0;
ok = CODEC_DB_UNKNOWN_CODEC;
}
/* Update max variables */
*maxBytes = WEBRTC_SPL_MAX((*maxBytes), codecBytes);
*maxSlots = WEBRTC_SPL_MAX((*maxSlots), codecBuffers);
} /* end of for loop */
/*
* Add size needed by the additional pointers for each slot inside struct,
* as indicated on each line below.
*/
w16_tmp = (sizeof(WebRtc_UWord32) /* timeStamp */
+ sizeof(WebRtc_Word16*) /* payloadLocation */
+ sizeof(WebRtc_UWord16) /* seqNumber */
+ sizeof(WebRtc_Word16) /* payloadType */
+ sizeof(WebRtc_Word16) /* payloadLengthBytes */
+ sizeof(WebRtc_Word16)); /* rcuPlCntr */
/* Add the extra size per slot to the memory count */
*maxBytes += w16_tmp * (*maxSlots);
return ok;
}

View File

@ -1,206 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* Interface for the actual packet buffer data structure.
*/
#ifndef PACKET_BUFFER_H
#define PACKET_BUFFER_H
#include "typedefs.h"
#include "webrtc_neteq.h"
#include "rtp.h"
/* Define minimum allowed buffer memory, in 16-bit words */
#define PBUFFER_MIN_MEMORY_SIZE 150
/****************************/
/* The packet buffer struct */
/****************************/
/* The packet buffer: a fixed number of meta-data slots plus one contiguous
 payload memory area used as a circular byte store. A slot is "empty" when
 its payloadLengthBytes entry is zero. */
typedef struct
{

    /* Variables common to the entire buffer */
    WebRtc_UWord16 packSizeSamples; /* packet size in samples of last decoded packet */
    WebRtc_Word16 *startPayloadMemory; /* pointer to the payload memory */
    int memorySizeW16; /* the size (in WebRtc_Word16) of the payload memory */
    WebRtc_Word16 *currentMemoryPos; /* The memory position to insert next payload */
    int numPacketsInBuffer; /* The number of packets in the buffer */
    int insertPosition; /* The position to insert next packet */
    int maxInsertPositions; /* Maximum number of packets allowed */

    /* Arrays with one entry per packet slot */
    /* NOTE: If these are changed, the changes must be accounted for at the end of
     the function WebRtcNetEQ_GetDefaultCodecSettings(). */
    WebRtc_UWord32 *timeStamp; /* Timestamp in slot n */
    WebRtc_Word16 **payloadLocation; /* Memory location of payload in slot n */
    WebRtc_UWord16 *seqNumber; /* Sequence number in slot n */
    WebRtc_Word16 *payloadType; /* Payload type of packet in slot n (-1 = empty) */
    WebRtc_Word16 *payloadLengthBytes; /* Payload length of packet in slot n (0 = empty) */
    WebRtc_Word16 *rcuPlCntr; /* zero for non-RCU payload, 1 for main payload
     2 for redundant payload */

    /* Statistics counters */
    WebRtc_UWord16 discardedPackets; /* Number of discarded packets */
    WebRtc_UWord32 totalDiscardedPackets; /* Total number of discarded packets */
    WebRtc_UWord32 totalFlushedPackets; /* Total number of flushed packets */

} PacketBuf_t;
/*************************/
/* Function declarations */
/*************************/
/****************************************************************************
* WebRtcNetEQ_PacketBufferInit(...)
*
* This function initializes the packet buffer.
*
* Input:
* - bufferInst : Buffer instance to be initialized
* - noOfPackets : Maximum number of packets that buffer should hold
* - memory : Pointer to the storage memory for the payloads
* - memorySize : The size of the payload memory (in WebRtc_Word16)
*
* Output:
* - bufferInst : Updated buffer instance
*
* Return value : 0 - Ok
* <0 - Error
*/
int WebRtcNetEQ_PacketBufferInit(PacketBuf_t *bufferInst, int maxNoOfPackets,
WebRtc_Word16 *pw16_memory, int memorySize);
/****************************************************************************
* WebRtcNetEQ_PacketBufferFlush(...)
*
* This function flushes all the packets in the buffer.
*
* Input:
* - bufferInst : Buffer instance to be flushed
*
* Output:
* - bufferInst : Flushed buffer instance
*
* Return value : 0 - Ok
*/
int WebRtcNetEQ_PacketBufferFlush(PacketBuf_t *bufferInst);
/****************************************************************************
* WebRtcNetEQ_PacketBufferInsert(...)
*
* This function inserts an RTP packet into the packet buffer.
*
* Input:
* - bufferInst : Buffer instance
* - RTPpacket : An RTP packet struct (with payload, sequence
* number, etc.)
*
* Output:
* - bufferInst : Updated buffer instance
* - flushed : 1 if buffer was flushed, 0 otherwise
*
* Return value : 0 - Ok
* -1 - Error
*/
int WebRtcNetEQ_PacketBufferInsert(PacketBuf_t *bufferInst, const RTPPacket_t *RTPpacket,
WebRtc_Word16 *flushed);
/****************************************************************************
* WebRtcNetEQ_PacketBufferExtract(...)
*
* This function extracts a payload from the buffer.
*
* Input:
* - bufferInst : Buffer instance
* - bufferPosition: Position of the packet that should be extracted
*
* Output:
* - RTPpacket : An RTP packet struct (with payload, sequence
* number, etc)
* - bufferInst : Updated buffer instance
*
* Return value : 0 - Ok
* <0 - Error
*/
int WebRtcNetEQ_PacketBufferExtract(PacketBuf_t *bufferInst, RTPPacket_t *RTPpacket,
int bufferPosition);
/****************************************************************************
* WebRtcNetEQ_PacketBufferFindLowestTimestamp(...)
*
* This function finds the next packet with the lowest timestamp.
*
* Input:
* - bufferInst : Buffer instance
* - currentTS : The timestamp to compare packet timestamps with
* - eraseOldPkts : If non-zero, erase packets older than currentTS
*
* Output:
* - timestamp : Lowest timestamp that was found
* - bufferPosition: Position of this packet (-1 if there are no packets
* in the buffer)
* - payloadType : Payload type of the found payload
*
* Return value : 0 - Ok
* <0 - Error
*/
int WebRtcNetEQ_PacketBufferFindLowestTimestamp(PacketBuf_t *bufferInst,
WebRtc_UWord32 currentTS,
WebRtc_UWord32 *timestamp,
int *bufferPosition, int eraseOldPkts,
WebRtc_Word16 *payloadType);
/****************************************************************************
* WebRtcNetEQ_PacketBufferGetSize(...)
*
* Calculate and return an estimate of the total data length (in samples)
* currently in the buffer. The estimate is calculated as the number of
* packets currently in the buffer (which does not have any remaining waiting
* time), multiplied with the number of samples obtained from the last
* decoded packet.
*
* Input:
* - bufferInst : Buffer instance
*
* Return value : The buffer size in samples
*/
WebRtc_Word32 WebRtcNetEQ_PacketBufferGetSize(const PacketBuf_t *bufferInst);
/****************************************************************************
* WebRtcNetEQ_GetDefaultCodecSettings(...)
*
* Calculates a recommended buffer size for a specific set of codecs.
*
* Input:
* - codecID : An array of codec types that will be used
* - noOfCodecs : Number of codecs in array codecID
*
* Output:
* - maxBytes : Recommended buffer memory size in bytes
* - maxSlots : Recommended number of slots in buffer
*
* Return value : 0 - Ok
* <0 - Error
*/
int WebRtcNetEQ_GetDefaultCodecSettings(const enum WebRtcNetEQDecoder *codecID,
int noOfCodecs, int *maxBytes, int *maxSlots);
#endif /* PACKET_BUFFER_H */

View File

@ -1,232 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* Implementation of the peak detection used for finding correlation peaks.
*/
#include "dsp_helpfunctions.h"
#include "signal_processing_library.h"
/* Table of constants used in parabolic fit function WebRtcNetEQ_PrblFit */
/* Each row holds three fixed-point constants consumed by WebRtcNetEQ_PrblFit:
 column 0 is compared (scaled by the denominator) against the numerator to
 select a row, and columns 1 and 2 weight Den and Num in the interpolated
 output value. Rows are indexed through the PFind[] lookup built per fs_mult.
 NOTE(review): the exact derivation of these constants is not visible here —
 presumably sample points of the fitted parabola; confirm against the
 original NetEQ design docs before editing. */
const WebRtc_Word16 WebRtcNetEQ_kPrblCf[17][3] = { { 120, 32, 64 }, { 140, 44, 75 },
                                                   { 150, 50, 80 }, { 160, 57, 85 },
                                                   { 180, 72, 96 }, { 200, 89, 107 },
                                                   { 210, 98, 112 }, { 220, 108, 117 },
                                                   { 240, 128, 128 }, { 260, 150, 139 },
                                                   { 270, 162, 144 }, { 280, 174, 149 },
                                                   { 300, 200, 160 }, { 320, 228, 171 },
                                                   { 330, 242, 176 }, { 340, 257, 181 },
                                                   { 360, 288, 192 } };
/*
 * Locate the w16_nmbPeaks largest peaks in pw16_data[0..w16_dataLen-1].
 * For each peak, the index (scaled to the fs_mult sample grid) is written to
 * pw16_winIndex[] and the (parabola-refined) value to pw16_winValue[].
 * NOTE: pw16_data is modified in place — a window around each found peak is
 * zeroed so the next iteration finds the next-largest peak. Returns 0.
 */
WebRtc_Word16 WebRtcNetEQ_PeakDetection(WebRtc_Word16 *pw16_data, WebRtc_Word16 w16_dataLen,
                                        WebRtc_Word16 w16_nmbPeaks, WebRtc_Word16 fs_mult,
                                        WebRtc_Word16 *pw16_winIndex,
                                        WebRtc_Word16 *pw16_winValue)
{
    /* Local variables */
    int i;
    WebRtc_Word16 w16_tmp;
    WebRtc_Word16 w16_tmp2;
    WebRtc_Word16 indMin = 0; /* low edge of the zeroing window around a found peak */
    WebRtc_Word16 indMax = 0; /* high edge of the zeroing window */

    /* Peak detection */
    for (i = 0; i <= (w16_nmbPeaks - 1); i++)
    {
        if (w16_nmbPeaks == 1)
        {
            /*
             * Single peak
             * The parabola fit assumes that an extra point is available; worst case it gets
             * a zero on the high end of the signal.
             */
            w16_dataLen++;
        }

        /* index of the current global maximum */
        pw16_winIndex[i] = WebRtcSpl_MaxIndexW16(pw16_data, (WebRtc_Word16) (w16_dataLen - 1));

        if (i != w16_nmbPeaks - 1)
        {
            /* Compute the window [indMin, indMax] (clamped to the data range)
             that will be zeroed after processing this peak. */
            w16_tmp = pw16_winIndex[i] - 2; /* *fs_mult; */
            indMin = WEBRTC_SPL_MAX(0, w16_tmp);
            w16_tmp = pw16_winIndex[i] + 2; /* *fs_mult; */
            w16_tmp2 = w16_dataLen - 1;
            indMax = WEBRTC_SPL_MIN(w16_tmp2, w16_tmp);
        }

        if ((pw16_winIndex[i] != 0) && (pw16_winIndex[i] != (w16_dataLen - 2)))
        {
            /* Interior peak with neighbors on both sides: refine with a
             parabola fit around the three points centered on the maximum. */
            WebRtcNetEQ_PrblFit(&(pw16_data[pw16_winIndex[i] - 1]), &(pw16_winIndex[i]),
                &(pw16_winValue[i]), fs_mult);
        }
        else
        {
            if (pw16_winIndex[i] == (w16_dataLen - 2))
            {
                /* Peak at the second-to-last sample: parabola fit only if the
                 last sample is smaller, otherwise approximate linearly. */
                if (pw16_data[pw16_winIndex[i]] > pw16_data[pw16_winIndex[i] + 1])
                {
                    WebRtcNetEQ_PrblFit(&(pw16_data[pw16_winIndex[i] - 1]),
                        &(pw16_winIndex[i]), &(pw16_winValue[i]), fs_mult);
                }
                else if (pw16_data[pw16_winIndex[i]] <= pw16_data[pw16_winIndex[i] + 1])
                {
                    pw16_winValue[i] = (pw16_data[pw16_winIndex[i]]
                        + pw16_data[pw16_winIndex[i] + 1]) >> 1; /* lin approx */
                    pw16_winIndex[i] = (pw16_winIndex[i] * 2 + 1) * fs_mult;
                }
            }
            else
            {
                /* Peak at index 0: no refinement possible, just rescale */
                pw16_winValue[i] = pw16_data[pw16_winIndex[i]];
                pw16_winIndex[i] = pw16_winIndex[i] * 2 * fs_mult;
            }
        }

        if (i != w16_nmbPeaks - 1)
        {
            /* Zero out this peak so the next loop iteration finds a new one */
            WebRtcSpl_MemSetW16(&(pw16_data[indMin]), 0, (indMax - indMin + 1));
            /* for (j=indMin; j<=indMax; j++) pw16_data[j] = 0; */
        }
    }

    return 0;
}
/*
 * Fixed-point parabola fit through the three points pw16_3pts[0..2]
 * (the sample at *pw16_Ind and its two neighbors).
 *
 * On return, *pw16_outVal is the interpolated peak value and *pw16_Ind the
 * peak index rescaled to the 2*fs_mult grid, with a sub-sample offset of up
 * to fs_mult steps on either side. The offset is found by stepping through
 * thresholds taken from the WebRtcNetEQ_kPrblCf table; PFind[] maps the
 * offset step to the proper table row for the given fs_mult (1, 2, 4, or
 * — presumably — 6 in the final else branch; confirm supported fs_mult set).
 * Always returns 0.
 */
WebRtc_Word16 WebRtcNetEQ_PrblFit(WebRtc_Word16 *pw16_3pts, WebRtc_Word16 *pw16_Ind,
                                  WebRtc_Word16 *pw16_outVal, WebRtc_Word16 fs_mult)
{
    /* Variables */
    WebRtc_Word32 Num, Den; /* numerator/denominator of the parabola vertex */
    WebRtc_Word32 temp;
    WebRtc_Word16 flag, stp, strt, lmt; /* step search state: step size, start, limit */
    WebRtc_UWord16 PFind[13]; /* row indices into WebRtcNetEQ_kPrblCf for this fs_mult */

    /* Select the kPrblCf rows used for this sample-rate multiplier */
    if (fs_mult == 1)
    {
        PFind[0] = 0;
        PFind[1] = 8;
        PFind[2] = 16;
    }
    else if (fs_mult == 2)
    {
        PFind[0] = 0;
        PFind[1] = 4;
        PFind[2] = 8;
        PFind[3] = 12;
        PFind[4] = 16;
    }
    else if (fs_mult == 4)
    {
        PFind[0] = 0;
        PFind[1] = 2;
        PFind[2] = 4;
        PFind[3] = 6;
        PFind[4] = 8;
        PFind[5] = 10;
        PFind[6] = 12;
        PFind[7] = 14;
        PFind[8] = 16;
    }
    else
    {
        PFind[0] = 0;
        PFind[1] = 1;
        PFind[2] = 3;
        PFind[3] = 4;
        PFind[4] = 5;
        PFind[5] = 7;
        PFind[6] = 8;
        PFind[7] = 9;
        PFind[8] = 11;
        PFind[9] = 12;
        PFind[10] = 13;
        PFind[11] = 15;
        PFind[12] = 16;
    }

    /* Num = -3*pw16_3pts[0] + 4*pw16_3pts[1] - pw16_3pts[2]; */
    /* Den = pw16_3pts[0] - 2*pw16_3pts[1] + pw16_3pts[2]; */
    Num = WEBRTC_SPL_MUL_16_16(pw16_3pts[0],-3) + WEBRTC_SPL_MUL_16_16(pw16_3pts[1],4)
        - pw16_3pts[2];
    Den = pw16_3pts[0] + WEBRTC_SPL_MUL_16_16(pw16_3pts[1],-2) + pw16_3pts[2];

    /* scale numerator so it can be compared against the table thresholds */
    temp = (WebRtc_Word32) WEBRTC_SPL_MUL(Num, (WebRtc_Word32)120); /* need 32_16 really */
    flag = 1;
    stp = WebRtcNetEQ_kPrblCf[PFind[fs_mult]][0] - WebRtcNetEQ_kPrblCf[PFind[fs_mult - 1]][0];
    strt = (WebRtcNetEQ_kPrblCf[PFind[fs_mult]][0]
        + WebRtcNetEQ_kPrblCf[PFind[fs_mult - 1]][0]) >> 1;

    if (temp < (WebRtc_Word32) WEBRTC_SPL_MUL(-Den,(WebRtc_Word32)strt))
    {
        /* Vertex lies to the LEFT of the center point: step downwards until
         the bracketing threshold is found (at most fs_mult steps). */
        lmt = strt - stp;
        while (flag)
        {
            if ((flag == fs_mult) || (temp
                > (WebRtc_Word32) WEBRTC_SPL_MUL(-Den,(WebRtc_Word32)lmt)))
            {
                /* Interpolate the value with the table coefficients of the
                 selected row; result is in Q8 before the final shift. */
                *pw16_outVal
                    = (WebRtc_Word16)
                    (((WebRtc_Word32) ((WebRtc_Word32) WEBRTC_SPL_MUL(Den,(WebRtc_Word32)WebRtcNetEQ_kPrblCf[PFind[fs_mult-flag]][1])
                        + (WebRtc_Word32) WEBRTC_SPL_MUL(Num,(WebRtc_Word32)WebRtcNetEQ_kPrblCf[PFind[fs_mult-flag]][2])
                        + WEBRTC_SPL_MUL_16_16(pw16_3pts[0],256))) >> 8);
                *pw16_Ind = (*pw16_Ind) * (fs_mult << 1) - flag;
                flag = 0;
            }
            else
            {
                flag++;
                lmt -= stp;
            }
        }
    }
    else if (temp > (WebRtc_Word32) WEBRTC_SPL_MUL(-Den,(WebRtc_Word32)(strt+stp)))
    {
        /* Vertex lies to the RIGHT of the center point: step upwards until
         the bracketing threshold is found (at most fs_mult steps). */
        lmt = strt + (stp << 1);
        while (flag)
        {
            if ((flag == fs_mult) || (temp
                < (WebRtc_Word32) WEBRTC_SPL_MUL(-Den,(WebRtc_Word32)lmt)))
            {
                WebRtc_Word32 temp_term_1, temp_term_2, temp_term_3;
                temp_term_1 = WEBRTC_SPL_MUL(Den,
                    (WebRtc_Word32) WebRtcNetEQ_kPrblCf[PFind[fs_mult+flag]][1]);
                temp_term_2 = WEBRTC_SPL_MUL(Num,
                    (WebRtc_Word32) WebRtcNetEQ_kPrblCf[PFind[fs_mult+flag]][2]);
                temp_term_3 = WEBRTC_SPL_MUL_16_16(pw16_3pts[0],256);
                *pw16_outVal
                    = (WebRtc_Word16) ((temp_term_1 + temp_term_2 + temp_term_3) >> 8);
                *pw16_Ind = (*pw16_Ind) * (fs_mult << 1) + flag;
                flag = 0;
            }
            else
            {
                flag++;
                lmt += stp;
            }
        }
    }
    else
    {
        /* Vertex is at (or close enough to) the center point: keep the
         middle sample value and just rescale the index. */
        *pw16_outVal = pw16_3pts[1];
        *pw16_Ind = (*pw16_Ind) * 2 * fs_mult;
    }

    return 0;
}

View File

@ -1,525 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* This file contains the Pre-emptive Expand algorithm that is used to increase
* the delay by repeating a part of the audio stream.
*/
#include "dsp.h"
#include "signal_processing_library.h"
#include "dsp_helpfunctions.h"
#include "neteq_error_codes.h"
#define PREEMPTIVE_CORR_LEN 50
#define PREEMPTIVE_MIN_LAG 10
#define PREEMPTIVE_MAX_LAG 60
#define PREEMPTIVE_DOWNSAMPLED_LEN (PREEMPTIVE_CORR_LEN + PREEMPTIVE_MAX_LAG)
/* Scratch usage:
Type Name size startpos endpos
WebRtc_Word16 pw16_downSampSpeech 110 0 109
WebRtc_Word32 pw32_corr 2*50 110 209
WebRtc_Word16 pw16_corr 50 0 49
Total: 110+2*50
*/
#define SCRATCH_PW16_DS_SPEECH 0
#define SCRATCH_PW32_CORR PREEMPTIVE_DOWNSAMPLED_LEN
#define SCRATCH_PW16_CORR 0
/****************************************************************************
* WebRtcNetEQ_PreEmptiveExpand(...)
*
* This function tries to extend the audio data by repeating one or several
* pitch periods. The operation is only carried out if the correlation is
* strong or if the signal energy is very low. The algorithm is the
* reciprocal of the Accelerate algorithm.
*
* Input:
* - inst : NetEQ DSP instance
* - scratchPtr : Pointer to scratch vector.
* - decoded : Pointer to newly decoded speech.
* - len : Length of decoded speech.
* - oldDataLen : Length of the part of decoded that has already been played out.
* - BGNonly : If non-zero, Pre-emptive Expand will only copy
* the first DEFAULT_TIME_ADJUST seconds of the
* input and append to the end. No signal matching is
* done.
*
* Output:
* - inst : Updated instance
* - outData : Pointer to a memory space where the output data
* should be stored. The vector must be at least
* min(len + 120*fs/8000, NETEQ_MAX_OUTPUT_SIZE)
* elements long.
* - pw16_len : Number of samples written to outData.
*
* Return value : 0 - Ok
* <0 - Error
*/
int WebRtcNetEQ_PreEmptiveExpand(DSPInst_t *inst,
#ifdef SCRATCH
                                 WebRtc_Word16 *pw16_scratchPtr,
#endif
                                 const WebRtc_Word16 *pw16_decoded, int len, int oldDataLen,
                                 WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len,
                                 WebRtc_Word16 BGNonly)
{
#ifdef SCRATCH
    /* Use scratch memory for internal temporary vectors */
    WebRtc_Word16 *pw16_downSampSpeech = pw16_scratchPtr + SCRATCH_PW16_DS_SPEECH;
    WebRtc_Word32 *pw32_corr = (WebRtc_Word32*) (pw16_scratchPtr + SCRATCH_PW32_CORR);
    WebRtc_Word16 *pw16_corr = pw16_scratchPtr + SCRATCH_PW16_CORR;
#else
    /* Allocate memory for temporary vectors */
    WebRtc_Word16 pw16_downSampSpeech[PREEMPTIVE_DOWNSAMPLED_LEN];
    WebRtc_Word32 pw32_corr[PREEMPTIVE_CORR_LEN];
    WebRtc_Word16 pw16_corr[PREEMPTIVE_CORR_LEN];
#endif
    WebRtc_Word16 w16_decodedMax = 0;
    WebRtc_Word16 w16_tmp;
    WebRtc_Word16 w16_tmp2;
    WebRtc_Word32 w32_tmp;
    WebRtc_Word32 w32_tmp2;
    /* Correlation search range in the 4 kHz downsampled domain. */
    const WebRtc_Word16 w16_startLag = PREEMPTIVE_MIN_LAG;
    const WebRtc_Word16 w16_endLag = PREEMPTIVE_MAX_LAG;
    const WebRtc_Word16 w16_corrLen = PREEMPTIVE_CORR_LEN;
    const WebRtc_Word16 *pw16_vec1, *pw16_vec2;
    WebRtc_Word16 *pw16_vectmp;
    WebRtc_Word16 w16_inc, w16_startfact; /* cross-fade step and start factor (Q14) */
    WebRtc_Word16 w16_bestIndex, w16_bestVal; /* best pitch lag (in samples) and its value */
    WebRtc_Word16 w16_VAD = 1; /* 1 = active speech, 0 = passive (see VAD decision below) */
    WebRtc_Word16 fsMult;      /* sample rate expressed as a multiple of 8 kHz */
    WebRtc_Word16 fsMult120;
    WebRtc_Word32 w32_en1, w32_en2, w32_cc; /* energies of the two vectors and their cross-corr */
    WebRtc_Word16 w16_en1, w16_en2;
    WebRtc_Word16 w16_en1Scale, w16_en2Scale;
    WebRtc_Word16 w16_sqrtEn1En2;
    WebRtc_Word16 w16_bestCorr = 0; /* normalized correlation in Q14 */
    int ok;

#ifdef NETEQ_STEREO
    MasterSlaveInfo *msInfo = inst->msInfo;
#endif

    fsMult = WebRtcNetEQ_CalcFsMult(inst->fs); /* Calculate fs/8000 */

    /* Pre-calculate common multiplication with fsMult */
    fsMult120 = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16(fsMult, 120); /* 15 ms */

    inst->ExpandInst.w16_consecExp = 0; /* Last was not expand any more */

    /*
     * Sanity check for len variable; must be (almost) 30 ms (120*fsMult + max(bestIndex)).
     * Also, the new part must be at least .625 ms (w16_overlap).
     */
    if (len < (WebRtc_Word16) WEBRTC_SPL_MUL_16_16((120 + 119), fsMult) || oldDataLen >= len
        - inst->ExpandInst.w16_overlap)
    {
        /* Length of decoded data too short */
        inst->w16_mode = MODE_UNSUCCESS_PREEMPTIVE;
        *pw16_len = len;

        /* simply move all data from decoded to outData */
        WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (WebRtc_Word16) len);

        return NETEQ_OTHER_ERROR;
    }

    /***********************************/
    /* Special operations for BGN only */
    /***********************************/

    /* Check if "background noise only" flag is set */
    if (BGNonly)
    {
        /* special operation for BGN only; simply insert a chunk of data */
        w16_bestIndex = DEFAULT_TIME_ADJUST * (fsMult << 3); /* X*fs/1000 */

        /* Sanity check for bestIndex */
        if (w16_bestIndex > len)
        { /* not good, do nothing instead */
            inst->w16_mode = MODE_UNSUCCESS_PREEMPTIVE;
            *pw16_len = len;

            /* simply move all data from decoded to outData */
            WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (WebRtc_Word16) len);

            return NETEQ_OTHER_ERROR;
        }

        /* set length parameter */
        *pw16_len = len + w16_bestIndex;

        /* copy to output */
        /* The first bestIndex samples of the input are repeated at the end;
           no waveform matching or cross-fading is done in BGN mode. */
        WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, len);
        WEBRTC_SPL_MEMCPY_W16(&pw16_outData[len], pw16_decoded, w16_bestIndex);

        /* set mode */
        inst->w16_mode = MODE_LOWEN_PREEMPTIVE;

        /* update statistics */
        inst->statInst.preemptiveLength += w16_bestIndex;

        return 0;
    } /* end of special code for BGN mode */

#ifdef NETEQ_STEREO

    /* Sanity for msInfo */
    if (msInfo == NULL)
    {
        /* this should not happen here */
        return MASTER_SLAVE_ERROR;
    }

    if ((msInfo->msMode == NETEQ_MASTER) || (msInfo->msMode == NETEQ_MONO))
    {
        /* Find correlation lag only for non-slave instances */

#endif

        /****************************************************************/
        /* Find the strongest correlation lag by downsampling to 4 kHz, */
        /* calculating correlation for downsampled signal and finding   */
        /* the strongest correlation peak.                              */
        /****************************************************************/

        /* find maximum absolute value */
        w16_decodedMax = WebRtcSpl_MaxAbsValueW16(pw16_decoded, (WebRtc_Word16) len);

        /* downsample the decoded speech to 4 kHz */
        ok = WebRtcNetEQ_DownSampleTo4kHz(pw16_decoded, len, inst->fs, pw16_downSampSpeech,
            PREEMPTIVE_DOWNSAMPLED_LEN, 1 /* compensate delay*/);
        if (ok != 0)
        {
            /* error */
            inst->w16_mode = MODE_UNSUCCESS_PREEMPTIVE;
            *pw16_len = len;

            /* simply move all data from decoded to outData */
            WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (WebRtc_Word16) len);

            return NETEQ_OTHER_ERROR;
        }

        /*
         * Set scaling factor for cross correlation to protect against
         * overflow (log2(50) => 6)
         */
        w16_tmp = 6 - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_decodedMax, w16_decodedMax));
        w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);

        /* Perform correlation from lag 10 to lag 60 in 4 kHz domain */
        WebRtcNetEQ_CrossCorr(
            pw32_corr, &pw16_downSampSpeech[w16_endLag],
            &pw16_downSampSpeech[w16_endLag - w16_startLag], w16_corrLen,
            (WebRtc_Word16) (w16_endLag - w16_startLag), w16_tmp, -1);

        /* Normalize correlation to 14 bits and put in a WebRtc_Word16 vector */
        w32_tmp = WebRtcSpl_MaxAbsValueW32(pw32_corr, w16_corrLen);
        w16_tmp = 17 - WebRtcSpl_NormW32(w32_tmp);
        w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);

        WebRtcSpl_VectorBitShiftW32ToW16(pw16_corr, w16_corrLen, pw32_corr, w16_tmp);

        /* Find limits for peak finding, in order to avoid overful NetEQ algorithm buffer. */
        /* Calculate difference between MAX_OUTPUT_SIZE and len in 4 kHz domain. */
        w16_tmp = WebRtcSpl_DivW32W16ResW16((WebRtc_Word32) (NETEQ_MAX_OUTPUT_SIZE - len),
            (WebRtc_Word16) (fsMult << 1)) - w16_startLag;
        w16_tmp = WEBRTC_SPL_MIN(w16_corrLen, w16_tmp); /* no more than corrLen = 50 */

#ifdef NETEQ_STEREO
    } /* end if (msInfo->msMode != NETEQ_SLAVE) */

    if ((msInfo->msMode == NETEQ_MASTER) || (msInfo->msMode == NETEQ_MONO))
    {
        /* Find the strongest correlation peak by using the parabolic fit method */
        WebRtcNetEQ_PeakDetection(pw16_corr, w16_tmp, 1, fsMult, &w16_bestIndex, &w16_bestVal);
        /* 0 <= bestIndex <= (2*w16_tmp - 1)*fsMult <= (2*corrLen - 1)*fsMult = 99*fsMult */

        /* Compensate bestIndex for displaced starting position */
        w16_bestIndex = w16_bestIndex + w16_startLag * WEBRTC_SPL_LSHIFT_W16(fsMult, 1);
        /* 20*fsMult <= bestIndex <= 119*fsMult */

        /* Communicate the lag to the slave instance(s). */
        msInfo->bestIndex = w16_bestIndex;
    }
    else if (msInfo->msMode == NETEQ_SLAVE)
    {
        if (msInfo->extraInfo == PE_EXP_FAIL)
        {
            /* Master has signaled an unsuccessful preemptive expand */
            w16_bestIndex = 0;
        }
        else
        {
            /* Get best index from master */
            w16_bestIndex = msInfo->bestIndex;
        }
    }
    else
    {
        /* Invalid mode */
        return (MASTER_SLAVE_ERROR);
    }

#else /* NETEQ_STEREO */

    /* Find the strongest correlation peak by using the parabolic fit method */
    WebRtcNetEQ_PeakDetection(pw16_corr, w16_tmp, 1, fsMult, &w16_bestIndex, &w16_bestVal);
    /* 0 <= bestIndex <= (2*w16_tmp - 1)*fsMult <= (2*corrLen - 1)*fsMult = 99*fsMult */

    /* Compensate bestIndex for displaced starting position */
    w16_bestIndex = w16_bestIndex + w16_startLag * WEBRTC_SPL_LSHIFT_W16(fsMult, 1);
    /* 20*fsMult <= bestIndex <= 119*fsMult */

#endif /* NETEQ_STEREO */

#ifdef NETEQ_STEREO

    if ((msInfo->msMode == NETEQ_MASTER) || (msInfo->msMode == NETEQ_MONO))
    {
        /* Calculate correlation only for non-slave instances */

#endif /* NETEQ_STEREO */

        /*****************************************************/
        /* Calculate correlation bestCorr for the found lag. */
        /* Also do a simple VAD decision.                    */
        /*****************************************************/

        /*
         * Calculate scaling to ensure that bestIndex samples can be square-summed
         * without overflowing
         */
        /* Total right-shift = bits(maxSample^2) + bits(bestIndex) - 31. */
        w16_tmp = (31
            - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_decodedMax, w16_decodedMax)));
        w16_tmp += (31 - WebRtcSpl_NormW32(w16_bestIndex));
        w16_tmp -= 31;
        w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);

        /* vec1 starts at 15 ms minus one pitch period */
        pw16_vec1 = &pw16_decoded[fsMult120 - w16_bestIndex];
        /* vec2 start at 15 ms */
        pw16_vec2 = &pw16_decoded[fsMult120];

        /* Calculate energies for vec1 and vec2 */
        w32_en1 = WebRtcNetEQ_DotW16W16((WebRtc_Word16*) pw16_vec1,
            (WebRtc_Word16*) pw16_vec1, w16_bestIndex, w16_tmp);
        w32_en2 = WebRtcNetEQ_DotW16W16((WebRtc_Word16*) pw16_vec2,
            (WebRtc_Word16*) pw16_vec2, w16_bestIndex, w16_tmp);

        /* Calculate cross-correlation at the found lag */
        w32_cc = WebRtcNetEQ_DotW16W16((WebRtc_Word16*) pw16_vec1, (WebRtc_Word16*) pw16_vec2,
            w16_bestIndex, w16_tmp);

        /* Check VAD constraint
         ((en1+en2)/(2*bestIndex)) <= 8*inst->BGNInst.energy */
        w32_tmp = WEBRTC_SPL_RSHIFT_W32(w32_en1 + w32_en2, 4); /* (en1+en2)/(2*8) */
        if (inst->BGNInst.w16_initialized == 1)
        {
            w32_tmp2 = inst->BGNInst.w32_energy;
        }
        else
        {
            /* if BGN parameters have not been estimated, use a fixed threshold */
            w32_tmp2 = 75000;
        }

        /* Shift both sides of the comparison down to 16-bit magnitude so the
           product bestIndex * energy below cannot overflow. */
        w16_tmp2 = 16 - WebRtcSpl_NormW32(w32_tmp2);
        w16_tmp2 = WEBRTC_SPL_MAX(0, w16_tmp2);
        w32_tmp = WEBRTC_SPL_RSHIFT_W32(w32_tmp, w16_tmp2);
        w16_tmp2 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_tmp2, w16_tmp2);
        w32_tmp2 = WEBRTC_SPL_MUL_16_16(w16_bestIndex, w16_tmp2);

        /* Scale w32_tmp properly before comparing with w32_tmp2 */
        /* (w16_tmp is scaling before energy calculation, thus 2*w16_tmp) */
        if (WebRtcSpl_NormW32(w32_tmp) < WEBRTC_SPL_LSHIFT_W32(w16_tmp,1))
        {
            /* Cannot scale only w32_tmp, must scale w32_temp2 too */
            WebRtc_Word16 tempshift = WebRtcSpl_NormW32(w32_tmp);
            w32_tmp = WEBRTC_SPL_LSHIFT_W32(w32_tmp, tempshift);
            w32_tmp2 = WEBRTC_SPL_RSHIFT_W32(w32_tmp2,
                WEBRTC_SPL_LSHIFT_W32(w16_tmp,1) - tempshift);
        }
        else
        {
            w32_tmp = WEBRTC_SPL_LSHIFT_W32(w32_tmp,
                WEBRTC_SPL_LSHIFT_W32(w16_tmp,1));
        }

        if (w32_tmp <= w32_tmp2) /*((en1+en2)/(2*bestIndex)) <= 8*inst->BGNInst.energy */
        {
            /* The signal seems to be passive speech */
            w16_VAD = 0;
            w16_bestCorr = 0; /* Correlation does not matter */

            /* For low energy expansion, the new data can be less than 15 ms,
             but we must ensure that bestIndex is not larger than the new data. */
            w16_bestIndex = WEBRTC_SPL_MIN( w16_bestIndex, len - oldDataLen );
        }
        else
        {
            /* The signal is active speech */
            w16_VAD = 1;

            /* Calculate correlation (cc/sqrt(en1*en2)) */

            /* Start with calculating scale values */
            w16_en1Scale = 16 - WebRtcSpl_NormW32(w32_en1);
            w16_en1Scale = WEBRTC_SPL_MAX(0, w16_en1Scale);
            w16_en2Scale = 16 - WebRtcSpl_NormW32(w32_en2);
            w16_en2Scale = WEBRTC_SPL_MAX(0, w16_en2Scale);

            /* Make sure total scaling is even (to simplify scale factor after sqrt) */
            if ((w16_en1Scale + w16_en2Scale) & 1)
            {
                w16_en1Scale += 1;
            }

            /* Convert energies to WebRtc_Word16 */
            w16_en1 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_en1, w16_en1Scale);
            w16_en2 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_en2, w16_en2Scale);

            /* Calculate energy product */
            w32_tmp = WEBRTC_SPL_MUL_16_16(w16_en1, w16_en2);

            /* Calculate square-root of energy product */
            w16_sqrtEn1En2 = (WebRtc_Word16) WebRtcSpl_Sqrt(w32_tmp);

            /* Calculate cc/sqrt(en1*en2) in Q14 */
            w16_tmp = 14 - ((w16_en1Scale + w16_en2Scale) >> 1);
            w32_cc = WEBRTC_SPL_SHIFT_W32(w32_cc, w16_tmp);
            w32_cc = WEBRTC_SPL_MAX(0, w32_cc); /* Don't divide with negative number */
            w16_bestCorr = (WebRtc_Word16) WebRtcSpl_DivW32W16(w32_cc, w16_sqrtEn1En2);
            w16_bestCorr = WEBRTC_SPL_MIN(16384, w16_bestCorr); /* set maximum to 1.0 */
        }

#ifdef NETEQ_STEREO

    } /* end if (msInfo->msMode != NETEQ_SLAVE) */

#endif /* NETEQ_STEREO */

    /*******************************************************/
    /* Check preemptive expand criteria and insert samples */
    /*******************************************************/

    /* Check for strong correlation (>0.9) and at least 15 ms new data,
     or passive speech */
#ifdef NETEQ_STEREO
    /* Slaves blindly follow the master's decision (extraInfo). */
    if (((((w16_bestCorr > 14746) && (oldDataLen <= fsMult120)) || (w16_VAD == 0))
        && (msInfo->msMode != NETEQ_SLAVE)) || ((msInfo->msMode == NETEQ_SLAVE)
        && (msInfo->extraInfo != PE_EXP_FAIL)))
#else
    if (((w16_bestCorr > 14746) && (oldDataLen <= fsMult120))
        || (w16_VAD == 0))
#endif
    {
        /* Do expand operation by overlap add */

        /* Set length of the first part, not to be modified */
        WebRtc_Word16 w16_startIndex = WEBRTC_SPL_MAX(oldDataLen, fsMult120);

        /*
         * Calculate cross-fading slope so that the fading factor goes from
         * 1 (16384 in Q14) to 0 in one pitch period (bestIndex).
         */
        w16_inc = (WebRtc_Word16) WebRtcSpl_DivW32W16((WebRtc_Word32) 16384,
            (WebRtc_Word16) (w16_bestIndex + 1)); /* in Q14 */

        /* Initiate fading factor */
        w16_startfact = 16384 - w16_inc;

        /* vec1 starts at 15 ms minus one pitch period */
        pw16_vec1 = &pw16_decoded[w16_startIndex - w16_bestIndex];
        /* vec2 start at 15 ms */
        pw16_vec2 = &pw16_decoded[w16_startIndex];

        /* Copy unmodified part [0 to 15 ms] */
        WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, w16_startIndex);

        /* Generate interpolated part of length bestIndex (1 pitch period) */
        pw16_vectmp = pw16_outData + w16_startIndex;
        /* Reuse mixing function from Expand */
        WebRtcNetEQ_MixVoiceUnvoice(pw16_vectmp, (WebRtc_Word16*) pw16_vec2,
            (WebRtc_Word16*) pw16_vec1, &w16_startfact, w16_inc, w16_bestIndex);

        /* Move the last part (also unmodified) */
        /* Take from decoded at 15 ms */
        pw16_vec2 = &pw16_decoded[w16_startIndex];
        WEBRTC_SPL_MEMMOVE_W16(&pw16_outData[w16_startIndex + w16_bestIndex], pw16_vec2,
            (WebRtc_Word16) (len - w16_startIndex));

        /* Set the mode flag */
        if (w16_VAD)
        {
            inst->w16_mode = MODE_SUCCESS_PREEMPTIVE;
        }
        else
        {
            inst->w16_mode = MODE_LOWEN_PREEMPTIVE;
        }

        /* Calculate resulting length = original length + pitch period */
        *pw16_len = len + w16_bestIndex;

        /* Update in-call statistics */
        inst->statInst.preemptiveLength += w16_bestIndex;

        return 0;
    }
    else
    {
        /* Preemptive Expand not allowed */

#ifdef NETEQ_STEREO
        /* Signal to slave(s) that this was unsuccessful */
        if (msInfo->msMode == NETEQ_MASTER)
        {
            msInfo->extraInfo = PE_EXP_FAIL;
        }
#endif

        /* Set mode flag to unsuccessful preemptive expand */
        inst->w16_mode = MODE_UNSUCCESS_PREEMPTIVE;

        /* Length is unmodified */
        *pw16_len = len;

        /* Simply move all data from decoded to outData */
        WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (WebRtc_Word16) len);

        return 0;
    }
}
#undef SCRATCH_PW16_DS_SPEECH
#undef SCRATCH_PW32_CORR
#undef SCRATCH_PW16_CORR

View File

@ -1,54 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* This function generates a pseudo-random vector.
*/
#include "dsp_helpfunctions.h"
/*
* Values are normalized so that
* sqrt(dot(pw16_NETEQFIX_RANDN_TBL,pw16_NETEQFIX_RANDN_TBL)/256)=2^13
*/
/* Pre-computed table of 256 Gaussian-like noise samples; indexed by the low
 * bits of the running seed in WebRtcNetEQ_RandomVec() (table length is a
 * power of two). Do not modify the values: the normalization documented
 * above depends on them. */
const WebRtc_Word16 WebRtcNetEQ_kRandnTbl[RANDVEC_NO_OF_SAMPLES] =
{
    2680, 5532, 441, 5520, 16170, -5146, -1024, -8733, 3115, 9598, -10380, -4959, -1280, -21716, 7133, -1522,
    13458, -3902, 2789, -675, 3441, 5016, -13599, -4003, -2739, 3922, -7209, 13352, -11617, -7241, 12905, -2314,
    5426, 10121, -9702, 11207, -13542, 1373, 816, -5934, -12504, 4798, 1811, 4112, -613, 201, -10367, -2960,
    -2419, 3442, 4299, -6116, -6092, 1552, -1650, -480, -1237, 18720, -11858, -8303, -8212, 865, -2890, -16968,
    12052, -5845, -5912, 9777, -5665, -6294, 5426, -4737, -6335, 1652, 761, 3832, 641, -8552, -9084, -5753,
    8146, 12156, -4915, 15086, -1231, -1869, 11749, -9319, -6403, 11407, 6232, -1683, 24340, -11166, 4017, -10448,
    3153, -2936, 6212, 2891, -866, -404, -4807, -2324, -1917, -2388, -6470, -3895, -10300, 5323, -5403, 2205,
    4640, 7022, -21186, -6244, -882, -10031, -3395, -12885, 7155, -5339, 5079, -2645, -9515, 6622, 14651, 15852,
    359, 122, 8246, -3502, -6696, -3679, -13535, -1409, -704, -7403, -4007, 1798, 279, -420, -12796, -14219,
    1141, 3359, 11434, 7049, -6684, -7473, 14283, -4115, -9123, -8969, 4152, 4117, 13792, 5742, 16168, 8661,
    -1609, -6095, 1881, 14380, -5588, 6758, -6425, -22969, -7269, 7031, 1119, -1611, -5850, -11281, 3559, -8952,
    -10146, -4667, -16251, -1538, 2062, -1012, -13073, 227, -3142, -5265, 20, 5770, -7559, 4740, -4819, 992,
    -8208, -7130, -4652, 6725, 7369, -1036, 13144, -1588, -5304, -2344, -449, -5705, -8894, 5205, -17904, -11188,
    -1022, 4852, 10101, -5255, -4200, -752, 7941, -1543, 5959, 14719, 13346, 17045, -15605, -1678, -1600, -9230,
    68, 23348, 1172, 7750, 11212, -18227, 9956, 4161, 883, 3947, 4341, 1014, -4889, -2603, 1246, -5630,
    -3596, -870, -1298, 2784, -3317, -6612, -20541, 4166, 4181, -8625, 3562, 12890, 4761, 3205, -12259, -8579
};
void WebRtcNetEQ_RandomVec(WebRtc_UWord32 *w32_seed, WebRtc_Word16 *pw16_randVec,
                           WebRtc_Word16 w16_len, WebRtc_Word16 w16_incval)
{
    /*
     * Fill pw16_randVec with w16_len pseudo-random samples drawn from the
     * pre-computed noise table. The seed is advanced by w16_incval for each
     * sample, and its low bits select the table entry (the table length is
     * a power of two, so masking is a cheap modulo).
     */
    int sample;

    for (sample = 0; sample < w16_len; sample++)
    {
        WebRtc_Word16 tableIndex;

        *w32_seed += w16_incval;
        tableIndex = (WebRtc_Word16) ((*w32_seed) & (RANDVEC_NO_OF_SAMPLES - 1));
        pw16_randVec[sample] = WebRtcNetEQ_kRandnTbl[tableIndex];
    }
}

View File

@ -1,517 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* Implementation of the RecIn function, which is the main function for inserting RTP
* packets into NetEQ.
*/
#include "mcu.h"
#include <string.h>
#include "signal_processing_library.h"
#include "automode.h"
#include "dtmf_buffer.h"
#include "neteq_defines.h"
#include "neteq_error_codes.h"
/*
 * Insert one incoming RTP packet into NetEQ. Handles SSRC changes (buffer
 * flush and codec re-init), RED redundancy splitting, DTMF/AVT events, CNG,
 * timestamp scaling, packet-buffer insertion, bandwidth-estimator updates,
 * and inter-arrival statistics for the adaptive buffer-level algorithm.
 * Returns 0 on success or a negative NetEQ error code.
 */
int WebRtcNetEQ_RecInInternal(MCUInst_t *MCU_inst, RTPPacket_t *RTPpacketInput,
                              WebRtc_UWord32 uw32_timeRec)
{
    RTPPacket_t RTPpacket[2];
    int i_k;
    int i_ok = 0, i_No_Of_Payloads = 1;
    WebRtc_Word16 flushed = 0;
    WebRtc_Word16 codecPos;
    WebRtc_UWord32 diffTS, uw32_tmp;
    int curr_Codec;
    WebRtc_Word16 isREDPayload = 0;
    /* Buffer fill level before insertion; used below to measure how many
       samples of speech this packet contributed. */
    WebRtc_Word32 temp_bufsize = MCU_inst->PacketBuffer_inst.numPacketsInBuffer;
#ifdef NETEQ_RED_CODEC
    RTPPacket_t* RTPpacketPtr[2]; /* Support for redundancy up to 2 payloads */
    RTPpacketPtr[0] = &RTPpacket[0];
    RTPpacketPtr[1] = &RTPpacket[1];
#endif

    /*
     * Copy from input RTP packet to local copy
     * (mainly to enable multiple payloads using RED)
     */
    WEBRTC_SPL_MEMCPY_W8(&RTPpacket[0], RTPpacketInput, sizeof(RTPPacket_t));

    /* Reinitialize NetEq if it's needed (changed SSRC or first call) */
    if ((RTPpacket[0].ssrc != MCU_inst->ssrc) || (MCU_inst->first_packet == 1))
    {
        WebRtcNetEQ_RTCPInit(&MCU_inst->RTCP_inst, RTPpacket[0].seqNumber);
        MCU_inst->first_packet = 0;

        /* Flush the buffer */
        WebRtcNetEQ_PacketBufferFlush(&MCU_inst->PacketBuffer_inst);

        /* Store new SSRC */
        MCU_inst->ssrc = RTPpacket[0].ssrc;

        /* Update codecs */
        MCU_inst->timeStamp = RTPpacket[0].timeStamp;
        MCU_inst->current_Payload = RTPpacket[0].payloadType;

        /*Set MCU to update codec on next SignalMCU call */
        MCU_inst->new_codec = 1;

        /* Reset timestamp scaling */
        MCU_inst->TSscalingInitialized = 0;
    }

    /* Call RTCP statistics */
    i_ok |= WebRtcNetEQ_RTCPUpdate(&(MCU_inst->RTCP_inst), RTPpacket[0].seqNumber,
        RTPpacket[0].timeStamp, uw32_timeRec);

    /* If Redundancy is supported and this is the redundancy payload, separate the payloads */
#ifdef NETEQ_RED_CODEC
    if (RTPpacket[0].payloadType == WebRtcNetEQ_DbGetPayload(&MCU_inst->codec_DB_inst,
        kDecoderRED))
    {
        /* Split the payload into a main and a redundancy payloads */
        i_ok = WebRtcNetEQ_RedundancySplit(RTPpacketPtr, 2, &i_No_Of_Payloads);
        if (i_ok < 0)
        {
            /* error returned */
            return i_ok;
        }

        /*
         * Only accept a few redundancies of the same type as the main data,
         * AVT events and CNG.
         */
        /* If the secondary payload is of an unrelated type, drop it and keep
           only the primary payload. */
        if ((i_No_Of_Payloads > 1) && (RTPpacket[0].payloadType != RTPpacket[1].payloadType)
            && (RTPpacket[0].payloadType != WebRtcNetEQ_DbGetPayload(&MCU_inst->codec_DB_inst,
                kDecoderAVT)) && (RTPpacket[1].payloadType != WebRtcNetEQ_DbGetPayload(
            &MCU_inst->codec_DB_inst, kDecoderAVT)) && (!WebRtcNetEQ_DbIsCNGPayload(
            &MCU_inst->codec_DB_inst, RTPpacket[0].payloadType))
            && (!WebRtcNetEQ_DbIsCNGPayload(&MCU_inst->codec_DB_inst, RTPpacket[1].payloadType)))
        {
            i_No_Of_Payloads = 1;
        }
        isREDPayload = 1;
    }
#endif

    /* loop over the number of payloads */
    for (i_k = 0; i_k < i_No_Of_Payloads; i_k++)
    {
        /* rcuPlCntr marks which redundancy level a payload came from. */
        if (isREDPayload == 1)
        {
            RTPpacket[i_k].rcuPlCntr = i_k;
        }
        else
        {
            RTPpacket[i_k].rcuPlCntr = 0;
        }

        /* Force update of SplitInfo if it's iLBC because of potential change between 20/30ms */
        if (RTPpacket[i_k].payloadType == WebRtcNetEQ_DbGetPayload(&MCU_inst->codec_DB_inst,
            kDecoderILBC))
        {
            i_ok = WebRtcNetEQ_DbGetSplitInfo(
                &MCU_inst->PayloadSplit_inst,
                (enum WebRtcNetEQDecoder) WebRtcNetEQ_DbGetCodec(&MCU_inst->codec_DB_inst,
                    RTPpacket[i_k].payloadType), RTPpacket[i_k].payloadLen);
            if (i_ok < 0)
            {
                /* error returned */
                return i_ok;
            }
        }

        /* Get information about timestamp scaling for this payload type */
        i_ok = WebRtcNetEQ_GetTimestampScaling(MCU_inst, RTPpacket[i_k].payloadType);
        if (i_ok < 0)
        {
            /* error returned */
            return i_ok;
        }

        if (MCU_inst->TSscalingInitialized == 0 && MCU_inst->scalingFactor != kTSnoScaling)
        {
            /* Must initialize scaling with current timestamps */
            MCU_inst->externalTS = RTPpacket[i_k].timeStamp;
            MCU_inst->internalTS = RTPpacket[i_k].timeStamp;
            MCU_inst->TSscalingInitialized = 1;
        }

        /* Adjust timestamp if timestamp scaling is needed (e.g. SILK or G.722) */
        if (MCU_inst->TSscalingInitialized == 1)
        {
            WebRtc_UWord32 newTS = WebRtcNetEQ_ScaleTimestampExternalToInternal(MCU_inst,
                RTPpacket[i_k].timeStamp);

            /* save the incoming timestamp for next time */
            MCU_inst->externalTS = RTPpacket[i_k].timeStamp;

            /* add the scaled difference to last scaled timestamp and save ... */
            MCU_inst->internalTS = newTS;

            RTPpacket[i_k].timeStamp = newTS;
        }

        /* Is this a DTMF packet?*/
        if (RTPpacket[i_k].payloadType == WebRtcNetEQ_DbGetPayload(&MCU_inst->codec_DB_inst,
            kDecoderAVT))
        {
#ifdef NETEQ_ATEVENT_DECODE
            if (MCU_inst->AVT_PlayoutOn)
            {
                /* Queue the event for playout; it is not stored in the
                   ordinary packet buffer. */
                i_ok = WebRtcNetEQ_DtmfInsertEvent(&MCU_inst->DTMF_inst,
                    RTPpacket[i_k].payload, RTPpacket[i_k].payloadLen,
                    RTPpacket[i_k].timeStamp);
                if (i_ok != 0)
                {
                    return i_ok;
                }
            }
#endif
#ifdef NETEQ_STEREO
            if (MCU_inst->usingStereo == 0)
            {
                /* do not set this for DTMF packets when using stereo mode */
                MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF = 1;
            }
#else
            MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF = 1;
#endif
        }
        else if (WebRtcNetEQ_DbIsCNGPayload(&MCU_inst->codec_DB_inst,
            RTPpacket[i_k].payloadType))
        {
            /* Is this a CNG packet? how should we handle this?*/
#ifdef NETEQ_CNG_CODEC
            /* Get CNG sample rate */
            WebRtc_UWord16 fsCng = WebRtcNetEQ_DbGetSampleRate(&MCU_inst->codec_DB_inst,
                RTPpacket[i_k].payloadType);
            if ((fsCng != MCU_inst->fs) && (fsCng > 8000))
            {
                /*
                 * We have received CNG with a different sample rate from what we are using
                 * now (must be > 8000, since we may use only one CNG type (default) for all
                 * frequencies). Flush buffer and signal new codec.
                 */
                WebRtcNetEQ_PacketBufferFlush(&MCU_inst->PacketBuffer_inst);
                MCU_inst->new_codec = 1;
                MCU_inst->current_Codec = -1;
            }
            i_ok = WebRtcNetEQ_PacketBufferInsert(&MCU_inst->PacketBuffer_inst,
                &RTPpacket[i_k], &flushed);
            if (i_ok < 0)
            {
                return RECIN_CNG_ERROR;
            }
            MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF = 1;
#else /* NETEQ_CNG_CODEC not defined */
            return RECIN_UNKNOWNPAYLOAD;
#endif /* NETEQ_CNG_CODEC */
        }
        else
        {
            /* Reinitialize the splitting if the payload and/or the payload length has changed */
            curr_Codec = WebRtcNetEQ_DbGetCodec(&MCU_inst->codec_DB_inst,
                RTPpacket[i_k].payloadType);
            if (curr_Codec != MCU_inst->current_Codec)
            {
                if (curr_Codec < 0)
                {
                    return RECIN_UNKNOWNPAYLOAD;
                }
                MCU_inst->current_Codec = curr_Codec;
                MCU_inst->current_Payload = RTPpacket[i_k].payloadType;
                i_ok = WebRtcNetEQ_DbGetSplitInfo(&MCU_inst->PayloadSplit_inst,
                    (enum WebRtcNetEQDecoder) MCU_inst->current_Codec,
                    RTPpacket[i_k].payloadLen);
                if (i_ok < 0)
                { /* error returned */
                    return i_ok;
                }
                /* Codec changed: old packets are useless, start over. */
                WebRtcNetEQ_PacketBufferFlush(&MCU_inst->PacketBuffer_inst);
                MCU_inst->new_codec = 1;
            }

            /* update post-call statistics */
            if (MCU_inst->new_codec != 1) /* not if in codec change */
            {
                diffTS = RTPpacket[i_k].timeStamp - MCU_inst->timeStamp; /* waiting time */
                /* Unsigned wrap-around makes re-ordered (older) packets appear
                   as huge values; the guard below skips those. */
                if (diffTS < 0x0FFFFFFF) /* guard against re-ordering */
                {
                    /* waiting time in ms */
                    diffTS = WEBRTC_SPL_UDIV(diffTS,
                        WEBRTC_SPL_UDIV((WebRtc_UWord32) MCU_inst->fs, 1000) );
                    if (diffTS < MCU_inst->statInst.minPacketDelayMs)
                    {
                        /* new all-time low */
                        MCU_inst->statInst.minPacketDelayMs = diffTS;
                    }
                    if (diffTS > MCU_inst->statInst.maxPacketDelayMs)
                    {
                        /* new all-time high */
                        MCU_inst->statInst.maxPacketDelayMs = diffTS;
                    }
                    /* Update avg waiting time:
                     * avgPacketDelayMs =
                     * (avgPacketCount * avgPacketDelayMs + diffTS)/(avgPacketCount+1)
                     * with proper rounding.
                     */
                    uw32_tmp
                        = WEBRTC_SPL_UMUL((WebRtc_UWord32) MCU_inst->statInst.avgPacketCount,
                            (WebRtc_UWord32) MCU_inst->statInst.avgPacketDelayMs);
                    uw32_tmp
                        = WEBRTC_SPL_ADD_SAT_W32(uw32_tmp,
                            (diffTS + (MCU_inst->statInst.avgPacketCount>>1)));
                    uw32_tmp = WebRtcSpl_DivU32U16(uw32_tmp,
                        (WebRtc_UWord16) (MCU_inst->statInst.avgPacketCount + 1));
                    MCU_inst->statInst.avgPacketDelayMs
                        = (WebRtc_UWord16) WEBRTC_SPL_MIN(uw32_tmp, (WebRtc_UWord32) 65535);

                    /* increase counter, but not to more than 65534 */
                    if (MCU_inst->statInst.avgPacketCount < (0xFFFF - 1))
                    {
                        MCU_inst->statInst.avgPacketCount++;
                    }
                }
            }

            /* Parse the payload and insert it into the buffer */
            i_ok = WebRtcNetEQ_SplitAndInsertPayload(&RTPpacket[i_k],
                &MCU_inst->PacketBuffer_inst, &MCU_inst->PayloadSplit_inst, &flushed);
            if (i_ok < 0)
            {
                return i_ok;
            }
            if (MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF != 0)
            {
                /* first normal packet after CNG or DTMF */
                /* -1 means "reset IAT measurement on the next packet" (see the
                   handling at the bottom of this function). */
                MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF = -1;
            }
        }
        /* Reset DSP timestamp etc. if packet buffer flushed */
        if (flushed)
        {
            MCU_inst->new_codec = 1;
        }
    }

    /*
     * Update Bandwidth Estimate
     * Only send the main payload to BWE
     */
    if ((curr_Codec = WebRtcNetEQ_DbGetCodec(&MCU_inst->codec_DB_inst,
        RTPpacket[0].payloadType)) >= 0)
    {
        codecPos = MCU_inst->codec_DB_inst.position[curr_Codec];
        if (MCU_inst->codec_DB_inst.funcUpdBWEst[codecPos] != NULL) /* codec has BWE function */
        {
            if (RTPpacket[0].starts_byte1) /* check for shifted byte alignment */
            {
                /* re-align to 16-bit alignment */
                for (i_k = 0; i_k < RTPpacket[0].payloadLen; i_k++)
                {
                    WEBRTC_SPL_SET_BYTE(RTPpacket[0].payload,
                        WEBRTC_SPL_GET_BYTE(RTPpacket[0].payload, i_k+1),
                        i_k);
                }
                RTPpacket[0].starts_byte1 = 0;
            }

            MCU_inst->codec_DB_inst.funcUpdBWEst[codecPos](
                MCU_inst->codec_DB_inst.codec_state[codecPos],
                (G_CONST WebRtc_UWord16 *) RTPpacket[0].payload,
                (WebRtc_Word32) RTPpacket[0].payloadLen, RTPpacket[0].seqNumber,
                (WebRtc_UWord32) RTPpacket[0].timeStamp, (WebRtc_UWord32) uw32_timeRec);
        }
    }

    if (MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF == 0)
    {
        /* Calculate the total speech length carried in each packet */
        temp_bufsize = MCU_inst->PacketBuffer_inst.numPacketsInBuffer - temp_bufsize;
        temp_bufsize *= MCU_inst->PacketBuffer_inst.packSizeSamples;

        if ((temp_bufsize > 0) && (MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF
            == 0) && (temp_bufsize
            != MCU_inst->BufferStat_inst.Automode_inst.packetSpeechLenSamp))
        {
            /* Change the auto-mode parameters if packet length has changed */
            WebRtcNetEQ_SetPacketSpeechLen(&(MCU_inst->BufferStat_inst.Automode_inst),
                (WebRtc_Word16) temp_bufsize, MCU_inst->fs);
        }

        /* update statistics */
        if ((WebRtc_Word32) (RTPpacket[0].timeStamp - MCU_inst->timeStamp) >= 0
            && !MCU_inst->new_codec)
        {
            /*
             * Only update statistics if incoming packet is not older than last played out
             * packet, and if new codec flag is not set.
             */
            WebRtcNetEQ_UpdateIatStatistics(&MCU_inst->BufferStat_inst.Automode_inst,
                MCU_inst->PacketBuffer_inst.maxInsertPositions, RTPpacket[0].seqNumber,
                RTPpacket[0].timeStamp, MCU_inst->fs,
                WebRtcNetEQ_DbIsMDCodec((enum WebRtcNetEQDecoder) MCU_inst->current_Codec),
                (MCU_inst->NetEqPlayoutMode == kPlayoutStreaming));
        }
    }
    else if (MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF == -1)
    {
        /*
         * This is first "normal" packet after CNG or DTMF.
         * Reset packet time counter and measure time until next packet,
         * but don't update statistics.
         */
        MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF = 0;
        MCU_inst->BufferStat_inst.Automode_inst.packetIatCountSamp = 0;
    }
    return 0;
}
/*
 * Select the RTP-timestamp scaling policy for the codec registered on the
 * given RTP payload type, and store it in MCU_inst->scalingFactor.
 * The factor is the number with which the RTP timestamp must be multiplied
 * to get the true sample count.
 * Returns 0 on success, or the (negative) codec-database error code.
 */
int WebRtcNetEQ_GetTimestampScaling(MCUInst_t *MCU_inst, int rtpPayloadType)
{
    enum WebRtcNetEQDecoder codec;
    int codecNumber = WebRtcNetEQ_DbGetCodec(&MCU_inst->codec_DB_inst, rtpPayloadType);

    if (codecNumber < 0)
    {
        /* Payload type not found in the codec database; propagate error. */
        return codecNumber;
    }

    codec = (enum WebRtcNetEQDecoder) codecNumber;

    if (codec == kDecoderG722)
    {
        /* G.722 produces two output samples per RTP timestamp tick. */
        MCU_inst->scalingFactor = kTSscalingTwo;
    }
    else if (codec == kDecoderAVT || codec == kDecoderCNG)
    {
        /* AVT events and CNG inherit the currently active scaling. */
    }
    else
    {
        /* All other codecs use unscaled RTP timestamps. */
        MCU_inst->scalingFactor = kTSnoScaling;
    }

    return 0;
}
/*
 * Convert an incoming (external) RTP timestamp to the internal sample-count
 * domain: the step since the previously stored external timestamp is scaled
 * according to MCU_inst->scalingFactor and added to the internal clock.
 * The caller is responsible for saving the new external/internal values.
 */
WebRtc_UWord32 WebRtcNetEQ_ScaleTimestampExternalToInternal(const MCUInst_t *MCU_inst,
                                                            WebRtc_UWord32 externalTS)
{
    /* step since the last incoming timestamp */
    WebRtc_Word32 step = externalTS - MCU_inst->externalTS;

    if (MCU_inst->scalingFactor == kTSscalingTwo)
    {
        /* multiply with 2 */
        step = WEBRTC_SPL_LSHIFT_W32(step, 1);
    }
    else if (MCU_inst->scalingFactor == kTSscalingTwoThirds)
    {
        /* multiply with 2/3 */
        step = WebRtcSpl_DivW32W16(WEBRTC_SPL_LSHIFT_W32(step, 1), 3);
    }
    else if (MCU_inst->scalingFactor == kTSscalingFourThirds)
    {
        /* multiply with 4/3 */
        step = WebRtcSpl_DivW32W16(WEBRTC_SPL_LSHIFT_W32(step, 2), 3);
    }
    /* any other scaling factor: leave the step unmodified */

    /* add the scaled step to the last internal timestamp */
    return MCU_inst->internalTS + step;
}
/*
 * Convert an internal sample-count timestamp back to the external RTP
 * timestamp domain; the exact inverse of
 * WebRtcNetEQ_ScaleTimestampExternalToInternal().
 */
WebRtc_UWord32 WebRtcNetEQ_ScaleTimestampInternalToExternal(const MCUInst_t *MCU_inst,
                                                            WebRtc_UWord32 internalTS)
{
    /* step since the last stored internal timestamp */
    WebRtc_Word32 step = (WebRtc_Word32) internalTS - MCU_inst->internalTS;

    if (MCU_inst->scalingFactor == kTSscalingTwo)
    {
        /* divide by 2 */
        step = WEBRTC_SPL_RSHIFT_W32(step, 1);
    }
    else if (MCU_inst->scalingFactor == kTSscalingTwoThirds)
    {
        /* multiply with 3/2 */
        step = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_32_16(step, 3), 1);
    }
    else if (MCU_inst->scalingFactor == kTSscalingFourThirds)
    {
        /* multiply with 3/4 */
        step = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_32_16(step, 3), 2);
    }
    /* any other scaling factor: leave the step unmodified */

    /* add the scaled step to the last external timestamp */
    return MCU_inst->externalTS + step;
}

File diff suppressed because it is too large Load Diff

View File

@ -1,134 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* Implementation of RTCP statistics reporting.
*/
#include "rtcp.h"
#include <string.h>
#include "signal_processing_library.h"
/*
 * Reset the RTCP statistics record and seed the sequence-number tracking
 * with the first observed RTP sequence number. Always returns 0.
 */
int WebRtcNetEQ_RTCPInit(WebRtcNetEQ_RTCP_t *RTCP_inst, WebRtc_UWord16 uw16_seqNo)
{
    /* Zero the whole instance before seeding the sequence-number fields. */
    WebRtcSpl_MemSetW16((WebRtc_Word16*) RTCP_inst, 0,
                        sizeof(WebRtcNetEQ_RTCP_t) / sizeof(WebRtc_Word16));

    RTCP_inst->max_seq = uw16_seqNo;
    RTCP_inst->base_seq = uw16_seqNo;

    return 0;
}
/*
 * Update RTCP receiver statistics with one incoming packet: received count,
 * extended highest sequence number (with wrap-around cycle counting), and
 * the RFC 3550 interarrival-jitter estimate (kept in Q4 in RTCP_inst->jitter).
 * uw32_timeStamp is the RTP timestamp; uw32_recTime is the local arrival
 * time expressed in the same units. Always returns 0.
 */
int WebRtcNetEQ_RTCPUpdate(WebRtcNetEQ_RTCP_t *RTCP_inst, WebRtc_UWord16 uw16_seqNo,
                           WebRtc_UWord32 uw32_timeStamp, WebRtc_UWord32 uw32_recTime)
{
    WebRtc_Word16 w16_SeqDiff;
    WebRtc_Word32 w32_TimeDiff;
    WebRtc_Word32 w32_JitterDiff;

    /*
     * Update number of received packets, and largest packet number received.
     */
    RTCP_inst->received++;
    w16_SeqDiff = uw16_seqNo - RTCP_inst->max_seq;
    if (w16_SeqDiff >= 0)
    {
        if (uw16_seqNo < RTCP_inst->max_seq)
        {
            /* Wrap around detected */
            RTCP_inst->cycles++;
        }
        RTCP_inst->max_seq = uw16_seqNo;
    }

    /* Calculate Jitter, and update previous timestamps */
    /* Note that the value in RTCP_inst->jitter is in Q4. */
    if (RTCP_inst->received > 1)
    {
        /* D = |(recTime - prevRecTime) - (timeStamp - prevTimeStamp)|,
           computed via the stored transit time (RFC 3550, A.8). */
        w32_TimeDiff = (uw32_recTime - (uw32_timeStamp - RTCP_inst->transit));
        w32_TimeDiff = WEBRTC_SPL_ABS_W32(w32_TimeDiff);
        /*
         * Bug fix: w32_TimeDiff is 32 bits wide, so use the 32-bit shift
         * macro. The previous WEBRTC_SPL_LSHIFT_W16 is defined for 16-bit
         * operands and could truncate large inter-arrival differences.
         */
        w32_JitterDiff = WEBRTC_SPL_LSHIFT_W32(w32_TimeDiff, 4) - RTCP_inst->jitter;
        /* J += (D_Q4 - J + 8) / 16, i.e. the RFC 3550 first-order filter
           with rounding. */
        RTCP_inst->jitter = RTCP_inst->jitter + WEBRTC_SPL_RSHIFT_W32((w32_JitterDiff + 8), 4);
    }
    /* Save transit time (timeStamp - recTime) for the next packet. */
    RTCP_inst->transit = (uw32_timeStamp - uw32_recTime);
    return 0;
}
int WebRtcNetEQ_RTCPGetStats(WebRtcNetEQ_RTCP_t *RTCP_inst,
                             WebRtc_UWord16 *puw16_fraction_lost,
                             WebRtc_UWord32 *puw32_cum_lost, WebRtc_UWord32 *puw32_ext_max,
                             WebRtc_UWord32 *puw32_jitter, WebRtc_Word16 doNotReset)
{
    /*
     * Produce the receiver-report statistics: fraction lost (since the last
     * report, in Q8), cumulative number of packets lost, extended highest
     * sequence number received, and inter-arrival jitter.
     *
     * Unless doNotReset is non-zero, the interval counters (exp_prior,
     * rec_prior) are updated so that the next call reports a fresh interval.
     *
     * Always returns 0.
     */
    WebRtc_UWord32 uw32_exp_nr, uw32_exp_interval, uw32_rec_interval;
    WebRtc_Word32 w32_lost;

    /* Extended highest sequence number received:
     * (wrap-around count << 16) + highest 16-bit sequence number. */
    *puw32_ext_max
        = (WebRtc_UWord32) WEBRTC_SPL_LSHIFT_W32((WebRtc_UWord32)RTCP_inst->cycles, 16)
        + RTCP_inst->max_seq;

    /*
     * Calculate expected number of packets and compare it to the number of packets that
     * were actually received => the cumulative number of packets lost can be extracted.
     */
    uw32_exp_nr = *puw32_ext_max - RTCP_inst->base_seq + 1;
    if (RTCP_inst->received == 0)
    {
        /* no packets received, assume none lost */
        *puw32_cum_lost = 0;
    }
    else if (uw32_exp_nr > RTCP_inst->received)
    {
        *puw32_cum_lost = uw32_exp_nr - RTCP_inst->received;
        if (*puw32_cum_lost > (WebRtc_UWord32) 0xFFFFFF)
        {
            /* Saturate at 24 bits (the field width in an RTCP report block). */
            *puw32_cum_lost = 0xFFFFFF;
        }
    }
    else
    {
        /* Received at least as many as expected (e.g. duplicates): report no loss. */
        *puw32_cum_lost = 0;
    }

    /* Fraction lost (Since last report) */
    uw32_exp_interval = uw32_exp_nr - RTCP_inst->exp_prior;
    if (!doNotReset)
    {
        /* Start a new reporting interval. */
        RTCP_inst->exp_prior = uw32_exp_nr;
    }
    uw32_rec_interval = RTCP_inst->received - RTCP_inst->rec_prior;
    if (!doNotReset)
    {
        RTCP_inst->rec_prior = RTCP_inst->received;
    }
    w32_lost = (WebRtc_Word32) (uw32_exp_interval - uw32_rec_interval);

    /* Fraction lost is (lost / expected) in Q8. */
    if (uw32_exp_interval == 0 || w32_lost <= 0 || RTCP_inst->received == 0)
    {
        *puw16_fraction_lost = 0;
    }
    else
    {
        *puw16_fraction_lost = (WebRtc_UWord16) (WEBRTC_SPL_LSHIFT_W32(w32_lost, 8)
            / uw32_exp_interval);
    }
    if (*puw16_fraction_lost > 0xFF)
    {
        /* Clamp to the 8-bit field width. */
        *puw16_fraction_lost = 0xFF;
    }

    /* Inter-arrival jitter */
    *puw32_jitter = (RTCP_inst->jitter) >> 4; /* scaling from Q4 */
    return 0;
}

View File

@ -1,102 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* RTCP statistics reporting.
*/
#ifndef RTCP_H
#define RTCP_H

#include "typedefs.h"

/*
 * RTCP receiver statistics maintained per RTP stream; used to produce the
 * receiver-report fields (fraction lost, cumulative lost, extended highest
 * sequence number, inter-arrival jitter).
 */
typedef struct
{
    WebRtc_UWord16 cycles; /* The number of wrap-arounds for the sequence number */
    WebRtc_UWord16 max_seq; /* The maximum sequence number received
     (starts from 0 again after wrap around) */
    WebRtc_UWord16 base_seq; /* The sequence number of the first packet that arrived */
    WebRtc_UWord32 received; /* The number of packets that has been received */
    WebRtc_UWord32 rec_prior; /* Number of packets received when last report was generated */
    WebRtc_UWord32 exp_prior; /* Number of packets that should have been received if no
     packets were lost. Stored value from last report. */
    WebRtc_UWord32 jitter; /* Jitter statistics at this instance (calculated according to RFC);
     stored in Q4 */
    WebRtc_Word32 transit; /* Clock difference for previous packet (RTPtimestamp - LOCALtime_rec) */
} WebRtcNetEQ_RTCP_t;

/****************************************************************************
 * WebRtcNetEQ_RTCPInit(...)
 *
 * This function initializes the RTCP statistics instance: all counters are
 * cleared and the sequence-number tracking is started from seqNo.
 *
 * Input:
 *      - RTCP_inst     : RTCP instance, that contains information about the
 *                        packets that have been received etc.
 *      - seqNo         : Packet number of the first received frame.
 *
 * Return value         : 0 - Ok
 *                        -1 - Error
 */
int WebRtcNetEQ_RTCPInit(WebRtcNetEQ_RTCP_t *RTCP_inst, WebRtc_UWord16 uw16_seqNo);

/****************************************************************************
 * WebRtcNetEQ_RTCPUpdate(...)
 *
 * This function updates the RTCP statistics with one newly received RTP
 * packet (packet counts, wrap-around detection, and jitter).
 *
 * Input:
 *      - RTCP_inst     : RTCP instance, that contains information about the
 *                        packets that have been received etc.
 *      - seqNo         : Sequence number of the received packet.
 *      - timeStamp     : Time stamp from the RTP header.
 *      - recTime       : Time (in RTP timestamps) when this packet was received.
 *
 * Return value         : 0 - Ok
 *                        -1 - Error
 */
int WebRtcNetEQ_RTCPUpdate(WebRtcNetEQ_RTCP_t *RTCP_inst, WebRtc_UWord16 uw16_seqNo,
                           WebRtc_UWord32 uw32_timeStamp, WebRtc_UWord32 uw32_recTime);

/****************************************************************************
 * WebRtcNetEQ_RTCPGetStats(...)
 *
 * This function calculates the parameters that are needed for the RTCP
 * report.
 *
 * Input:
 *      - RTCP_inst     : RTCP instance, that contains information about the
 *                        packets that have been received etc.
 *      - doNotReset    : If non-zero, the fraction lost statistics will not
 *                        be reset.
 *
 * Output:
 *      - RTCP_inst     : Updated RTCP information (some statistics are
 *                        reset when generating this report)
 *      - fraction_lost : Number of lost RTP packets divided by the number of
 *                        expected packets, since the last RTCP Report (Q8).
 *      - cum_lost      : Cumulative number of lost packets during this
 *                        session (saturates at 24 bits).
 *      - ext_max       : Extended highest sequence number received.
 *      - jitter        : Inter-arrival jitter.
 *
 * Return value         : 0 - Ok
 *                        -1 - Error
 */
int WebRtcNetEQ_RTCPGetStats(WebRtcNetEQ_RTCP_t *RTCP_inst,
                             WebRtc_UWord16 *puw16_fraction_lost,
                             WebRtc_UWord32 *puw32_cum_lost, WebRtc_UWord32 *puw32_ext_max,
                             WebRtc_UWord32 *puw32_jitter, WebRtc_Word16 doNotReset);

#endif

View File

@ -1,240 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* RTP related functions.
*/
#include "rtp.h"
#include "typedefs.h" /* to define endianness */
#include "neteq_error_codes.h"
int WebRtcNetEQ_RTPPayloadInfo(WebRtc_Word16* pw16_Datagram, int i_DatagramLen,
                               RTPPacket_t* RTPheader)
{
    /*
     * Parse an RTP datagram, supplied as an array of 16-bit words (two bytes
     * per word), and fill in RTPheader with the header fields and a pointer
     * to the payload. Separate parsing branches are compiled for big- and
     * little-endian hosts.
     *
     * Input:
     *      - pw16_Datagram : the UDP datagram
     *      - i_DatagramLen : datagram length in bytes
     *
     * Output:
     *      - RTPheader     : parsed header fields, payload pointer and length
     *
     * Return value: 0 on success,
     *               RTP_TOO_SHORT_PACKET if shorter than the 12-byte RTP header,
     *               RTP_CORRUPT_PACKET if the header fields are inconsistent.
     */
    int i_P, i_X, i_CC, i_startPosition;
    int i_IPver;
    int i_extlength = -1; /* Default value if there is no extension */
    int i_padlength = 0; /* Default value if there is no padding */

    if (i_DatagramLen < 12)
    {
        /* Smaller than the fixed RTP header; cannot be a valid packet. */
        return RTP_TOO_SHORT_PACKET;
    }

#ifdef WEBRTC_BIG_ENDIAN
    i_IPver = (((WebRtc_UWord16) (pw16_Datagram[0] & 0xC000)) >> 14); /* Extract the version */
    i_P = (((WebRtc_UWord16) (pw16_Datagram[0] & 0x2000)) >> 13); /* Extract the P bit */
    i_X = (((WebRtc_UWord16) (pw16_Datagram[0] & 0x1000)) >> 12); /* Extract the X bit */
    i_CC = ((WebRtc_UWord16) (pw16_Datagram[0] >> 8) & 0xF); /* Get the CC number */
    RTPheader->payloadType = pw16_Datagram[0] & 0x7F; /* Get the coder type */
    RTPheader->seqNumber = pw16_Datagram[1]; /* Get the sequence number */
    RTPheader->timeStamp = ((((WebRtc_UWord32) ((WebRtc_UWord16) pw16_Datagram[2])) << 16)
        | (WebRtc_UWord16) (pw16_Datagram[3])); /* Get timestamp */
    RTPheader->ssrc = (((WebRtc_UWord32) pw16_Datagram[4]) << 16)
        + (((WebRtc_UWord32) pw16_Datagram[5])); /* Get the SSRC */

    if (i_X == 1)
    {
        /* Extension header exists. Find out how many WebRtc_Word32 it consists of. */
        i_extlength = pw16_Datagram[7 + 2 * i_CC];
    }
    if (i_P == 1)
    {
        /* Padding exists. Find out how many bytes the padding consists of. */
        if (i_DatagramLen & 0x1)
        {
            /* odd number of bytes => last byte in higher byte */
            i_padlength = (((WebRtc_UWord16) pw16_Datagram[i_DatagramLen >> 1]) >> 8);
        }
        else
        {
            /* even number of bytes => last byte in lower byte */
            i_padlength = ((pw16_Datagram[(i_DatagramLen >> 1) - 1]) & 0xFF);
        }
    }
#else /* WEBRTC_LITTLE_ENDIAN */
    /* On little-endian hosts each 16-bit word holds the two network-order
     * bytes swapped, so every multi-byte field is reassembled explicitly. */
    i_IPver = (((WebRtc_UWord16) (pw16_Datagram[0] & 0xC0)) >> 6); /* Extract the IP version */
    i_P = (((WebRtc_UWord16) (pw16_Datagram[0] & 0x20)) >> 5); /* Extract the P bit */
    i_X = (((WebRtc_UWord16) (pw16_Datagram[0] & 0x10)) >> 4); /* Extract the X bit */
    i_CC = (WebRtc_UWord16) (pw16_Datagram[0] & 0xF); /* Get the CC number */
    RTPheader->payloadType = (pw16_Datagram[0] >> 8) & 0x7F; /* Get the coder type */
    RTPheader->seqNumber = (((((WebRtc_UWord16) pw16_Datagram[1]) >> 8) & 0xFF)
        | (((WebRtc_UWord16) (pw16_Datagram[1] & 0xFF)) << 8)); /* Get the packet number */
    RTPheader->timeStamp = ((((WebRtc_UWord16) pw16_Datagram[2]) & 0xFF) << 24)
        | ((((WebRtc_UWord16) pw16_Datagram[2]) & 0xFF00) << 8)
        | ((((WebRtc_UWord16) pw16_Datagram[3]) >> 8) & 0xFF)
        | ((((WebRtc_UWord16) pw16_Datagram[3]) & 0xFF) << 8); /* Get timestamp */
    RTPheader->ssrc = ((((WebRtc_UWord16) pw16_Datagram[4]) & 0xFF) << 24)
        | ((((WebRtc_UWord16) pw16_Datagram[4]) & 0xFF00) << 8)
        | ((((WebRtc_UWord16) pw16_Datagram[5]) >> 8) & 0xFF)
        | ((((WebRtc_UWord16) pw16_Datagram[5]) & 0xFF) << 8); /* Get the SSRC */

    if (i_X == 1)
    {
        /* Extension header exists. Find out how many WebRtc_Word32 it consists of. */
        i_extlength = (((((WebRtc_UWord16) pw16_Datagram[7 + 2 * i_CC]) >> 8) & 0xFF)
            | (((WebRtc_UWord16) (pw16_Datagram[7 + 2 * i_CC] & 0xFF)) << 8));
    }
    if (i_P == 1)
    {
        /* Padding exists. Find out how many bytes the padding consists of. */
        if (i_DatagramLen & 0x1)
        {
            /* odd number of bytes => last byte in higher byte */
            i_padlength = (pw16_Datagram[i_DatagramLen >> 1] & 0xFF);
        }
        else
        {
            /* even number of bytes => last byte in lower byte */
            i_padlength = (((WebRtc_UWord16) pw16_Datagram[(i_DatagramLen >> 1) - 1]) >> 8);
        }
    }
#endif

    /* Payload starts after the fixed 12-byte header, the CSRC list (4*CC
     * bytes), and the extension (4-byte extension header + i_extlength words;
     * i_extlength defaults to -1 so the term vanishes when there is none). */
    i_startPosition = 12 + 4 * (i_extlength + 1) + 4 * i_CC;
    RTPheader->payload = &pw16_Datagram[i_startPosition >> 1];
    RTPheader->payloadLen = i_DatagramLen - i_startPosition - i_padlength;
    RTPheader->starts_byte1 = 0;

    /* Sanity checks: RTP version must be 2 and the payload must lie inside
     * the datagram with a plausible length. */
    if ((i_IPver != 2) || (RTPheader->payloadLen <= 0) || (RTPheader->payloadLen >= 16000)
        || (i_startPosition < 12) || (i_startPosition > i_DatagramLen))
    {
        return RTP_CORRUPT_PACKET;
    }
    return 0;
}
#ifdef NETEQ_RED_CODEC
int WebRtcNetEQ_RedundancySplit(RTPPacket_t* RTPheader[], int i_MaximumPayloads,
                                int *i_No_Of_Payloads)
{
    /*
     * Split an RFC 2198 redundancy (RED) payload into separate RTP structs.
     *
     * On entry RTPheader[0] holds the complete RED packet. On exit
     * RTPheader[0] holds the primary (most recent) payload; if redundancy is
     * accepted, RTPheader[1] holds the secondary payload and all remaining
     * entries up to i_MaximumPayloads are cleared (payloadType = -1,
     * payloadLen = 0). All redundant blocks except the last one are skipped.
     *
     * Input:
     *      - RTPheader         : RTPheader[0] contains the whole RED packet
     *      - i_MaximumPayloads : maximum number of payloads to extract
     *                            (1 + maximum number of redundancies)
     *
     * Output:
     *      - RTPheader         : split payloads (see above)
     *      - i_No_Of_Payloads  : number of payloads written (1 or 2)
     *
     * Return value: 0 on success, RED_SPLIT_ERROR2 if the block-header walk
     *               runs outside the packet.
     */
    const WebRtc_Word16 *pw16_data = RTPheader[0]->payload; /* Pointer to the data */
    WebRtc_UWord16 uw16_offsetTimeStamp = 65535, uw16_secondPayload = 65535;
    int i_blockLength, i_k;
    int i_discardedBlockLength = 0;
    int singlePayload = 0;

#ifdef WEBRTC_BIG_ENDIAN
    if ((pw16_data[0] & 0x8000) == 0)
    {
        /* Only one payload in this packet (F bit clear). */
        singlePayload = 1;
        /* set the blocklength to -4 to deduct the non-existent 4-byte RED header */
        i_blockLength = -4;
        RTPheader[0]->payloadType = ((((WebRtc_UWord16)pw16_data[0]) & 0x7F00) >> 8);
    }
    else
    {
        /*
         * Discard all but the two last payloads.
         * BUG FIX: the F-bit test must be "!= 0". The masked value is either
         * 0 or 0x8000 - never 1 - so the old "== 1" comparison made this
         * loop unreachable and extra redundancy levels were never skipped.
         */
        while (((pw16_data[2] & 0x8000) != 0) &&
            (pw16_data<((RTPheader[0]->payload)+((RTPheader[0]->payloadLen+1)>>1))))
        {
            i_discardedBlockLength += (4+(((WebRtc_UWord16)pw16_data[1]) & 0x3FF));
            pw16_data+=2;
        }
        if (pw16_data>=(RTPheader[0]->payload+((RTPheader[0]->payloadLen+1)>>1)))
        {
            return RED_SPLIT_ERROR2; /* Error, we are outside the packet */
        }
        singlePayload = 0; /* the packet contains more than one payload */
        uw16_secondPayload = ((((WebRtc_UWord16)pw16_data[0]) & 0x7F00) >> 8);
        RTPheader[0]->payloadType = ((((WebRtc_UWord16)pw16_data[2]) & 0x7F00) >> 8);
        uw16_offsetTimeStamp = ((((WebRtc_UWord16)pw16_data[0]) & 0xFF) << 6) +
            ((((WebRtc_UWord16)pw16_data[1]) & 0xFC00) >> 10);
        i_blockLength = (((WebRtc_UWord16)pw16_data[1]) & 0x3FF);
    }
#else /* WEBRTC_LITTLE_ENDIAN */
    if ((pw16_data[0] & 0x80) == 0)
    {
        /* Only one payload in this packet (F bit clear). */
        singlePayload = 1;
        /* set the blocklength to -4 to deduct the non-existent 4-byte RED header */
        i_blockLength = -4;
        RTPheader[0]->payloadType = (((WebRtc_UWord16) pw16_data[0]) & 0x7F);
    }
    else
    {
        /* Discard all but the two last payloads (same "!= 0" fix as in the
         * big-endian branch: the mask yields 0 or 0x80, never 1). */
        while (((pw16_data[2] & 0x80) != 0) && (pw16_data < ((RTPheader[0]->payload)
            + ((RTPheader[0]->payloadLen + 1) >> 1))))
        {
            i_discardedBlockLength += (4 + ((((WebRtc_UWord16) pw16_data[1]) & 0x3) << 8)
                + ((((WebRtc_UWord16) pw16_data[1]) & 0xFF00) >> 8));
            pw16_data += 2;
        }
        if (pw16_data >= (RTPheader[0]->payload + ((RTPheader[0]->payloadLen + 1) >> 1)))
        {
            return RED_SPLIT_ERROR2; /* Error, we are outside the packet */
        }
        singlePayload = 0; /* the packet contains more than one payload */
        uw16_secondPayload = (((WebRtc_UWord16) pw16_data[0]) & 0x7F);
        RTPheader[0]->payloadType = (((WebRtc_UWord16) pw16_data[2]) & 0x7F);
        uw16_offsetTimeStamp = ((((WebRtc_UWord16) pw16_data[0]) & 0xFF00) >> 2)
            + ((((WebRtc_UWord16) pw16_data[1]) & 0xFC) >> 2);
        i_blockLength = ((((WebRtc_UWord16) pw16_data[1]) & 0x3) << 8)
            + ((((WebRtc_UWord16) pw16_data[1]) & 0xFF00) >> 8);
    }
#endif

    if (i_MaximumPayloads < 2 || singlePayload == 1)
    {
        /* Reject the redundancy; or no redundant payload present. */
        for (i_k = 1; i_k < i_MaximumPayloads; i_k++)
        {
            RTPheader[i_k]->payloadType = -1;
            RTPheader[i_k]->payloadLen = 0;
        }
        /* update the pointer for the main data; (5 + i_blockLength) skips the
         * RED headers (or just the 1-byte header when singlePayload, since
         * i_blockLength is -4 then) */
        pw16_data = &pw16_data[(5 + i_blockLength) >> 1];
        RTPheader[0]->starts_byte1 = (5 + i_blockLength) & 0x1;
        RTPheader[0]->payloadLen = RTPheader[0]->payloadLen - (i_blockLength + 5)
            - i_discardedBlockLength;
        RTPheader[0]->payload = pw16_data;
        *i_No_Of_Payloads = 1;
    }
    else
    {
        /* Redundancy accepted, put the redundancy in second RTPheader. */
        RTPheader[1]->payloadType = uw16_secondPayload;
        RTPheader[1]->payload = &pw16_data[5 >> 1];
        RTPheader[1]->starts_byte1 = 5 & 0x1;
        RTPheader[1]->seqNumber = RTPheader[0]->seqNumber;
        RTPheader[1]->timeStamp = RTPheader[0]->timeStamp - uw16_offsetTimeStamp;
        RTPheader[1]->ssrc = RTPheader[0]->ssrc;
        RTPheader[1]->payloadLen = i_blockLength;
        /* Modify first RTP packet, so that it contains the main data. */
        RTPheader[0]->payload = &pw16_data[(5 + i_blockLength) >> 1];
        RTPheader[0]->starts_byte1 = (5 + i_blockLength) & 0x1;
        RTPheader[0]->payloadLen = RTPheader[0]->payloadLen - (i_blockLength + 5)
            - i_discardedBlockLength;
        /* Clear the following payloads. */
        for (i_k = 2; i_k < i_MaximumPayloads; i_k++)
        {
            RTPheader[i_k]->payloadType = -1;
            RTPheader[i_k]->payloadLen = 0;
        }
        *i_No_Of_Payloads = 2;
    }
    return 0;
}
#endif

View File

@ -1,78 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* RTP data struct and related functions.
*/
#ifndef RTP_H
#define RTP_H

#include "typedefs.h"
#include "codec_db.h"

/* Parsed RTP packet: header fields plus a pointer into the datagram's
 * payload (data is packed two bytes per 16-bit word). */
typedef struct
{
    WebRtc_UWord16 seqNumber; /* RTP sequence number */
    WebRtc_UWord32 timeStamp; /* RTP timestamp */
    WebRtc_UWord32 ssrc; /* synchronization source identifier */
    int payloadType; /* RTP payload type; -1 marks an unused/cleared entry */
    const WebRtc_Word16 *payload; /* pointer to payload data, two bytes per word */
    WebRtc_Word16 payloadLen; /* payload length in bytes */
    WebRtc_Word16 starts_byte1; /* non-zero when the payload begins at an odd byte
     offset within the first word */
    WebRtc_Word16 rcuPlCntr; /* non-zero marks a redundant (RCU) payload */
} RTPPacket_t;

/****************************************************************************
 * WebRtcNetEQ_RTPPayloadInfo(...)
 *
 * Converts a datagram into an RTP header struct.
 *
 * Input:
 *      - Datagram      : UDP datagram from the network
 *      - DatagramLen   : Length in bytes of the datagram
 *
 * Output:
 *      - RTPheader     : Structure with the datagram info
 *
 * Return value         : 0 - Ok
 *                        -1 - Error
 */
int WebRtcNetEQ_RTPPayloadInfo(WebRtc_Word16* pw16_Datagram, int i_DatagramLen,
                               RTPPacket_t* RTPheader);

/****************************************************************************
 * WebRtcNetEQ_RedundancySplit(...)
 *
 * Splits a Redundancy RTP struct into two RTP structs. User has to check
 * that it's really the redundancy payload. No such check is done inside this
 * function.
 *
 * Input:
 *      - RTPheader     : First header holds the whole RTP packet (with the redundancy payload)
 *      - MaximumPayloads:
 *                        The maximum number of RTP payloads that should be
 *                        extracted (1+maximum_no_of_Redundancies).
 *
 * Output:
 *      - RTPheader     : First header holds the main RTP data, while 2..N
 *                        holds the redundancy data.
 *      - i_No_Of_Payloads :
 *                        Number of payloads extracted (primary + redundant).
 *
 * Return value         : 0 - Ok
 *                        -1 - Error
 */
int WebRtcNetEQ_RedundancySplit(RTPPacket_t* RTPheader[], int i_MaximumPayloads,
                                int *i_No_Of_Payloads);

#endif

View File

@ -1,78 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* Function were the sample rate is set.
*/
#include "mcu.h"
#include "dtmf_buffer.h"
#include "neteq_error_codes.h"
int WebRtcNetEQ_McuSetFs(MCUInst_t *inst, WebRtc_UWord16 fs)
{
    /*
     * Set the sample rate of the MCU instance.
     *
     * Updates timestampsPerCall (samples per output call, derived from
     * millisecondsPerCall) and, when NETEQ_ATEVENT_DECODE is compiled in,
     * re-initializes the DTMF decoder for the new rate. Rates above 8 kHz
     * are only available when the corresponding NETEQ_*WIDEBAND macros are
     * defined.
     *
     * Input:
     *      - inst : MCU instance
     *      - fs   : sample rate in Hz (8000, 16000, 32000, or 48000)
     *
     * Return value: 0 on success, the DTMF decoder init result when DTMF
     *               decoding is enabled, or CODEC_DB_UNSUPPORTED_FS for an
     *               unsupported rate.
     */
    WebRtc_Word16 ok = 0;

    switch (fs)
    {
        case 8000:
        {
#ifdef NETEQ_ATEVENT_DECODE
            /* third argument scales with fs (560 = 70 * 8); presumably a
             * frame length in samples - TODO confirm against dtmf_buffer.h */
            ok = WebRtcNetEQ_DtmfDecoderInit(&inst->DTMF_inst, 8000, 560);
#endif
            inst->timestampsPerCall = inst->millisecondsPerCall * 8;
            break;
        }
#ifdef NETEQ_WIDEBAND
        case 16000:
        {
#ifdef NETEQ_ATEVENT_DECODE
            ok = WebRtcNetEQ_DtmfDecoderInit(&inst->DTMF_inst, 16000, 1120);
#endif
            inst->timestampsPerCall = inst->millisecondsPerCall * 16;
            break;
        }
#endif
#ifdef NETEQ_32KHZ_WIDEBAND
        case 32000:
        {
#ifdef NETEQ_ATEVENT_DECODE
            ok = WebRtcNetEQ_DtmfDecoderInit(&inst->DTMF_inst, 32000, 2240);
#endif
            inst->timestampsPerCall = inst->millisecondsPerCall * 32;
            break;
        }
#endif
#ifdef NETEQ_48KHZ_WIDEBAND
        case 48000:
        {
#ifdef NETEQ_ATEVENT_DECODE
            ok = WebRtcNetEQ_DtmfDecoderInit(&inst->DTMF_inst, 48000, 3360);
#endif
            inst->timestampsPerCall = inst->millisecondsPerCall * 48;
            break;
        }
#endif
        default:
        {
            /* Not supported yet */
            return CODEC_DB_UNSUPPORTED_FS;
        }
    } /* end switch */

    inst->fs = fs;
    return ok;
}

View File

@ -1,838 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* Signal the MCU that data is available and ask for a RecOut decision.
*/
#include "mcu.h"
#include <string.h>
#include "signal_processing_library.h"
#include "automode.h"
#include "dtmf_buffer.h"
#include "mcu_dsp_common.h"
#include "neteq_error_codes.h"
#ifdef NETEQ_DELAY_LOGGING
#include "delay_logging.h"
#include <stdio.h>
extern FILE *delay_fid2; /* file pointer to delay log file */
#endif
/*
* Signals the MCU that DSP status data is available.
*/
int WebRtcNetEQ_SignalMcu(MCUInst_t *inst)
{
int i_bufferpos, i_res;
WebRtc_UWord16 uw16_instr;
DSP2MCU_info_t dspInfo;
WebRtc_Word16 *blockPtr, blockLen;
WebRtc_UWord32 uw32_availableTS;
RTPPacket_t temp_pkt;
WebRtc_Word32 w32_bufsize, w32_tmp;
WebRtc_Word16 payloadType = -1;
WebRtc_Word16 wantedNoOfTimeStamps;
WebRtc_Word32 totalTS;
WebRtc_Word16 oldPT, latePacketExist = 0;
WebRtc_UWord32 oldTS, prevTS, uw32_tmp;
WebRtc_UWord16 prevSeqNo;
WebRtc_Word16 nextSeqNoAvail;
WebRtc_Word16 fs_mult, w16_tmp;
WebRtc_Word16 lastModeBGNonly = 0;
#ifdef NETEQ_DELAY_LOGGING
int temp_var;
#endif
int playDtmf = 0;
fs_mult = WebRtcSpl_DivW32W16ResW16(inst->fs, 8000);
/* Increment counter since last statistics report */
inst->lastReportTS += inst->timestampsPerCall;
/* Read info from DSP so we now current status */
WEBRTC_SPL_MEMCPY_W8(&dspInfo,inst->pw16_readAddress,sizeof(DSP2MCU_info_t));
/* Set blockPtr to first payload block */
blockPtr = &inst->pw16_writeAddress[3];
/* Clear instruction word and number of lost samples (2*WebRtc_Word16) */
inst->pw16_writeAddress[0] = 0;
inst->pw16_writeAddress[1] = 0;
inst->pw16_writeAddress[2] = 0;
if ((dspInfo.lastMode & MODE_AWAITING_CODEC_PTR) != 0)
{
/*
* Make sure state is adjusted so that a codec update is
* performed when first packet arrives.
*/
if (inst->new_codec != 1)
{
inst->current_Codec = -1;
}
dspInfo.lastMode = (dspInfo.lastMode ^ MODE_AWAITING_CODEC_PTR);
}
#ifdef NETEQ_STEREO
if ((dspInfo.lastMode & MODE_MASTER_DTMF_SIGNAL) != 0)
{
playDtmf = 1; /* force DTMF decision */
dspInfo.lastMode = (dspInfo.lastMode ^ MODE_MASTER_DTMF_SIGNAL);
}
if ((dspInfo.lastMode & MODE_USING_STEREO) != 0)
{
if (inst->usingStereo == 0)
{
/* stereo mode changed; reset automode instance to re-synchronize statistics */
WebRtcNetEQ_ResetAutomode(&(inst->BufferStat_inst.Automode_inst),
inst->PacketBuffer_inst.maxInsertPositions);
}
inst->usingStereo = 1;
dspInfo.lastMode = (dspInfo.lastMode ^ MODE_USING_STEREO);
}
else
{
inst->usingStereo = 0;
}
#endif
/* detect if BGN_ONLY flag is set in lastMode */
if ((dspInfo.lastMode & MODE_BGN_ONLY) != 0)
{
lastModeBGNonly = 1; /* remember flag */
dspInfo.lastMode ^= MODE_BGN_ONLY; /* clear the flag */
}
if ((dspInfo.lastMode == MODE_RFC3389CNG) || (dspInfo.lastMode == MODE_CODEC_INTERNAL_CNG)
|| (dspInfo.lastMode == MODE_EXPAND))
{
/*
* If last mode was CNG (or Expand, since this could be covering up for a lost CNG
* packet), increase the CNGplayedTS counter.
*/
inst->BufferStat_inst.uw32_CNGplayedTS += inst->timestampsPerCall;
if (dspInfo.lastMode == MODE_RFC3389CNG)
{
/* remember that RFC3389CNG is on (needed if CNG is interrupted by DTMF) */
inst->BufferStat_inst.w16_cngOn = CNG_RFC3389_ON;
}
else if (dspInfo.lastMode == MODE_CODEC_INTERNAL_CNG)
{
/* remember that internal CNG is on (needed if CNG is interrupted by DTMF) */
inst->BufferStat_inst.w16_cngOn = CNG_INTERNAL_ON;
}
}
/* Update packet size from previously decoded packet */
if (dspInfo.frameLen > 0)
{
inst->PacketBuffer_inst.packSizeSamples = dspInfo.frameLen;
}
/* Look for late packet (unless codec has changed) */
if (inst->new_codec != 1)
{
if (WebRtcNetEQ_DbIsMDCodec((enum WebRtcNetEQDecoder) inst->current_Codec))
{
WebRtcNetEQ_PacketBufferFindLowestTimestamp(&inst->PacketBuffer_inst,
inst->timeStamp, &uw32_availableTS, &i_bufferpos, 1, &payloadType);
if ((inst->new_codec != 1) && (inst->timeStamp == uw32_availableTS)
&& (inst->timeStamp < dspInfo.playedOutTS) && (i_bufferpos != -1)
&& (WebRtcNetEQ_DbGetPayload(&(inst->codec_DB_inst),
(enum WebRtcNetEQDecoder) inst->current_Codec) == payloadType))
{
temp_pkt.payload = blockPtr + 1;
i_res = WebRtcNetEQ_PacketBufferExtract(&inst->PacketBuffer_inst, &temp_pkt,
i_bufferpos);
if (i_res < 0)
{ /* error returned */
return i_res;
}
*blockPtr = temp_pkt.payloadLen;
/* set the flag if this is a redundant payload */
if (temp_pkt.rcuPlCntr > 0)
{
*blockPtr = (*blockPtr) | (DSP_CODEC_RED_FLAG);
}
blockPtr += ((temp_pkt.payloadLen + 1) >> 1) + 1;
/*
* Close the data with a zero size block, in case we will not write any
* more data.
*/
*blockPtr = 0;
inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0xf0ff)
| DSP_CODEC_ADD_LATE_PKT;
latePacketExist = 1;
}
}
}
i_res = WebRtcNetEQ_PacketBufferFindLowestTimestamp(&inst->PacketBuffer_inst,
dspInfo.playedOutTS, &uw32_availableTS, &i_bufferpos, (inst->new_codec == 0),
&payloadType);
if (i_res < 0)
{ /* error returned */
return i_res;
}
if (inst->BufferStat_inst.w16_cngOn == CNG_RFC3389_ON)
{
/*
* Because of timestamp peculiarities, we have to "manually" disallow using a CNG
* packet with the same timestamp as the one that was last played. This can happen
* when using redundancy and will cause the timing to shift.
*/
while (i_bufferpos != -1 && WebRtcNetEQ_DbIsCNGPayload(&inst->codec_DB_inst,
payloadType) && dspInfo.playedOutTS >= uw32_availableTS)
{
/* Don't use this packet, discard it */
inst->PacketBuffer_inst.payloadType[i_bufferpos] = -1;
inst->PacketBuffer_inst.payloadLengthBytes[i_bufferpos] = 0;
inst->PacketBuffer_inst.numPacketsInBuffer--;
/* Check buffer again */
WebRtcNetEQ_PacketBufferFindLowestTimestamp(&inst->PacketBuffer_inst,
dspInfo.playedOutTS, &uw32_availableTS, &i_bufferpos, (inst->new_codec == 0),
&payloadType);
}
}
/* Check packet buffer */
w32_bufsize = WebRtcNetEQ_PacketBufferGetSize(&inst->PacketBuffer_inst);
if (dspInfo.lastMode == MODE_SUCCESS_ACCELERATE || dspInfo.lastMode
== MODE_LOWEN_ACCELERATE || dspInfo.lastMode == MODE_SUCCESS_PREEMPTIVE
|| dspInfo.lastMode == MODE_LOWEN_PREEMPTIVE)
{
/* Subtract (dspInfo.samplesLeft + inst->timestampsPerCall) from sampleMemory */
inst->BufferStat_inst.Automode_inst.sampleMemory -= dspInfo.samplesLeft
+ inst->timestampsPerCall;
/* Update post-call statistics */
inst->statInst.jbChangeCount++;
}
/* calculate total current buffer size (in ms*8), including sync buffer */
w32_bufsize = WebRtcSpl_DivW32W16((w32_bufsize + dspInfo.samplesLeft), fs_mult);
if (((WebRtc_UWord32) w32_bufsize >> 3) < inst->statInst.jbMinSize)
{
/* new all-time low */
inst->statInst.jbMinSize = ((WebRtc_UWord32) w32_bufsize >> 3); /* shift to ms */
}
if (((WebRtc_UWord32) w32_bufsize >> 3) > inst->statInst.jbMaxSize)
{
/* new all-time high */
inst->statInst.jbMaxSize = ((WebRtc_UWord32) w32_bufsize >> 3); /* shift to ms */
}
/* Update avg bufsize:
* jbAvgSize = (jbAvgCount * jbAvgSize + w32_bufsize/8)/(jbAvgCount+1)
* with proper rounding
*/
{
WebRtc_Word32 avgTmp;
/* Simplify the above formula to:
* jbAvgSizeQ16 =
* jbAvgSizeQ16 + ( (w32_bufsize/8 << 16) - jbAvgSizeQ16 + d ) / (jbAvgCount+1)
* where d = jbAvgCount/2 for proper rounding.
*/
avgTmp = (((WebRtc_UWord32) w32_bufsize >> 3) << 16) - inst->statInst.jbAvgSizeQ16;
avgTmp = WEBRTC_SPL_DIV( avgTmp + (inst->statInst.jbAvgCount>>1),
inst->statInst.jbAvgCount + 1 );
inst->statInst.jbAvgSizeQ16 += avgTmp;
if (inst->statInst.jbAvgCount < (0xFFFF - 1))
{
inst->statInst.jbAvgCount++;
}
}
#ifdef NETEQ_ATEVENT_DECODE
/* DTMF data will affect the decision */
if (WebRtcNetEQ_DtmfDecode(&inst->DTMF_inst, blockPtr + 1, blockPtr + 2,
dspInfo.playedOutTS + inst->BufferStat_inst.uw32_CNGplayedTS) > 0)
{
playDtmf = 1;
/* Flag DTMF payload */
inst->pw16_writeAddress[0] = inst->pw16_writeAddress[0] | DSP_DTMF_PAYLOAD;
/* Block Length in bytes */
blockPtr[0] = 4;
/* Advance to next payload position */
blockPtr += 3;
}
#endif
/* Update statistics and make decision */
uw16_instr = WebRtcNetEQ_BufstatsDecision(&inst->BufferStat_inst,
inst->PacketBuffer_inst.packSizeSamples, w32_bufsize, dspInfo.playedOutTS,
uw32_availableTS, i_bufferpos == -1,
WebRtcNetEQ_DbIsCNGPayload(&inst->codec_DB_inst, payloadType), dspInfo.lastMode,
inst->NetEqPlayoutMode, inst->timestampsPerCall, inst->NoOfExpandCalls, fs_mult,
lastModeBGNonly, playDtmf);
/* Check if time to reset loss counter */
if (inst->lastReportTS > WEBRTC_SPL_UMUL(inst->fs, MAX_LOSS_REPORT_PERIOD))
{
/* reset loss counter */
WebRtcNetEQ_ResetMcuInCallStats(inst);
}
/* Check sync buffer size */
if ((dspInfo.samplesLeft >= inst->timestampsPerCall) && (uw16_instr
!= BUFSTATS_DO_ACCELERATE) && (uw16_instr != BUFSTATS_DO_MERGE) && (uw16_instr
!= BUFSTATS_DO_PREEMPTIVE_EXPAND))
{
*blockPtr = 0;
inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff) | DSP_INSTR_NORMAL;
return 0;
}
if (uw16_instr == BUFSTATS_DO_EXPAND)
{
inst->NoOfExpandCalls++;
}
else
{
/* update post-call statistics */
WebRtc_UWord32
expandTime =
WEBRTC_SPL_UDIV(WEBRTC_SPL_UMUL_32_16(
WEBRTC_SPL_UMUL_32_16((WebRtc_UWord32) inst->NoOfExpandCalls,
(WebRtc_UWord16) 1000),
inst->timestampsPerCall), inst->fs); /* expand time in ms */
if (expandTime > 2000)
{
inst->statInst.countExpandMoreThan2000ms++;
}
else if (expandTime > 500)
{
inst->statInst.countExpandMoreThan500ms++;
}
else if (expandTime > 250)
{
inst->statInst.countExpandMoreThan250ms++;
}
else if (expandTime > 120)
{
inst->statInst.countExpandMoreThan120ms++;
}
if (expandTime > inst->statInst.longestExpandDurationMs)
{
inst->statInst.longestExpandDurationMs = expandTime;
}
/* reset counter */
inst->NoOfExpandCalls = 0;
}
/* New codec or big change in packet number? */
if (((inst->new_codec) || (uw16_instr == BUFSTAT_REINIT)) && (uw16_instr
!= BUFSTATS_DO_EXPAND))
{
CodecFuncInst_t cinst;
/* Clear other instructions */
blockPtr = &inst->pw16_writeAddress[3];
/* Clear instruction word */
inst->pw16_writeAddress[0] = 0;
inst->timeStamp = uw32_availableTS;
dspInfo.playedOutTS = uw32_availableTS;
if (inst->current_Codec != -1)
{
i_res = WebRtcNetEQ_DbGetPtrs(&inst->codec_DB_inst,
(enum WebRtcNetEQDecoder) inst->current_Codec, &cinst);
if (i_res < 0)
{ /* error returned */
return i_res;
}
}
else
{
/* The main codec has not been initialized yet (first packets are DTMF or CNG). */
if (WebRtcNetEQ_DbIsCNGPayload(&inst->codec_DB_inst, payloadType))
{
/* The currently extracted packet is CNG; get CNG fs */
WebRtc_UWord16 tempFs;
tempFs = WebRtcNetEQ_DbGetSampleRate(&inst->codec_DB_inst, payloadType);
if (tempFs > 0)
{
inst->fs = tempFs;
}
}
WebRtcSpl_MemSetW16((WebRtc_Word16*) &cinst, 0,
sizeof(CodecFuncInst_t) / sizeof(WebRtc_Word16));
cinst.codec_fs = inst->fs;
}
cinst.timeStamp = inst->timeStamp;
blockLen = (sizeof(CodecFuncInst_t)) >> (sizeof(WebRtc_Word16) - 1); /* in Word16 */
*blockPtr = blockLen * 2;
blockPtr++;
WEBRTC_SPL_MEMCPY_W8(blockPtr,&cinst,sizeof(CodecFuncInst_t));
blockPtr += blockLen;
inst->new_codec = 0;
/* Reinitialize the MCU fs */
i_res = WebRtcNetEQ_McuSetFs(inst, cinst.codec_fs);
if (i_res < 0)
{ /* error returned */
return i_res;
}
/* Set the packet size by guessing */
inst->PacketBuffer_inst.packSizeSamples = inst->timestampsPerCall * 3;
WebRtcNetEQ_ResetAutomode(&(inst->BufferStat_inst.Automode_inst),
inst->PacketBuffer_inst.maxInsertPositions);
#ifdef NETEQ_CNG_CODEC
/* Also insert CNG state as this might be needed by DSP */
i_res = WebRtcNetEQ_DbGetPtrs(&inst->codec_DB_inst, kDecoderCNG, &cinst);
if ((i_res < 0) && (i_res != CODEC_DB_NOT_EXIST1))
{
/* other error returned */
/* (CODEC_DB_NOT_EXIST1 simply indicates that CNG is not used */
return i_res;
}
else
{
/* CNG exists */
blockLen = (sizeof(cinst.codec_state)) >> (sizeof(WebRtc_Word16) - 1);
*blockPtr = blockLen * 2;
blockPtr++;
WEBRTC_SPL_MEMCPY_W8(blockPtr,&cinst.codec_state,sizeof(cinst.codec_state));
blockPtr += blockLen;
}
#endif
inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0xf0ff)
| DSP_CODEC_NEW_CODEC;
if (uw16_instr == BUFSTATS_DO_RFC3389CNG_NOPACKET)
{
/*
* Change decision to CNG packet, since we do have a CNG packet, but it was
* considered too early to use. Now, use it anyway.
*/
uw16_instr = BUFSTATS_DO_RFC3389CNG_PACKET;
}
else if (uw16_instr != BUFSTATS_DO_RFC3389CNG_PACKET)
{
uw16_instr = BUFSTATS_DO_NORMAL;
}
/* reset loss counter */
WebRtcNetEQ_ResetMcuInCallStats(inst);
}
/* Should we just reset the decoder? */
if (uw16_instr == BUFSTAT_REINIT_DECODER)
{
/* Change decision to normal and flag decoder reset */
uw16_instr = BUFSTATS_DO_NORMAL;
inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0xf0ff) | DSP_CODEC_RESET;
}
/* Expand requires no new packet */
if (uw16_instr == BUFSTATS_DO_EXPAND)
{
inst->timeStamp = dspInfo.playedOutTS;
/* Have we got one descriptor left? */
if (WebRtcNetEQ_DbIsMDCodec((enum WebRtcNetEQDecoder) inst->current_Codec)
&& (dspInfo.MD || latePacketExist))
{
if (dspInfo.lastMode != MODE_ONE_DESCRIPTOR)
{
/* this is the first "consecutive" one-descriptor decoding; reset counter */
inst->one_desc = 0;
}
if (inst->one_desc < MAX_ONE_DESC)
{
/* use that one descriptor */
inst->one_desc++; /* increase counter */
inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
| DSP_INSTR_NORMAL_ONE_DESC;
/* decrease counter since we did no Expand */
inst->NoOfExpandCalls = WEBRTC_SPL_MAX(inst->NoOfExpandCalls - 1, 0);
return 0;
}
else
{
/* too many consecutive one-descriptor decodings; do expand instead */
inst->one_desc = 0; /* reset counter */
}
}
inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff) | DSP_INSTR_EXPAND;
return 0;
}
/* Merge is not needed if we still have a descriptor */
if ((uw16_instr == BUFSTATS_DO_MERGE) && (dspInfo.MD != 0))
{
inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
| DSP_INSTR_NORMAL_ONE_DESC;
*blockPtr = 0;
return 0;
}
/* Do CNG without trying to extract any packets from buffer */
if (uw16_instr == BUFSTATS_DO_RFC3389CNG_NOPACKET)
{
inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
| DSP_INSTR_DO_RFC3389CNG;
*blockPtr = 0;
return 0;
}
/* Do built-in CNG without extracting any new packets from buffer */
if (uw16_instr == BUFSTATS_DO_INTERNAL_CNG_NOPACKET)
{
inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
| DSP_INSTR_DO_CODEC_INTERNAL_CNG;
*blockPtr = 0;
return 0;
}
/* Do DTMF without extracting any new packets from buffer */
if (uw16_instr == BUFSTATS_DO_DTMF_ONLY)
{
WebRtc_UWord32 timeStampJump = 0;
/* Update timestamp */
if ((inst->BufferStat_inst.uw32_CNGplayedTS > 0) && (dspInfo.lastMode != MODE_DTMF))
{
/* Jump in timestamps if needed */
timeStampJump = inst->BufferStat_inst.uw32_CNGplayedTS;
inst->pw16_writeAddress[1] = (WebRtc_UWord16) (timeStampJump >> 16);
inst->pw16_writeAddress[2] = (WebRtc_UWord16) (timeStampJump & 0xFFFF);
}
inst->timeStamp = dspInfo.playedOutTS + timeStampJump;
/* update post-call statistics (since we will reset the CNG counter) */
inst->statInst.generatedSilentMs
+= WEBRTC_SPL_UDIV(
WEBRTC_SPL_UMUL_32_16(inst->BufferStat_inst.uw32_CNGplayedTS, (WebRtc_UWord16) 1000),
inst->fs);
inst->BufferStat_inst.uw32_CNGplayedTS = 0;
inst->NoOfExpandCalls = 0;
inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
| DSP_INSTR_DTMF_GENERATE;
*blockPtr = 0;
return 0;
}
if (uw16_instr == BUFSTATS_DO_ACCELERATE)
{
/* In order to do a Accelerate we need at least 30 ms of data */
if (dspInfo.samplesLeft >= (3 * 80 * fs_mult))
{
/* Already have enough data, so we do not need to extract any more */
inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
| DSP_INSTR_ACCELERATE;
*blockPtr = 0;
inst->BufferStat_inst.Automode_inst.sampleMemory
= (WebRtc_Word32) dspInfo.samplesLeft;
inst->BufferStat_inst.Automode_inst.prevTimeScale = 1;
return 0;
}
else if ((dspInfo.samplesLeft >= (1 * 80 * fs_mult))
&& (inst->PacketBuffer_inst.packSizeSamples >= (240 * fs_mult)))
{
/* Avoid decoding more data as it might overflow playout buffer */
inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
| DSP_INSTR_NORMAL;
*blockPtr = 0;
return 0;
}
else if ((dspInfo.samplesLeft < (1 * 80 * fs_mult))
&& (inst->PacketBuffer_inst.packSizeSamples >= (240 * fs_mult)))
{
/* For >= 30ms allow Accelerate with a decoding to avoid overflow in playout buffer */
wantedNoOfTimeStamps = inst->timestampsPerCall;
}
else if (dspInfo.samplesLeft >= (2 * 80 * fs_mult))
{
/* We need to decode another 10 ms in order to do an Accelerate */
wantedNoOfTimeStamps = inst->timestampsPerCall;
}
else
{
/*
* Build up decoded data by decoding at least 20 ms of data.
* Do not perform Accelerate yet, but wait until we only need to do one decoding.
*/
wantedNoOfTimeStamps = 2 * inst->timestampsPerCall;
uw16_instr = BUFSTATS_DO_NORMAL;
}
}
else if (uw16_instr == BUFSTATS_DO_PREEMPTIVE_EXPAND)
{
/* In order to do a Preemptive Expand we need at least 30 ms of data */
if (dspInfo.samplesLeft >= (3 * 80 * fs_mult))
{
/* Already have enough data, so we do not need to extract any more */
inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
| DSP_INSTR_PREEMPTIVE_EXPAND;
*blockPtr = 0;
inst->BufferStat_inst.Automode_inst.sampleMemory
= (WebRtc_Word32) dspInfo.samplesLeft;
inst->BufferStat_inst.Automode_inst.prevTimeScale = 1;
return 0;
}
else if ((dspInfo.samplesLeft >= (1 * 80 * fs_mult))
&& (inst->PacketBuffer_inst.packSizeSamples >= (240 * fs_mult)))
{
/*
* Avoid decoding more data as it might overflow playout buffer;
* still try Preemptive Expand though.
*/
inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
| DSP_INSTR_PREEMPTIVE_EXPAND;
*blockPtr = 0;
inst->BufferStat_inst.Automode_inst.sampleMemory
= (WebRtc_Word32) dspInfo.samplesLeft;
inst->BufferStat_inst.Automode_inst.prevTimeScale = 1;
return 0;
}
else if ((dspInfo.samplesLeft < (1 * 80 * fs_mult))
&& (inst->PacketBuffer_inst.packSizeSamples >= (240 * fs_mult)))
{
/*
* For >= 30ms allow Preemptive Expand with a decoding to avoid overflow in
* playout buffer
*/
wantedNoOfTimeStamps = inst->timestampsPerCall;
}
else if (dspInfo.samplesLeft >= (2 * 80 * fs_mult))
{
/* We need to decode another 10 ms in order to do an Preemptive Expand */
wantedNoOfTimeStamps = inst->timestampsPerCall;
}
else
{
/*
* Build up decoded data by decoding at least 20 ms of data,
* Still try to perform Preemptive Expand.
*/
wantedNoOfTimeStamps = 2 * inst->timestampsPerCall;
}
}
else
{
wantedNoOfTimeStamps = inst->timestampsPerCall;
}
/* Otherwise get data from buffer, try to get at least 10ms */
totalTS = 0;
oldTS = uw32_availableTS;
if ((i_bufferpos > -1) && (uw16_instr != BUFSTATS_DO_ALTERNATIVE_PLC) && (uw16_instr
!= BUFSTATS_DO_ALTERNATIVE_PLC_INC_TS) && (uw16_instr != BUFSTATS_DO_AUDIO_REPETITION)
&& (uw16_instr != BUFSTATS_DO_AUDIO_REPETITION_INC_TS))
{
uw32_tmp = (uw32_availableTS - dspInfo.playedOutTS);
inst->pw16_writeAddress[1] = (WebRtc_UWord16) (uw32_tmp >> 16);
inst->pw16_writeAddress[2] = (WebRtc_UWord16) (uw32_tmp & 0xFFFF);
if (inst->BufferStat_inst.w16_cngOn == CNG_OFF)
{
/*
* Adjustment of TS only corresponds to an actual packet loss
* if comfort noise is not played. If comfort noise was just played,
* this adjustment of TS is only done to get back in sync with the
* stream TS; no loss to report.
*/
inst->lostTS += uw32_tmp;
}
if (uw16_instr != BUFSTATS_DO_RFC3389CNG_PACKET)
{
/* We are about to decode and use a non-CNG packet => CNG period is ended */
inst->BufferStat_inst.w16_cngOn = CNG_OFF;
}
/* update post-call statistics */
inst->statInst.generatedSilentMs
+= WEBRTC_SPL_UDIV(
WEBRTC_SPL_UMUL_32_16(inst->BufferStat_inst.uw32_CNGplayedTS, (WebRtc_UWord16) 1000),
inst->fs);
/*
* Reset CNG timestamp as a new packet will be delivered.
* (Also if CNG packet, since playedOutTS is updated.)
*/
inst->BufferStat_inst.uw32_CNGplayedTS = 0;
prevSeqNo = inst->PacketBuffer_inst.seqNumber[i_bufferpos];
prevTS = inst->PacketBuffer_inst.timeStamp[i_bufferpos];
oldPT = inst->PacketBuffer_inst.payloadType[i_bufferpos];
/* clear flag bits */
inst->pw16_writeAddress[0] = inst->pw16_writeAddress[0] & 0xFF3F;
do
{
inst->timeStamp = uw32_availableTS;
/* Write directly to shared memory */
temp_pkt.payload = blockPtr + 1;
i_res = WebRtcNetEQ_PacketBufferExtract(&inst->PacketBuffer_inst, &temp_pkt,
i_bufferpos);
if (i_res < 0)
{
/* error returned */
return i_res;
}
#ifdef NETEQ_DELAY_LOGGING
temp_var = NETEQ_DELAY_LOGGING_SIGNAL_DECODE;
fwrite(&temp_var,sizeof(int),1,delay_fid2);
fwrite(&temp_pkt.timeStamp,sizeof(WebRtc_UWord32),1,delay_fid2);
fwrite(&dspInfo.samplesLeft, sizeof(WebRtc_UWord16), 1, delay_fid2);
#endif
*blockPtr = temp_pkt.payloadLen;
/* set the flag if this is a redundant payload */
if (temp_pkt.rcuPlCntr > 0)
{
*blockPtr = (*blockPtr) | (DSP_CODEC_RED_FLAG);
}
blockPtr += ((temp_pkt.payloadLen + 1) >> 1) + 1;
if (i_bufferpos > -1)
{
/*
* Store number of TS extracted (last extracted is assumed to be of
* packSizeSamples).
*/
totalTS = uw32_availableTS - oldTS + inst->PacketBuffer_inst.packSizeSamples;
}
/* Check what next packet is available */
WebRtcNetEQ_PacketBufferFindLowestTimestamp(&inst->PacketBuffer_inst,
inst->timeStamp, &uw32_availableTS, &i_bufferpos, 0, &payloadType);
nextSeqNoAvail = 0;
if ((i_bufferpos > -1) && (oldPT
== inst->PacketBuffer_inst.payloadType[i_bufferpos]))
{
w16_tmp = inst->PacketBuffer_inst.seqNumber[i_bufferpos] - prevSeqNo;
w32_tmp = inst->PacketBuffer_inst.timeStamp[i_bufferpos] - prevTS;
if ((w16_tmp == 1) || /* Next packet */
((w16_tmp == 0) && (w32_tmp == inst->PacketBuffer_inst.packSizeSamples)))
{ /* or packet split into frames */
nextSeqNoAvail = 1;
}
prevSeqNo = inst->PacketBuffer_inst.seqNumber[i_bufferpos];
}
}
while ((totalTS < wantedNoOfTimeStamps) && (nextSeqNoAvail == 1));
}
if ((uw16_instr == BUFSTATS_DO_ACCELERATE)
|| (uw16_instr == BUFSTATS_DO_PREEMPTIVE_EXPAND))
{
/* Check that we have enough data (30ms) to do the Accelearate */
if ((totalTS + dspInfo.samplesLeft) < WEBRTC_SPL_MUL(3,inst->timestampsPerCall)
&& (uw16_instr == BUFSTATS_DO_ACCELERATE))
{
/* Not enough, do normal operation instead */
uw16_instr = BUFSTATS_DO_NORMAL;
}
else
{
inst->BufferStat_inst.Automode_inst.sampleMemory
= (WebRtc_Word32) dspInfo.samplesLeft + totalTS;
inst->BufferStat_inst.Automode_inst.prevTimeScale = 1;
}
}
/* Close the data with a zero size block */
*blockPtr = 0;
/* Write data to DSP */
switch (uw16_instr)
{
case BUFSTATS_DO_NORMAL:
/* Normal with decoding included */
inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
| DSP_INSTR_NORMAL;
break;
case BUFSTATS_DO_ACCELERATE:
inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
| DSP_INSTR_ACCELERATE;
break;
case BUFSTATS_DO_MERGE:
inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
| DSP_INSTR_MERGE;
break;
case BUFSTATS_DO_RFC3389CNG_PACKET:
inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
| DSP_INSTR_DO_RFC3389CNG;
break;
case BUFSTATS_DO_ALTERNATIVE_PLC:
inst->pw16_writeAddress[1] = 0;
inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
| DSP_INSTR_DO_ALTERNATIVE_PLC;
break;
case BUFSTATS_DO_ALTERNATIVE_PLC_INC_TS:
inst->pw16_writeAddress[1] = 0;
inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
| DSP_INSTR_DO_ALTERNATIVE_PLC_INC_TS;
break;
case BUFSTATS_DO_AUDIO_REPETITION:
inst->pw16_writeAddress[1] = 0;
inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
| DSP_INSTR_DO_AUDIO_REPETITION;
break;
case BUFSTATS_DO_AUDIO_REPETITION_INC_TS:
inst->pw16_writeAddress[1] = 0;
inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
| DSP_INSTR_DO_AUDIO_REPETITION_INC_TS;
break;
case BUFSTATS_DO_PREEMPTIVE_EXPAND:
inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
| DSP_INSTR_PREEMPTIVE_EXPAND;
break;
default:
return UNKNOWN_BUFSTAT_DECISION;
}
inst->timeStamp = dspInfo.playedOutTS;
return 0;
}

View File

@ -1,141 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* Split an RTP payload (if possible and suitable) and insert into packet buffer.
*/
#include "mcu.h"
#include <string.h>
#include "signal_processing_library.h"
#include "neteq_error_codes.h"
/*
 * Split an RTP payload into smaller chunks (when the codec allows it) and
 * insert each chunk into the packet buffer.
 *
 * packet      - [in]  packet to insert (not modified; a local copy is split).
 * Buffer_inst - [i/o] packet buffer to insert into.
 * split_inst  - [in]  codec-specific split parameters:
 *                     deltaBytes == NO_SPLIT -> insert whole payload as-is;
 *                     deltaBytes < -10       -> "soft" split (G711/PCM16B/G722);
 *                     otherwise              -> "hard" split into deltaBytes-sized frames.
 * flushed     - [out] set non-zero if any insert caused the buffer to flush.
 *
 * Returns 0 on success, or a negative PBUFFER_INSERT_ERRORx code on failure.
 */
int WebRtcNetEQ_SplitAndInsertPayload(RTPPacket_t *packet, PacketBuf_t *Buffer_inst,
                                      SplitInfo_t *split_inst, WebRtc_Word16 *flushed)
{
    int i_ok;
    int len;                               /* remaining payload bytes to insert */
    int i;                                 /* number of chunks inserted so far */
    RTPPacket_t temp_packet;
    WebRtc_Word16 localFlushed = 0;
    const WebRtc_Word16 *pw16_startPayload;

    *flushed = 0;
    len = packet->payloadLen;

    /* Copy to temp packet that can be modified. */
    WEBRTC_SPL_MEMCPY_W8(&temp_packet,packet,sizeof(RTPPacket_t));

    if (split_inst->deltaBytes == NO_SPLIT)
    {
        /* Not splittable codec */
        i_ok = WebRtcNetEQ_PacketBufferInsert(Buffer_inst, packet, &localFlushed);
        *flushed |= localFlushed;
        if (i_ok < 0)
        {
            return PBUFFER_INSERT_ERROR5;
        }
    }
    else if (split_inst->deltaBytes < -10)
    {
        /* G711, PCM16B or G722, use "soft splitting" */
        int split_size = packet->payloadLen;
        int mult = WEBRTC_SPL_ABS_W32(split_inst->deltaBytes) - 10;

        /* Find "chunk size" >= 20 ms and < 40 ms
         * split_inst->deltaTime in this case contains the number of bytes per
         * timestamp unit times 2
         */
        while (split_size >= ((80 << split_inst->deltaTime) * mult))
        {
            split_size >>= 1;
        }

        /* Make the size an even value. */
        if (split_size > 1)
        {
            split_size >>= 1;
            split_size *= 2;
        }

        temp_packet.payloadLen = split_size;
        pw16_startPayload = temp_packet.payload;
        i = 0;
        /* Insert full-size chunks while at least two chunks' worth remains;
         * the final (possibly shorter) remainder is inserted after the loop. */
        while (len >= (2 * split_size))
        {
            /* insert every chunk */
            i_ok = WebRtcNetEQ_PacketBufferInsert(Buffer_inst, &temp_packet, &localFlushed);
            *flushed |= localFlushed;
            temp_packet.timeStamp += ((2 * split_size) >> split_inst->deltaTime);
            i++;
            /* Payload is a Word16 array, so the byte offset i*split_size is
             * halved when indexing. */
            temp_packet.payload = &(pw16_startPayload[(i * split_size) >> 1]);
            /* Toggle on odd chunk sizes; presumably flags whether the payload
             * starts in the high byte of a Word16 -- TODO confirm. */
            temp_packet.starts_byte1 = temp_packet.starts_byte1 ^ (split_size & 0x1);
            len -= split_size;
            if (i_ok < 0)
            {
                return PBUFFER_INSERT_ERROR1;
            }
        }

        /* Insert the rest */
        temp_packet.payloadLen = len;
        i_ok = WebRtcNetEQ_PacketBufferInsert(Buffer_inst, &temp_packet, &localFlushed);
        *flushed |= localFlushed;
        if (i_ok < 0)
        {
            return PBUFFER_INSERT_ERROR2;
        }
    }
    else
    {
        /* Frame based codec, use hard splitting. */
        i = 0;
        pw16_startPayload = temp_packet.payload;
        while (len >= split_inst->deltaBytes)
        {
            temp_packet.payloadLen = split_inst->deltaBytes;
            i_ok = WebRtcNetEQ_PacketBufferInsert(Buffer_inst, &temp_packet, &localFlushed);
            *flushed |= localFlushed;
            i++;
            /* Advance in Word16 units: byte offset divided by two. */
            temp_packet.payload = &(pw16_startPayload[(i * split_inst->deltaBytes) >> 1]);
            temp_packet.timeStamp += split_inst->deltaTime;
            temp_packet.starts_byte1 = temp_packet.starts_byte1 ^ (split_inst->deltaBytes
                & 0x1);
            if (i_ok < 0)
            {
                return PBUFFER_INSERT_ERROR3;
            }
            len -= split_inst->deltaBytes;
        }

        if (len > 0)
        {
            /* Must be a either an error or a SID frame at the end of the packet. */
            temp_packet.payloadLen = len;
            i_ok = WebRtcNetEQ_PacketBufferInsert(Buffer_inst, &temp_packet, &localFlushed);
            *flushed |= localFlushed;
            if (i_ok < 0)
            {
                return PBUFFER_INSERT_ERROR4;
            }
        }
    }
    return 0;
}

View File

@ -1,41 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* This function "unmutes" a vector on a sample by sample basis.
*/
#include "dsp_helpfunctions.h"
#include "signal_processing_library.h"
/*
 * Ramp up ("unmute") a signal on a sample-by-sample basis.
 *
 * The gain starts at *startMuteFact (Q14) and is incremented by unmuteFact
 * per sample in Q20, saturated to [0, 16384] (i.e. [0.0, 1.0] in Q14).
 * On return, *startMuteFact holds the gain reached after the last sample,
 * so consecutive calls continue the ramp seamlessly.
 */
void WebRtcNetEQ_UnmuteSignal(WebRtc_Word16 *pw16_inVec, WebRtc_Word16 *startMuteFact,
                              WebRtc_Word16 *pw16_outVec, WebRtc_Word16 unmuteFact,
                              WebRtc_Word16 N)
{
    WebRtc_UWord16 gainQ14 = (WebRtc_UWord16) *startMuteFact;
    /* Track the gain in Q20 internally; the +32 pre-rounds the later >>6. */
    WebRtc_Word32 gainQ20 = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32) gainQ14, 6) + 32;
    int n;

    for (n = 0; n < N; n++)
    {
        /* out = gain * in with rounding; drop the 14 fractional bits. */
        pw16_outVec[n] = (WebRtc_Word16) ((WEBRTC_SPL_MUL_16_16(gainQ14, pw16_inVec[n])
            + 8192) >> 14);

        /* Step the gain for the next sample and saturate to [0, 1.0 in Q14]. */
        gainQ20 += unmuteFact;
        gainQ20 = WEBRTC_SPL_MAX(0, gainQ20);
        gainQ14 = (WebRtc_UWord16) WEBRTC_SPL_RSHIFT_W32(gainQ20, 6); /* 20 - 14 = 6 */
        gainQ14 = WEBRTC_SPL_MIN(16384, gainQ14);
    }

    *startMuteFact = (WebRtc_Word16) gainQ14;
}

File diff suppressed because it is too large Load Diff

View File

@ -1,629 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "NETEQTEST_CodecClass.h"
#include "webrtc_neteq_help_macros.h"
// Base-class constructor: records the codec parameters for later registration
// with NetEQ. No decoder instance is created here; derived classes allocate
// the actual decoder state and store it in _decoder.
NETEQTEST_Decoder::NETEQTEST_Decoder(enum WebRtcNetEQDecoder type, WebRtc_UWord16 fs, const char * name, WebRtc_UWord8 pt)
:
_decoder(NULL),      // codec state; created and owned by the derived class
_decoderType(type),  // NetEQ codec database identifier
_pt(pt),             // RTP payload type
_fs(fs),             // sample rate in Hz
_name(name)          // human-readable codec name (used in error messages)
{
}
// Fill in the codec definition with this decoder's parameters and register it
// in the given NetEQ instance's codec database. Returns NetEQ's error code
// (0 on success); a failure is also reported on stdout.
int NETEQTEST_Decoder::loadToNetEQ(NETEQTEST_NetEQClass & neteq, WebRtcNetEQ_CodecDef & codecInst)
{
    SET_CODEC_PAR(codecInst, _decoderType, _pt, _decoder, _fs);

    const int status = neteq.loadCodec(codecInst);
    if (status != 0)
    {
        printf("Error loading codec %s into NetEQ database\n", _name.c_str());
    }
    return status;
}
// iSAC
#ifdef CODEC_ISAC
#include "isac.h"
// Create an iSAC (wideband, 16 kHz) decoder. Throws std::exception if the
// codec instance cannot be allocated.
decoder_iSAC::decoder_iSAC(WebRtc_UWord8 pt)
:
NETEQTEST_Decoder(kDecoderISAC, 16000, "iSAC", pt)
{
    WebRtc_Word16 err = WebRtcIsac_Create((ISACStruct **) &_decoder);
    if (err)
    {
        throw std::exception();
    }
    // NOTE(review): the encoder side is initialized as well, presumably because
    // the iSAC instance is shared between encoder and decoder -- confirm.
    WebRtcIsac_EncoderInit((ISACStruct *) _decoder, 0);
    WebRtcIsac_SetDecSampRate((ISACStruct *) _decoder, kIsacWideband);
}
// Release the iSAC decoder instance, if one was created.
decoder_iSAC::~decoder_iSAC()
{
    if (_decoder == NULL)
    {
        return;
    }
    WebRtcIsac_Free((ISACStruct *) _decoder);
    _decoder = NULL;
}
// Register the iSAC decoder function pointers and parameters with NetEQ.
int decoder_iSAC::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
{
    WebRtcNetEQ_CodecDef codecDef;
    SET_ISAC_FUNCTIONS(codecDef);
    return NETEQTEST_Decoder::loadToNetEQ(neteq, codecDef);
}
#endif
#ifdef CODEC_ISAC_SWB
decoder_iSACSWB::decoder_iSACSWB(WebRtc_UWord8 pt)
:
NETEQTEST_Decoder(kDecoderISACswb, 32000, "iSAC swb", pt)
{
WebRtc_Word16 err = WebRtcIsac_Create((ISACStruct **) &_decoder);
if (err)
{
throw std::exception();
}
WebRtcIsac_EncoderInit((ISACStruct *) _decoder, 0);
WebRtcIsac_SetDecSampRate((ISACStruct *) _decoder, kIsacSuperWideband);
}
decoder_iSACSWB::~decoder_iSACSWB()
{
if (_decoder)
{
WebRtcIsac_Free((ISACStruct *) _decoder);
_decoder = NULL;
}
}
int decoder_iSACSWB::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
{
WebRtcNetEQ_CodecDef codecInst;
SET_ISACSWB_FUNCTIONS(codecInst);
return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
}
#endif
// PCM u/A
#ifdef CODEC_G711
#include "g711_interface.h"
// G.711 mu-law decoder; the codec is stateless, so nothing is allocated here.
decoder_PCMU::decoder_PCMU(WebRtc_UWord8 pt)
:
NETEQTEST_Decoder(kDecoderPCMu, 8000, "G.711-u", pt)
{
    // no state to create or init
}
int decoder_PCMU::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
{
WebRtcNetEQ_CodecDef codecInst;
SET_PCMU_FUNCTIONS(codecInst);
return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
}
// G.711 A-law decoder; the codec is stateless, so nothing is allocated here.
decoder_PCMA::decoder_PCMA(WebRtc_UWord8 pt)
:
NETEQTEST_Decoder(kDecoderPCMa, 8000, "G.711-A", pt)
{
    // no state to create or init
}
int decoder_PCMA::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
{
WebRtcNetEQ_CodecDef codecInst;
SET_PCMA_FUNCTIONS(codecInst);
return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
}
#endif
// Linear PCM16b
#if (defined(CODEC_PCM16B) || defined(CODEC_PCM16B_WB) || \
defined(CODEC_PCM16B_32KHZ) || defined(CODEC_PCM16B_48KHZ))
#include "pcm16b.h"
#endif
#ifdef CODEC_PCM16B
int decoder_PCM16B_NB::loadToNetEQ(NETEQTEST_NetEQClass &neteq)
{
WebRtcNetEQ_CodecDef codecInst;
SET_PCM16B_FUNCTIONS(codecInst);
return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
}
#endif
#ifdef CODEC_PCM16B_WB
int decoder_PCM16B_WB::loadToNetEQ(NETEQTEST_NetEQClass &neteq)
{
WebRtcNetEQ_CodecDef codecInst;
SET_PCM16B_WB_FUNCTIONS(codecInst);
return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
}
#endif
#ifdef CODEC_PCM16B_32KHZ
int decoder_PCM16B_SWB32::loadToNetEQ(NETEQTEST_NetEQClass &neteq)
{
WebRtcNetEQ_CodecDef codecInst;
SET_PCM16B_SWB32_FUNCTIONS(codecInst);
return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
}
#endif
#ifdef CODEC_PCM16B_48KHZ
int decoder_PCM16B_SWB48::loadToNetEQ(NETEQTEST_NetEQClass &neteq)
{
WebRtcNetEQ_CodecDef codecInst;
SET_PCM16B_SWB48_FUNCTIONS(codecInst);
return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
}
#endif
#ifdef CODEC_ILBC
#include "ilbc.h"
decoder_ILBC::decoder_ILBC(WebRtc_UWord8 pt)
:
NETEQTEST_Decoder(kDecoderILBC, 8000, "iLBC", pt)
{
WebRtc_Word16 err = WebRtcIlbcfix_DecoderCreate((iLBC_decinst_t **) &_decoder);
if (err)
{
throw std::exception();
}
}
decoder_ILBC::~decoder_ILBC()
{
WebRtcIlbcfix_DecoderFree((iLBC_decinst_t *) _decoder);
}
int decoder_ILBC::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
{
WebRtcNetEQ_CodecDef codecInst;
SET_ILBC_FUNCTIONS(codecInst);
return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
}
#endif
#ifdef CODEC_G729
#include "G729Interface.h"
decoder_G729::decoder_G729(WebRtc_UWord8 pt)
:
NETEQTEST_Decoder(kDecoderG729, 8000, "G.729", pt)
{
WebRtc_Word16 err = WebRtcG729_CreateDec((G729_decinst_t **) &_decoder);
if (err)
{
throw std::exception();
}
}
decoder_G729::~decoder_G729()
{
WebRtcG729_FreeDec((G729_decinst_t *) _decoder);
}
int decoder_G729::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
{
WebRtcNetEQ_CodecDef codecInst;
SET_G729_FUNCTIONS(codecInst);
return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
}
#endif
#ifdef CODEC_G729_1
#include "G729_1Interface.h"
decoder_G729_1::decoder_G729_1(WebRtc_UWord8 pt)
:
NETEQTEST_Decoder(kDecoderG729_1, 16000, "G.729.1", pt)
{
WebRtc_Word16 err = WebRtcG7291_Create((G729_1_inst_t **) &_decoder);
if (err)
{
throw std::exception();
}
}
decoder_G729_1::~decoder_G729_1()
{
WebRtcG7291_Free((G729_1_inst_t *) _decoder);
}
int decoder_G729_1::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
{
WebRtcNetEQ_CodecDef codecInst;
SET_G729_1_FUNCTIONS(codecInst);
return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
}
#endif
#ifdef CODEC_G722
#include "g722_interface.h"
decoder_G722::decoder_G722(WebRtc_UWord8 pt)
:
NETEQTEST_Decoder(kDecoderG722, 16000, "G.722", pt)
{
WebRtc_Word16 err = WebRtcG722_CreateDecoder((G722DecInst **) &_decoder);
if (err)
{
throw std::exception();
}
}
decoder_G722::~decoder_G722()
{
WebRtcG722_FreeDecoder((G722DecInst *) _decoder);
}
int decoder_G722::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
{
WebRtcNetEQ_CodecDef codecInst;
SET_G722_FUNCTIONS(codecInst);
return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
}
#endif
#if (defined(CODEC_G722_1_16) || defined(CODEC_G722_1_24) || \
defined(CODEC_G722_1_32) || defined(CODEC_G722_1C_24) || \
defined(CODEC_G722_1C_32) || defined(CODEC_G722_1C_48))
#include "G722_1Interface.h"
#endif
#ifdef CODEC_G722_1_16
decoder_G722_1_16::decoder_G722_1_16(WebRtc_UWord8 pt)
:
NETEQTEST_Decoder(kDecoderG722_1_16, 16000, "G.722.1 (16 kbps)", pt)
{
if (WebRtcG7221_CreateDec16((G722_1_16_decinst_t **) &_decoder))
{
throw std::exception();
}
}
decoder_G722_1_16::~decoder_G722_1_16()
{
WebRtcG7221_FreeDec16((G722_1_16_decinst_t *) _decoder);
}
int decoder_G722_1_16::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
{
WebRtcNetEQ_CodecDef codecInst;
SET_G722_1_16_FUNCTIONS(codecInst);
return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
}
#endif
#ifdef CODEC_G722_1_24
decoder_G722_1_24::decoder_G722_1_24(WebRtc_UWord8 pt)
:
NETEQTEST_Decoder(kDecoderG722_1_24, 16000, "G.722.1 (24 kbps)", pt)
{
if (WebRtcG7221_CreateDec24((G722_1_24_decinst_t **) &_decoder))
{
throw std::exception();
}
}
decoder_G722_1_24::~decoder_G722_1_24()
{
WebRtcG7221_FreeDec24((G722_1_24_decinst_t *) _decoder);
}
int decoder_G722_1_24::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
{
WebRtcNetEQ_CodecDef codecInst;
SET_G722_1_24_FUNCTIONS(codecInst);
return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
}
#endif
#ifdef CODEC_G722_1_32
decoder_G722_1_32::decoder_G722_1_32(WebRtc_UWord8 pt)
:
NETEQTEST_Decoder(kDecoderG722_1_32, 16000, "G.722.1 (32 kbps)", pt)
{
if (WebRtcG7221_CreateDec32((G722_1_32_decinst_t **) &_decoder))
{
throw std::exception();
}
}
decoder_G722_1_32::~decoder_G722_1_32()
{
WebRtcG7221_FreeDec32((G722_1_32_decinst_t *) _decoder);
}
int decoder_G722_1_32::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
{
WebRtcNetEQ_CodecDef codecInst;
SET_G722_1_32_FUNCTIONS(codecInst);
return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
}
#endif
#ifdef CODEC_G722_1C_24
decoder_G722_1C_24::decoder_G722_1C_24(WebRtc_UWord8 pt)
:
NETEQTEST_Decoder(kDecoderG722_1C_24, 32000, "G.722.1C (24 kbps)", pt)
{
if (WebRtcG7221C_CreateDec24((G722_1C_24_decinst_t **) &_decoder))
throw std::exception();
}
decoder_G722_1C_24::~decoder_G722_1C_24()
{
WebRtcG7221C_FreeDec24((G722_1C_24_decinst_t *) _decoder);
}
int decoder_G722_1C_24::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
{
WebRtcNetEQ_CodecDef codecInst;
SET_G722_1C_24_FUNCTIONS(codecInst);
return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
}
#endif
#ifdef CODEC_G722_1C_32
decoder_G722_1C_32::decoder_G722_1C_32(WebRtc_UWord8 pt)
:
NETEQTEST_Decoder(kDecoderG722_1C_32, 32000, "G.722.1C (32 kbps)", pt)
{
if (WebRtcG7221C_CreateDec32((G722_1C_32_decinst_t **) &_decoder))
throw std::exception();
}
decoder_G722_1C_32::~decoder_G722_1C_32()
{
WebRtcG7221C_FreeDec32((G722_1C_32_decinst_t *) _decoder);
}
int decoder_G722_1C_32::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
{
WebRtcNetEQ_CodecDef codecInst;
SET_G722_1C_32_FUNCTIONS(codecInst);
return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
}
#endif
#ifdef CODEC_G722_1C_48
decoder_G722_1C_48::decoder_G722_1C_48(WebRtc_UWord8 pt)
:
NETEQTEST_Decoder(kDecoderG722_1C_48, 32000, "G.722.1C (48 kbps)", pt)
{
if (WebRtcG7221C_CreateDec48((G722_1C_48_decinst_t **) &_decoder))
throw std::exception();
}
decoder_G722_1C_48::~decoder_G722_1C_48()
{
WebRtcG7221C_FreeDec48((G722_1C_48_decinst_t *) _decoder);
}
int decoder_G722_1C_48::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
{
WebRtcNetEQ_CodecDef codecInst;
SET_G722_1C_48_FUNCTIONS(codecInst);
return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
}
#endif
#ifdef CODEC_AMR
#include "AMRInterface.h"
#include "AMRCreation.h"
decoder_AMR::decoder_AMR(WebRtc_UWord8 pt)
:
NETEQTEST_Decoder(kDecoderAMR, 8000, "AMR", pt)
{
if (WebRtcAmr_CreateDec((AMR_decinst_t **) &_decoder))
throw std::exception();
WebRtcAmr_DecodeBitmode((AMR_decinst_t *) _decoder, AMRBandwidthEfficient);
}
decoder_AMR::~decoder_AMR()
{
WebRtcAmr_FreeDec((AMR_decinst_t *) _decoder);
}
int decoder_AMR::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
{
WebRtcNetEQ_CodecDef codecInst;
SET_AMR_FUNCTIONS(codecInst);
return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
}
#endif
#ifdef CODEC_AMRWB
#include "AMRWBInterface.h"
#include "AMRWBCreation.h"
decoder_AMRWB::decoder_AMRWB(WebRtc_UWord8 pt)
:
NETEQTEST_Decoder(kDecoderAMRWB, 16000, "AMR wb", pt)
{
if (WebRtcAmrWb_CreateDec((AMRWB_decinst_t **) &_decoder))
throw std::exception();
WebRtcAmrWb_DecodeBitmode((AMRWB_decinst_t *) _decoder, AMRBandwidthEfficient);
}
decoder_AMRWB::~decoder_AMRWB()
{
WebRtcAmrWb_FreeDec((AMRWB_decinst_t *) _decoder);
}
int decoder_AMRWB::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
{
WebRtcNetEQ_CodecDef codecInst;
SET_AMRWB_FUNCTIONS(codecInst);
return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
}
#endif
#ifdef CODEC_GSMFR
#include "GSMFRInterface.h"
#include "GSMFRCreation.h"
decoder_GSMFR::decoder_GSMFR(WebRtc_UWord8 pt)
:
NETEQTEST_Decoder(kDecoderGSMFR, 8000, "GSM-FR", pt)
{
if (WebRtcGSMFR_CreateDec((GSMFR_decinst_t **) &_decoder))
throw std::exception();
}
decoder_GSMFR::~decoder_GSMFR()
{
WebRtcGSMFR_FreeDec((GSMFR_decinst_t *) _decoder);
}
int decoder_GSMFR::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
{
WebRtcNetEQ_CodecDef codecInst;
SET_GSMFR_FUNCTIONS(codecInst);
return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
}
#endif
#if (defined(CODEC_SPEEX_8) || defined (CODEC_SPEEX_16))
#include "SpeexInterface.h"
// Create a Speex decoder for 8 or 16 kHz. Throws std::exception on an
// unsupported sample rate or if the codec instance cannot be allocated.
//
// Fixes vs. original: "SPEEX " + fs/1000 performed pointer arithmetic past
// the end of the string literal (undefined behavior), not concatenation, so
// the name is now selected explicitly; std::exception(const char*) is a
// non-standard MSVC extension and is replaced by the default constructor
// (matching the other decoder classes in this file).
decoder_SPEEX::decoder_SPEEX(WebRtc_UWord8 pt, WebRtc_UWord16 fs)
:
NETEQTEST_Decoder(fs == 8000 ? kDecoderSPEEX_8 : kDecoderSPEEX_16,
    fs, fs == 8000 ? "SPEEX 8" : "SPEEX 16", pt)
{
    if (fs != 8000 && fs != 16000)
        throw std::exception(); // wrong sample rate for SPEEX
    if (WebRtcSpeex_CreateDec((SPEEX_decinst_t **) &_decoder, fs, 1))
        throw std::exception();
}
decoder_SPEEX::~decoder_SPEEX()
{
WebRtcSpeex_FreeDec((SPEEX_decinst_t *) _decoder);
}
int decoder_SPEEX::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
{
WebRtcNetEQ_CodecDef codecInst;
SET_SPEEX_FUNCTIONS(codecInst);
return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
}
#endif
#ifdef CODEC_RED
int decoder_RED::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
{
WebRtcNetEQ_CodecDef codecInst;
SET_RED_FUNCTIONS(codecInst);
return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
}
#endif
#ifdef CODEC_ATEVENT_DECODE
int decoder_AVT::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
{
WebRtcNetEQ_CodecDef codecInst;
SET_AVT_FUNCTIONS(codecInst);
return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
}
#endif
#if (defined(CODEC_CNGCODEC8) || defined(CODEC_CNGCODEC16) || \
defined(CODEC_CNGCODEC32) || defined(CODEC_CNGCODEC48))
#include "webrtc_cng.h"
// Create a comfort-noise (CNG) decoder for 8/16/32/48 kHz. Throws
// std::exception on an unsupported sample rate or allocation failure.
//
// Fixes vs. original: "CNG " + fs/1000 performed pointer arithmetic past the
// end of the string literal (undefined behavior), not concatenation; the
// codec name is now selected explicitly per sample rate.
decoder_CNG::decoder_CNG(WebRtc_UWord8 pt, WebRtc_UWord16 fs)
:
NETEQTEST_Decoder(kDecoderCNG, fs,
    fs == 8000 ? "CNG 8" : fs == 16000 ? "CNG 16" :
    fs == 32000 ? "CNG 32" : "CNG 48", pt)
{
    if (fs != 8000 && fs != 16000 && fs != 32000 && fs != 48000)
        throw std::exception();
    if (WebRtcCng_CreateDec((CNG_dec_inst **) &_decoder))
        throw std::exception();
}
decoder_CNG::~decoder_CNG()
{
WebRtcCng_FreeDec((CNG_dec_inst *) _decoder);
}
int decoder_CNG::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
{
WebRtcNetEQ_CodecDef codecInst;
SET_CNG_FUNCTIONS(codecInst);
return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
}
#endif

View File

@ -1,293 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef NETEQTEST_CODECCLASS_H
#define NETEQTEST_CODECCLASS_H
#include <string>
#include <string.h>
#include "typedefs.h"
#include "webrtc_neteq.h"
#include "NETEQTEST_NetEQClass.h"
// Abstract base class for NetEQ test decoders: holds the codec parameters
// (type, payload type, sample rate, name) plus the opaque decoder state
// pointer, and provides the common NetEQ registration path.
class NETEQTEST_Decoder
{
public:
    // type - NetEQ codec database identifier; fs - sample rate in Hz;
    // name - human-readable codec name; pt - RTP payload type.
    NETEQTEST_Decoder(enum WebRtcNetEQDecoder type, WebRtc_UWord16 fs, const char * name, WebRtc_UWord8 pt = 0);
    virtual ~NETEQTEST_Decoder() {};

    // Register this decoder with the given NetEQ instance; returns 0 on success.
    virtual int loadToNetEQ(NETEQTEST_NetEQClass & neteq) = 0;

    // Copy the codec name into 'name' (at most maxLen chars); always returns 0.
    // NOTE(review): strncpy does not NUL-terminate when the stored name is
    // maxLen chars or longer -- callers must not rely on termination then.
    int getName(char * name, int maxLen) const { strncpy( name, _name.c_str(), maxLen ); return 0;};

    void setPT(WebRtc_UWord8 pt) { _pt = pt; };
    WebRtc_UWord16 getFs() const { return (_fs); };
    enum WebRtcNetEQDecoder getType() const { return (_decoderType); };
    WebRtc_UWord8 getPT() const { return (_pt); };
protected:
    // Fill codecInst from the members below and load it into neteq.
    int loadToNetEQ(NETEQTEST_NetEQClass & neteq, WebRtcNetEQ_CodecDef & codecInst);

    void * _decoder;                       // codec state, owned by the subclass
    enum WebRtcNetEQDecoder _decoderType;  // NetEQ codec identifier
    WebRtc_UWord8 _pt;                     // RTP payload type
    WebRtc_UWord16 _fs;                    // sample rate in Hz
    std::string _name;                     // codec name, used in error messages
private:
};
// Concrete decoder wrappers. Each subclass binds one codec type to a sample
// rate and (optional) RTP payload type; loadToNetEQ() registers the codec
// with a NetEQ instance. Classes with out-of-line constructors/destructors
// create and free a codec instance in the .cc file.

class decoder_iSAC : public NETEQTEST_Decoder
{
public:
    decoder_iSAC(WebRtc_UWord8 pt = 0);
    virtual ~decoder_iSAC();
    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};

class decoder_iSACSWB : public NETEQTEST_Decoder
{
public:
    decoder_iSACSWB(WebRtc_UWord8 pt = 0);
    virtual ~decoder_iSACSWB();
    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};

class decoder_PCMU : public NETEQTEST_Decoder
{
public:
    decoder_PCMU(WebRtc_UWord8 pt = 0);
    virtual ~decoder_PCMU() {};
    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};

class decoder_PCMA : public NETEQTEST_Decoder
{
public:
    decoder_PCMA(WebRtc_UWord8 pt = 0);
    virtual ~decoder_PCMA() {};
    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};

// Linear PCM16 variants: stateless, so the constructors are defined inline
// and only loadToNetEQ() lives in the .cc file.
class decoder_PCM16B_NB : public NETEQTEST_Decoder
{
public:
    decoder_PCM16B_NB(WebRtc_UWord8 pt = 0) : NETEQTEST_Decoder(kDecoderPCM16B, 8000, "PCM16 nb", pt) {};
    virtual ~decoder_PCM16B_NB() {};
    virtual int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};

class decoder_PCM16B_WB : public NETEQTEST_Decoder
{
public:
    decoder_PCM16B_WB(WebRtc_UWord8 pt = 0) : NETEQTEST_Decoder(kDecoderPCM16Bwb, 16000, "PCM16 wb", pt) {};
    virtual ~decoder_PCM16B_WB() {};
    virtual int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};

class decoder_PCM16B_SWB32 : public NETEQTEST_Decoder
{
public:
    decoder_PCM16B_SWB32(WebRtc_UWord8 pt = 0) : NETEQTEST_Decoder(kDecoderPCM16Bswb32kHz, 32000, "PCM16 swb32", pt) {};
    virtual ~decoder_PCM16B_SWB32() {};
    virtual int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};

class decoder_PCM16B_SWB48 : public NETEQTEST_Decoder
{
public:
    decoder_PCM16B_SWB48(WebRtc_UWord8 pt = 0) : NETEQTEST_Decoder(kDecoderPCM16Bswb48kHz, 48000, "PCM16 swb48", pt) {};
    virtual ~decoder_PCM16B_SWB48() {};
    virtual int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};

class decoder_ILBC : public NETEQTEST_Decoder
{
public:
    decoder_ILBC(WebRtc_UWord8 pt = 0);
    virtual ~decoder_ILBC();
    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};

class decoder_G729 : public NETEQTEST_Decoder
{
public:
    decoder_G729(WebRtc_UWord8 pt = 0);
    virtual ~decoder_G729();
    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};

class decoder_G729_1 : public NETEQTEST_Decoder
{
public:
    decoder_G729_1(WebRtc_UWord8 pt = 0);
    virtual ~decoder_G729_1();
    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};

class decoder_G722 : public NETEQTEST_Decoder
{
public:
    decoder_G722(WebRtc_UWord8 pt = 0);
    virtual ~decoder_G722();
    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};

// G.722.1 family: one class per bit rate (16/24/32 kbps) and per
// G.722.1C rate (24/32/48 kbps).
class decoder_G722_1_16 : public NETEQTEST_Decoder
{
public:
    decoder_G722_1_16(WebRtc_UWord8 pt = 0);
    virtual ~decoder_G722_1_16();
    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};

class decoder_G722_1_24 : public NETEQTEST_Decoder
{
public:
    decoder_G722_1_24(WebRtc_UWord8 pt = 0);
    virtual ~decoder_G722_1_24();
    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};

class decoder_G722_1_32 : public NETEQTEST_Decoder
{
public:
    decoder_G722_1_32(WebRtc_UWord8 pt = 0);
    virtual ~decoder_G722_1_32();
    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};

class decoder_G722_1C_24 : public NETEQTEST_Decoder
{
public:
    decoder_G722_1C_24(WebRtc_UWord8 pt = 0);
    virtual ~decoder_G722_1C_24();
    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};

class decoder_G722_1C_32 : public NETEQTEST_Decoder
{
public:
    decoder_G722_1C_32(WebRtc_UWord8 pt = 0);
    virtual ~decoder_G722_1C_32();
    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};

class decoder_G722_1C_48 : public NETEQTEST_Decoder
{
public:
    decoder_G722_1C_48(WebRtc_UWord8 pt = 0);
    virtual ~decoder_G722_1C_48();
    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};

class decoder_AMR : public NETEQTEST_Decoder
{
public:
    decoder_AMR(WebRtc_UWord8 pt = 0);
    virtual ~decoder_AMR();
    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};

class decoder_AMRWB : public NETEQTEST_Decoder
{
public:
    decoder_AMRWB(WebRtc_UWord8 pt = 0);
    virtual ~decoder_AMRWB();
    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};

class decoder_GSMFR : public NETEQTEST_Decoder
{
public:
    decoder_GSMFR(WebRtc_UWord8 pt = 0);
    virtual ~decoder_GSMFR();
    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};

// Common base for the G.726 bit-rate variants; abstract (loadToNetEQ is pure).
class decoder_G726 : public NETEQTEST_Decoder
{
public:
    //virtual decoder_G726(WebRtc_UWord8 pt = 0) = 0;
    decoder_G726(enum WebRtcNetEQDecoder type, const char * name, WebRtc_UWord8 pt = 0);
    virtual ~decoder_G726();
    virtual int loadToNetEQ(NETEQTEST_NetEQClass & neteq) = 0;
};

class decoder_G726_16 : public decoder_G726
{
public:
    decoder_G726_16(WebRtc_UWord8 pt = 0) : decoder_G726(kDecoderG726_16, "G.726 (16 kbps)", pt) {};
    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};

class decoder_G726_24 : public decoder_G726
{
public:
    decoder_G726_24(WebRtc_UWord8 pt = 0) : decoder_G726(kDecoderG726_24, "G.726 (24 kbps)", pt) {};
    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};

class decoder_G726_32 : public decoder_G726
{
public:
    decoder_G726_32(WebRtc_UWord8 pt = 0) : decoder_G726(kDecoderG726_32, "G.726 (32 kbps)", pt) {};
    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};

class decoder_G726_40 : public decoder_G726
{
public:
    decoder_G726_40(WebRtc_UWord8 pt = 0) : decoder_G726(kDecoderG726_40, "G.726 (40 kbps)", pt) {};
    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};

// Speex supports multiple rates; fs selects narrowband (8000) or wideband.
class decoder_SPEEX : public NETEQTEST_Decoder
{
public:
    decoder_SPEEX(WebRtc_UWord8 pt = 0, WebRtc_UWord16 fs = 8000);
    virtual ~decoder_SPEEX();
    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};

class decoder_RED : public NETEQTEST_Decoder
{
public:
    decoder_RED(WebRtc_UWord8 pt = 0) : NETEQTEST_Decoder(kDecoderRED, 8000, "RED", pt) {};
    virtual ~decoder_RED() {};
    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};

class decoder_AVT : public NETEQTEST_Decoder
{
public:
    decoder_AVT(WebRtc_UWord8 pt = 0) : NETEQTEST_Decoder(kDecoderAVT, 8000, "AVT", pt) {};
    virtual ~decoder_AVT() {};
    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};

// Comfort noise; fs selects the CNG sample rate variant.
class decoder_CNG : public NETEQTEST_Decoder
{
public:
    decoder_CNG(WebRtc_UWord8 pt = 0, WebRtc_UWord16 fs = 8000);
    virtual ~decoder_CNG();
    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};
#endif //NETEQTEST_CODECCLASS_H

View File

@ -1,370 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <memory.h>
#include "NETEQTEST_NetEQClass.h"
// Default constructor: creates an empty wrapper with no NetEQ instance or
// packet buffer assigned; call assign()/init()/assignBuffer() before use.
NETEQTEST_NetEQClass::NETEQTEST_NetEQClass()
:
_inst(NULL),
_instMem(NULL),
_bufferMem(NULL),
_preparseRTP(false),
_fsmult(1),
_isMaster(true)
{
#ifdef WINDOWS_TIMING
    // reset the accumulated processor-tick counters
    _totTimeRecIn.QuadPart = 0;
    _totTimeRecOut.QuadPart = 0;
#endif
}
// Convenience constructor: assigns instance memory, initializes NetEQ at
// sample rate fs, and allocates a packet buffer sized for the given codec
// list and network jitter profile. Errors are printed, not propagated.
NETEQTEST_NetEQClass::NETEQTEST_NetEQClass(enum WebRtcNetEQDecoder *usedCodec, int noOfCodecs,
                                           WebRtc_UWord16 fs, WebRtcNetEQNetworkType nwType)
:
_inst(NULL),
_instMem(NULL),
_bufferMem(NULL),
_preparseRTP(false),
_fsmult(1),
_isMaster(true)
{
#ifdef WINDOWS_TIMING
    _totTimeRecIn.QuadPart = 0;
    _totTimeRecOut.QuadPart = 0;
#endif

    // each step only runs if the previous one succeeded
    if (assign() == 0)
    {
        if (init(fs) == 0)
        {
            assignBuffer(usedCodec, noOfCodecs, nwType);
        }
    }
}
// Destructor: release the instance and packet-buffer memory owned by this
// wrapper. _inst points into _instMem, so it is simply cleared.
NETEQTEST_NetEQClass::~NETEQTEST_NetEQClass()
{
    // delete[] on a null pointer is a no-op, so no guards are needed
    delete [] _instMem;
    _instMem = NULL;

    delete [] _bufferMem;
    _bufferMem = NULL;

    _inst = NULL; // now dangling into freed _instMem; clear it
}
// Allocate and assign the NetEQ instance memory; any previously assigned
// memory is released first. Returns 0 on success (errors are also printed).
int NETEQTEST_NetEQClass::assign()
{
    int memSize;

    // ask NetEQ how much memory one instance needs
    WebRtcNetEQ_AssignSize(&memSize);

    if (_instMem)
    {
        delete [] _instMem;
        _instMem = NULL;
    }

    _instMem = new WebRtc_Word8[memSize];

    // NetEQ places its instance struct inside _instMem; _inst points into it
    int ret = WebRtcNetEQ_Assign(&_inst, _instMem);

    if (ret)
    {
        printError();
    }

    return (ret);
}
// Initialize the NetEQ instance for sample rate fs, assigning instance
// memory first if that has not been done yet. Returns 0 on success; errors
// are printed and the NetEQ error code is returned.
int NETEQTEST_NetEQClass::init(WebRtc_UWord16 fs)
{
    if (_inst == NULL)
    {
        // instance memory not yet assigned; do it now
        int assignRet = assign();
        if (assignRet != 0)
        {
            printError();
            return (assignRet);
        }
    }

    int initRet = WebRtcNetEQ_Init(_inst, fs);
    if (initRet != 0)
    {
        printError();
    }

    return (initRet);
}
// Allocate and assign the NetEQ packet buffer, sized according to the codecs
// in use and the expected network jitter profile. Assigns and initializes
// the instance first if needed. Returns 0 on success.
int NETEQTEST_NetEQClass::assignBuffer(enum WebRtcNetEQDecoder *usedCodec, int noOfCodecs, WebRtcNetEQNetworkType nwType)
{
    int numPackets, memSize, ret;

    if (!_inst)
    {
        // not assigned
        ret = assign();
        if (ret != 0)
        {
            printError();
            return (ret);
        }

        ret = init();
        if (ret != 0)
        {
            printError();
            return (ret);
        }
    }

    // let NetEQ recommend a buffer size for this codec set and network type
    ret = WebRtcNetEQ_GetRecommendedBufferSize(_inst, usedCodec, noOfCodecs, nwType, &numPackets, &memSize);
    if (ret != 0)
    {
        printError();
        return (ret);
    }

    if (_bufferMem)
    {
        delete [] _bufferMem;
        _bufferMem = NULL;
    }

    _bufferMem = new WebRtc_Word8[memSize];

    // pre-fill the buffer memory with 0xFF bytes
    memset(_bufferMem, -1, memSize);

    ret = WebRtcNetEQ_AssignBuffer(_inst, numPackets, _bufferMem, memSize);
    if (ret != 0)
    {
        printError();
    }

    return (ret);
}
// Add one codec definition to the NetEQ codec database.
// Returns 0 on success; errors are printed and the error code returned.
int NETEQTEST_NetEQClass::loadCodec(WebRtcNetEQ_CodecDef &codecInst)
{
    const int ret = WebRtcNetEQ_CodecDbAdd(_inst, &codecInst);

    if (ret != 0)
    {
        printError();
    }

    return (ret);
}
// Fetch the last error code from the NetEQ instance and print its symbolic
// name to stdout. Does nothing if no instance is assigned or no error is set.
void NETEQTEST_NetEQClass::printError()
{
    if (_inst)
    {
        int errorCode = WebRtcNetEQ_GetErrorCode(_inst);

        if (errorCode)
        {
            char errorName[WEBRTC_NETEQ_MAX_ERROR_NAME];

            WebRtcNetEQ_GetErrorName(errorCode, errorName, WEBRTC_NETEQ_MAX_ERROR_NAME);

            printf("Error %i: %s\n", errorCode, errorName);
        }
    }
}
// Print the last NetEQ error together with header info from the RTP packet
// that triggered it.
void NETEQTEST_NetEQClass::printError(NETEQTEST_RTPpacket &rtp)
{
    // print regular error info
    printError();

    // print extra info from packet
    // Fix: timeStamp() returns a 32-bit value; passing it to %lu is undefined
    // behavior where 'unsigned long' is 64-bit (e.g. LP64). Cast explicitly.
    printf("\tRTP: TS=%lu, SN=%u, PT=%u, M=%i, len=%i\n",
           (unsigned long) rtp.timeStamp(), rtp.sequenceNumber(),
           rtp.payloadType(), rtp.markerBit(), rtp.payloadLen());
}
// Insert one RTP packet into NetEQ, either as a pre-parsed header + payload
// (_preparseRTP) or as a raw datagram. Execution time is accumulated on
// Windows builds. Returns the NetEQ error code (0 on success).
int NETEQTEST_NetEQClass::recIn(NETEQTEST_RTPpacket &rtp)
{
    int err;
#ifdef WINDOWS_TIMING
    LARGE_INTEGER countA, countB;
#endif

    if (_preparseRTP)
    {
        WebRtcNetEQ_RTPInfo rtpInfo;
        // parse RTP header
        rtp.parseHeader(rtpInfo);
#ifdef WINDOWS_TIMING
        QueryPerformanceCounter(&countA); // get start count for processor
#endif
        // NOTE(review): rtp.time() * _fsmult * 8 looks like an ms-to-timestamp
        // conversion assuming an 8 kHz base rate -- confirm the units.
        err = WebRtcNetEQ_RecInRTPStruct(_inst, &rtpInfo, rtp.payload(), rtp.payloadLen(), rtp.time() * _fsmult * 8);
#ifdef WINDOWS_TIMING
        QueryPerformanceCounter(&countB); // get stop count for processor
        _totTimeRecIn.QuadPart += (countB.QuadPart - countA.QuadPart);
#endif
    }
    else
    {
#ifdef WINDOWS_TIMING
        QueryPerformanceCounter(&countA); // get start count for processor
#endif
        err = WebRtcNetEQ_RecIn(_inst, (WebRtc_Word16 *) rtp.datagram(), rtp.dataLen(), rtp.time() * _fsmult * 8);
#ifdef WINDOWS_TIMING
        QueryPerformanceCounter(&countB); // get stop count for processor
        _totTimeRecIn.QuadPart += (countB.QuadPart - countA.QuadPart);
#endif
    }

    if (err)
    {
        // include packet header details in the error printout
        printError(rtp);
    }

    return (err);
}
// Extract one frame of decoded audio from NetEQ into outData. If msInfo is
// non-NULL, runs in master/slave (multi-channel) mode. If outputType is
// non-NULL, the speech output type (normal/PLC/CNG/...) is reported through
// it. Returns the number of samples written (0 on error; errors printed).
WebRtc_Word16 NETEQTEST_NetEQClass::recOut(WebRtc_Word16 *outData, void *msInfo, enum WebRtcNetEQOutputType *outputType)
{
    int err;
    WebRtc_Word16 outLen = 0;
#ifdef WINDOWS_TIMING
    LARGE_INTEGER countA, countB;
#endif

#ifdef WINDOWS_TIMING
    QueryPerformanceCounter(&countA); // get start count for processor
#endif
    if (!msInfo)
    {
        // no msInfo given, do mono mode
        err = WebRtcNetEQ_RecOut(_inst, outData, &outLen);
    }
    else
    {
        // master/slave mode
        err = WebRtcNetEQ_RecOutMasterSlave(_inst, outData, &outLen, msInfo, static_cast<WebRtc_Word16>(_isMaster));
    }
#ifdef WINDOWS_TIMING
    QueryPerformanceCounter(&countB); // get stop count for processor
    _totTimeRecOut.QuadPart += (countB.QuadPart - countA.QuadPart);
#endif

    if (err)
    {
        printError();
    }
    else
    {
        // Infer the sample-rate multiplier from the returned frame length
        // (80 samples corresponds to the multiplier-1 base case).
        int newfsmult = static_cast<int>(outLen / 80);

        if (newfsmult != _fsmult)
        {
            printf("Warning: output sample rate changed\n");
            _fsmult = newfsmult;
        }
    }

    if (outputType != NULL)
    {
        err = WebRtcNetEQ_GetSpeechOutputType(_inst, outputType);

        if (err)
        {
            printError();
        }
    }

    return (outLen);

}
// Query NetEQ for the RTP timestamp of the current speech output.
// Returns 0 on failure (the error is printed).
WebRtc_UWord32 NETEQTEST_NetEQClass::getSpeechTimeStamp()
{
    WebRtc_UWord32 timestamp = 0;

    if (WebRtcNetEQ_GetSpeechTimeStamp(_inst, &timestamp) != 0)
    {
        printError();
        return (0);
    }

    return (timestamp);
}
//NETEQTEST_NetEQVector::NETEQTEST_NetEQVector(int numChannels)
//:
//channels(numChannels, new NETEQTEST_NetEQClass())
//{
// //for (int i = 0; i < numChannels; i++)
// //{
// // channels.push_back(new NETEQTEST_NetEQClass());
// //}
//}
//
//NETEQTEST_NetEQVector::NETEQTEST_NetEQVector(int numChannels, enum WebRtcNetEQDecoder *usedCodec, int noOfCodecs,
// WebRtc_UWord16 fs, WebRtcNetEQNetworkType nwType)
// :
//channels(numChannels, new NETEQTEST_NetEQClass(usedCodec, noOfCodecs, fs, nwType))
//{
// //for (int i = 0; i < numChannels; i++)
// //{
// // channels.push_back(new NETEQTEST_NetEQClass(usedCodec, noOfCodecs, fs, nwType));
// //}
//}
//
//NETEQTEST_NetEQVector::~NETEQTEST_NetEQVector()
//{
//}

View File

@ -1,91 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef NETEQTEST_NETEQCLASS_H
#define NETEQTEST_NETEQCLASS_H
#include <stdio.h>
#include <vector>
#include "webrtc_neteq.h"
#include "webrtc_neteq_internal.h"
#include "NETEQTEST_RTPpacket.h"
#ifdef WIN32
#define WINDOWS_TIMING // complexity measurement only implemented for windows
//TODO(hlundin):Add complexity testing for Linux.
#include <windows.h>
#endif
// Thin C++ wrapper around the NetEQ C API for test programs: owns the
// instance and packet-buffer memory and (on Windows) accumulates processor
// timing statistics for recIn()/recOut().
class NETEQTEST_NetEQClass
{
public:
    NETEQTEST_NetEQClass();
    // Convenience constructor: assign() + init(fs) + assignBuffer() in one go.
    NETEQTEST_NetEQClass(enum WebRtcNetEQDecoder *usedCodec, int noOfCodecs,
                         WebRtc_UWord16 fs = 8000, WebRtcNetEQNetworkType nwType = kTCPLargeJitter);
    ~NETEQTEST_NetEQClass();

    int assign();                       // allocate + assign instance memory
    int init(WebRtc_UWord16 fs = 8000); // initialize NetEQ at sample rate fs
    int assignBuffer(enum WebRtcNetEQDecoder *usedCodec, int noOfCodecs, WebRtcNetEQNetworkType nwType = kTCPLargeJitter);
    int loadCodec(WebRtcNetEQ_CodecDef & codecInst); // add codec to database
    int recIn(NETEQTEST_RTPpacket & rtp);            // insert one RTP packet
    WebRtc_Word16 recOut(WebRtc_Word16 *outData, void *msInfo = NULL, enum WebRtcNetEQOutputType *outputType = NULL);
    WebRtc_UWord32 getSpeechTimeStamp();

    void * instance() { return (_inst); };
    void usePreparseRTP( bool useIt = true ) { _preparseRTP = useIt; };
    bool usingPreparseRTP() { return (_preparseRTP); };
    void setMaster( bool isMaster = true ) { _isMaster = isMaster; };
    void setSlave() { _isMaster = false; };
    bool isMaster() { return (_isMaster); };
    bool isSlave() { return (!_isMaster); };

#ifdef WINDOWS_TIMING
    // accumulated QueryPerformanceCounter ticks spent inside recIn()/recOut()
    double getRecInTime() { return (static_cast<double>( _totTimeRecIn.QuadPart )); };
    double getRecOutTime() { return (static_cast<double>( _totTimeRecOut.QuadPart )); };

#else
    double getRecInTime() { return (0.0); };
    double getRecOutTime() { return (0.0); };

#endif

    void printError();
    void printError(NETEQTEST_RTPpacket & rtp);

private:
    void * _inst;              // NetEQ instance handle (points into _instMem)
    WebRtc_Word8 * _instMem;   // instance memory, owned
    WebRtc_Word8 * _bufferMem; // packet buffer memory, owned
    bool _preparseRTP;         // parse the RTP header before insertion
    int _fsmult;               // sample-rate multiplier (updated in recOut)
    bool _isMaster;            // master or slave role in multi-channel mode
#ifdef WINDOWS_TIMING
    LARGE_INTEGER _totTimeRecIn;
    LARGE_INTEGER _totTimeRecOut;
#endif
};
//class NETEQTEST_NetEQVector
//{
//public:
// NETEQTEST_NetEQVector(int numChannels);
// NETEQTEST_NetEQVector(int numChannels, enum WebRtcNetEQDecoder *usedCodec, int noOfCodecs,
// WebRtc_UWord16 fs = 8000, WebRtcNetEQNetworkType nwType = kTCPLargeJitter);
// ~NETEQTEST_NetEQVector();
//
//private:
// std::vector<NETEQTEST_NetEQClass *> channels;
//};
#endif //NETEQTEST_NETEQCLASS_H

View File

@ -1,766 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "NETEQTEST_RTPpacket.h"
#include <string.h>
#ifdef WIN32
#include <winsock2.h>
#else
#include <netinet/in.h> // for htons, htonl, etc
#endif
#define HDR_SIZE 8 // rtpplay packet header size in bytes
// Default constructor: no datagram buffer is allocated; _datagramLen == -1
// marks the packet as invalid (see operator!).
NETEQTEST_RTPpacket::NETEQTEST_RTPpacket()
:
_datagram(NULL),
_payloadPtr(NULL),
_memSize(0),
_datagramLen(-1),
_payloadLen(0),
_rtpParsed(false),
_receiveTime(0),
_lost(false)
{
    memset(&_rtpInfo, 0, sizeof(_rtpInfo));
    _blockList.clear(); // a fresh map is already empty; kept from original
}
// Copy constructor. Performs a deep copy: the datagram buffer is duplicated
// and _payloadPtr is re-pointed into the new buffer.
// Fix: the old implementation memcpy'd the whole object, which is undefined
// behavior for the std::map member (_blockList) -- the subsequent map
// assignment then operated on raw-copied internal state. Members are now
// copied explicitly (order matches the declaration order in the header).
NETEQTEST_RTPpacket::NETEQTEST_RTPpacket(const NETEQTEST_RTPpacket& copyFromMe)
:
_datagram(NULL),
_payloadPtr(NULL),
_memSize(copyFromMe._memSize),
_datagramLen(copyFromMe._datagramLen),
_payloadLen(copyFromMe._payloadLen),
_rtpInfo(copyFromMe._rtpInfo),
_rtpParsed(copyFromMe._rtpParsed),
_receiveTime(copyFromMe._receiveTime),
_lost(copyFromMe._lost),
_blockList(copyFromMe._blockList)
{
    if (copyFromMe._datagram)
    {
        _datagram = new WebRtc_UWord8[_memSize];

        if (_datagram)
        {
            memcpy(_datagram, copyFromMe._datagram, _memSize);
        }
    }

    if (copyFromMe._payloadPtr && _datagram)
    {
        // re-point the payload pointer into our own buffer, same offset
        _payloadPtr = _datagram + (copyFromMe._payloadPtr - copyFromMe._datagram);
    }
}
// Assignment operator. Deep-copies the datagram buffer and all bookkeeping.
// Fix: the old implementation memcpy'd over the live object, trampling the
// std::map member (_blockList) with the source map's internal pointers --
// undefined behavior that could double-free the map's nodes. Members are
// now copied explicitly.
NETEQTEST_RTPpacket & NETEQTEST_RTPpacket::operator = (const NETEQTEST_RTPpacket & other)
{
    if (this != &other) // protect against invalid self-assignment
    {
        // deallocate datagram memory if allocated
        delete [] _datagram;
        _datagram = NULL;
        _payloadPtr = NULL;

        // copy the scalar bookkeeping members
        _memSize = other._memSize;
        _datagramLen = other._datagramLen;
        _payloadLen = other._payloadLen;
        memcpy(&_rtpInfo, &other._rtpInfo, sizeof(_rtpInfo));
        _rtpParsed = other._rtpParsed;
        _receiveTime = other._receiveTime;
        _lost = other._lost;

        if (other._datagram)
        {
            _datagram = new WebRtc_UWord8[other._memSize];

            if (_datagram)
            {
                memcpy(_datagram, other._datagram, _memSize);
            }
        }

        if (other._payloadPtr && _datagram)
        {
            // re-point the payload pointer into our own buffer, same offset
            _payloadPtr = _datagram + (other._payloadPtr - other._datagram);
        }

        // copy the blocking list (map)
        _blockList = other._blockList;
    }

    // by convention, always return *this
    return *this;
}
// Destructor.
// Fix: _datagram is allocated with new[] (see readFromFile and the copy
// operations), so it must be released with delete[]; the old scalar delete
// was undefined behavior. delete[] on NULL is a safe no-op.
NETEQTEST_RTPpacket::~NETEQTEST_RTPpacket()
{
    delete [] _datagram;
}
// Return the packet to its freshly-constructed (invalid) state, releasing
// the datagram buffer. The block list is deliberately preserved.
void NETEQTEST_RTPpacket::reset()
{
    // Fix: _datagram is allocated with new[]; use delete[] (the old scalar
    // delete was undefined behavior). delete[] on NULL is a safe no-op.
    delete [] _datagram;
    _datagram = NULL;
    _memSize = 0;
    _datagramLen = -1;
    _payloadLen = 0;
    _payloadPtr = NULL;
    _receiveTime = 0;
    memset(&_rtpInfo, 0, sizeof(_rtpInfo));
    _rtpParsed = false;
}
// Read the next packet from an rtpplay-format dump file. Each record carries
// an 8-byte header -- total length (2), packet length (2), receive-time
// offset (4), all in network byte order -- followed by the raw datagram.
// Packets whose RTP payload type is in the block list are skipped.
// Returns the record's packet-length field; -2 when the first read fails
// (EOF), -1 on any other error.
int NETEQTEST_RTPpacket::readFromFile(FILE *fp)
{
    if (!fp)
    {
        return (-1);
    }

    WebRtc_UWord16 length, plen;
    WebRtc_UWord32 offset;

    if (fread(&length, 2, 1, fp) == 0)
    {
        // read failed (EOF or error)
        reset();
        return (-2);
    }
    length = ntohs(length);

    if (fread(&plen, 2, 1, fp) == 0)
    {
        reset();
        return (-1);
    }
    int packetLen = ntohs(plen);

    if (fread(&offset, 4, 1, fp) == 0)
    {
        reset();
        return (-1);
    }
    // store in local variable until we have passed the reset below
    WebRtc_UWord32 receiveTime = ntohl(offset);

    // Use length here because a plen of 0 specifies rtcp
    length = (WebRtc_UWord16) (length - HDR_SIZE);

    // check buffer size; grow by resetting so the allocation below runs
    if (_datagram && _memSize < length)
    {
        reset();
    }

    if (!_datagram)
    {
        _datagram = new WebRtc_UWord8[length];
        _memSize = length;
    }

    if (fread((unsigned short *) _datagram, 1, length, fp) != length)
    {
        reset();
        return (-1);
    }

    _datagramLen = length;
    _receiveTime = receiveTime;

    if (!_blockList.empty() && _blockList.count(payloadType()) > 0)
    {
        // discard this payload; recurse to read the next packet instead
        return (readFromFile(fp));
    }

    return (packetLen);
}
// Read one packet of a fixed size 'length' from an open file (a raw stream
// with no per-packet rtpplay header). Packets whose RTP payload type is in
// the block list are skipped. Returns the number of bytes read, or -1 on
// error.
int NETEQTEST_RTPpacket::readFixedFromFile(FILE *fp, int length)
{
    if (!fp)
    {
        return (-1);
    }

    // check buffer size; grow by resetting so the allocation below runs
    if (_datagram && _memSize < length)
    {
        reset();
    }

    if (!_datagram)
    {
        _datagram = new WebRtc_UWord8[length];
        _memSize = length;
    }

    if (fread(_datagram, 1, length, fp) != (size_t) length)
    {
        reset();
        return (-1);
    }

    _datagramLen = length;
    _receiveTime = 0;

    if (!_blockList.empty() && _blockList.count(payloadType()) > 0)
    {
        // Discard this payload and read the next packet instead.
        // Fix: recurse into readFixedFromFile -- the old code called
        // readFromFile, which would misinterpret this headerless fixed-size
        // stream as rtpplay-formatted records.
        return (readFixedFromFile(fp, length));
    }

    return (length);
}
// Write this packet to fp in rtpplay dump format: 8-byte record header
// (total length, packet length, receive-time offset; network byte order)
// followed by the raw datagram. Returns total bytes written, or -1 on error.
int NETEQTEST_RTPpacket::writeToFile(FILE *fp)
{
    if (!fp)
    {
        return (-1);
    }

    WebRtc_UWord16 length, plen;
    WebRtc_UWord32 offset;

    // length including RTPplay header
    length = htons(_datagramLen + HDR_SIZE);
    if (fwrite(&length, 2, 1, fp) != 1)
    {
        return (-1);
    }

    // payload length
    plen = htons(_datagramLen);
    if (fwrite(&plen, 2, 1, fp) != 1)
    {
        return (-1);
    }

    // offset (=receive time)
    offset = htonl(_receiveTime);
    if (fwrite(&offset, 4, 1, fp) != 1)
    {
        return (-1);
    }

    // write packet data
    if (fwrite((unsigned short *) _datagram, 1, _datagramLen, fp) != _datagramLen)
    {
        return (-1);
    }

    return (_datagramLen + HDR_SIZE); // total number of bytes written
}
// Add payload type pt to the block list; packets carrying a blocked payload
// type are silently skipped by readFromFile()/readFixedFromFile().
void NETEQTEST_RTPpacket::blockPT(WebRtc_UWord8 pt)
{
    _blockList[pt] = true;
}
// Parse the RTP header in _datagram (once), caching the header fields in
// _rtpInfo and setting _payloadPtr/_payloadLen. Does nothing if already
// parsed or if the datagram is shorter than a minimal 12-byte RTP header.
void NETEQTEST_RTPpacket::parseHeader()
{
    if (_rtpParsed)
    {
        // nothing to do
        return;
    }

    if (_datagramLen < 12)
    {
        // corrupt packet? A valid RTP header is at least 12 bytes.
        return;
    }

    _payloadLen = parseRTPheader(_datagram, _datagramLen, &_rtpInfo, &_payloadPtr);

    _rtpParsed = true;

    return;
}
// Ensure the header is parsed, then copy the cached header fields into
// rtpInfo.
void NETEQTEST_RTPpacket::parseHeader(WebRtcNetEQ_RTPInfo & rtpInfo)
{
    if (!_rtpParsed)
    {
        parseHeader(); // lazily parse on first use
    }

    rtpInfo = _rtpInfo; // plain struct copy, equivalent to the old memcpy
}
// Return a pointer to the cached RTP header info, or NULL if the header has
// not been parsed yet.
WebRtcNetEQ_RTPInfo const * NETEQTEST_RTPpacket::RTPinfo() const
{
    return _rtpParsed ? &_rtpInfo : NULL;
}
// Return the raw datagram buffer, or NULL when no valid packet is loaded.
WebRtc_UWord8 * NETEQTEST_RTPpacket::datagram() const
{
    return (_datagramLen > 0) ? _datagram : NULL;
}
// Return a pointer to the RTP payload (valid only after parseHeader()), or
// NULL when there is no payload.
WebRtc_UWord8 * NETEQTEST_RTPpacket::payload() const
{
    return (_payloadLen > 0) ? _payloadPtr : NULL;
}
// Payload length in bytes (0 until the header has been parsed).
WebRtc_Word16 NETEQTEST_RTPpacket::payloadLen() const
{
    return _payloadLen;
}

// Total datagram length in bytes, or -1 when no packet is loaded.
WebRtc_Word16 NETEQTEST_RTPpacket::dataLen() const
{
    return _datagramLen;
}

// True once parseHeader() has run on the current datagram.
bool NETEQTEST_RTPpacket::isParsed() const
{
    return _rtpParsed;
}

// True if this packet is flagged as lost.
bool NETEQTEST_RTPpacket::isLost() const
{
    return _lost;
}
// The accessors below parse the raw datagram into a temporary header struct
// on every call, so they work even before parseHeader() has been run.
// Each returns 0 when no datagram is loaded.

// RTP payload type (PT field).
WebRtc_UWord8 NETEQTEST_RTPpacket::payloadType() const
{
    WebRtcNetEQ_RTPInfo tempRTPinfo;

    if(_datagram)
    {
        parseRTPheader(_datagram, _datagramLen, &tempRTPinfo);
    }
    else
    {
        return 0;
    }

    return tempRTPinfo.payloadType;
}

// RTP sequence number.
WebRtc_UWord16 NETEQTEST_RTPpacket::sequenceNumber() const
{
    WebRtcNetEQ_RTPInfo tempRTPinfo;

    if(_datagram)
    {
        parseRTPheader(_datagram, _datagramLen, &tempRTPinfo);
    }
    else
    {
        return 0;
    }

    return tempRTPinfo.sequenceNumber;
}

// RTP timestamp.
WebRtc_UWord32 NETEQTEST_RTPpacket::timeStamp() const
{
    WebRtcNetEQ_RTPInfo tempRTPinfo;

    if(_datagram)
    {
        parseRTPheader(_datagram, _datagramLen, &tempRTPinfo);
    }
    else
    {
        return 0;
    }

    return tempRTPinfo.timeStamp;
}

// RTP synchronization source identifier.
WebRtc_UWord32 NETEQTEST_RTPpacket::SSRC() const
{
    WebRtcNetEQ_RTPInfo tempRTPinfo;

    if(_datagram)
    {
        parseRTPheader(_datagram, _datagramLen, &tempRTPinfo);
    }
    else
    {
        return 0;
    }

    return tempRTPinfo.SSRC;
}

// RTP marker bit (0 or 1).
WebRtc_UWord8 NETEQTEST_RTPpacket::markerBit() const
{
    WebRtcNetEQ_RTPInfo tempRTPinfo;

    if(_datagram)
    {
        parseRTPheader(_datagram, _datagramLen, &tempRTPinfo);
    }
    else
    {
        return 0;
    }

    return tempRTPinfo.markerBit;
}
// The setters below write directly into the raw datagram (network byte
// order) and return 0 on success, -1 if the buffer is too short to hold a
// 12-byte RTP header.
// NOTE(review): each updates the cached _rtpInfo only when the header has
// NOT been parsed (!_rtpParsed), while setMarkerBit() uses the opposite
// guard -- one of the two is likely inverted; confirm intent before relying
// on _rtpInfo after calling a setter.

// Set the payload type.
// NOTE(review): this overwrites the entire second header byte, including the
// bit position where parseRTPheader() reads the marker bit -- confirm
// callers only use payload type values < 128.
int NETEQTEST_RTPpacket::setPayloadType(WebRtc_UWord8 pt)
{
    if (_datagramLen < 12)
    {
        return -1;
    }

    if (!_rtpParsed)
    {
        _rtpInfo.payloadType = pt;
    }

    _datagram[1]=(unsigned char)(pt & 0xFF);

    return 0;
}

// Set the 16-bit sequence number (big-endian).
int NETEQTEST_RTPpacket::setSequenceNumber(WebRtc_UWord16 sn)
{
    if (_datagramLen < 12)
    {
        return -1;
    }

    if (!_rtpParsed)
    {
        _rtpInfo.sequenceNumber = sn;
    }

    _datagram[2]=(unsigned char)((sn>>8)&0xFF);
    _datagram[3]=(unsigned char)((sn)&0xFF);

    return 0;
}

// Set the 32-bit timestamp (big-endian).
int NETEQTEST_RTPpacket::setTimeStamp(WebRtc_UWord32 ts)
{
    if (_datagramLen < 12)
    {
        return -1;
    }

    if (!_rtpParsed)
    {
        _rtpInfo.timeStamp = ts;
    }

    _datagram[4]=(unsigned char)((ts>>24)&0xFF);
    _datagram[5]=(unsigned char)((ts>>16)&0xFF);
    _datagram[6]=(unsigned char)((ts>>8)&0xFF);
    _datagram[7]=(unsigned char)(ts & 0xFF);

    return 0;
}

// Set the 32-bit SSRC (big-endian).
int NETEQTEST_RTPpacket::setSSRC(WebRtc_UWord32 ssrc)
{
    if (_datagramLen < 12)
    {
        return -1;
    }

    if (!_rtpParsed)
    {
        _rtpInfo.SSRC = ssrc;
    }

    _datagram[8]=(unsigned char)((ssrc>>24)&0xFF);
    _datagram[9]=(unsigned char)((ssrc>>16)&0xFF);
    _datagram[10]=(unsigned char)((ssrc>>8)&0xFF);
    _datagram[11]=(unsigned char)(ssrc & 0xFF);

    return 0;
}
// Set or clear the RTP marker bit directly in the datagram. Returns 0 on
// success, -1 if the buffer is too short to hold an RTP header.
// Fix: the marker bit is the most significant bit of the second header byte
// (RFC 3550), which is also where this class's own parseRTPheader() reads
// it. The old code flipped bit 0 of the FIRST byte, corrupting the CSRC
// count while leaving the actual marker bit untouched.
int NETEQTEST_RTPpacket::setMarkerBit(WebRtc_UWord8 mb)
{
    if (_datagramLen < 12)
    {
        return -1;
    }

    if (_rtpParsed)
    {
        // keep the cached header info in sync
        _rtpInfo.markerBit = mb;
    }

    if (mb)
    {
        _datagram[1] |= 0x80;
    }
    else
    {
        _datagram[1] &= 0x7F;
    }

    return 0;
}
// Overwrite the datagram's 12-byte RTP header with the fields in RTPinfo.
// Returns 0 on success, -1 if the buffer is too short.
int NETEQTEST_RTPpacket::setRTPheader(const WebRtcNetEQ_RTPInfo *RTPinfo)
{
    if (_datagramLen < 12)
    {
        // this packet is not ok
        return -1;
    }

    makeRTPheader(_datagram,
        RTPinfo->payloadType,
        RTPinfo->sequenceNumber,
        RTPinfo->timeStamp,
        RTPinfo->SSRC,
        RTPinfo->markerBit);

    return 0;
}
// Split an interleaved stereo payload between this packet (master channel)
// and slaveRtp (slave channel); 'mode' selects sample-based (1- or 2-byte
// samples) or frame-based splitting. slaveRtp is first made a full copy of
// this packet. Returns 0 on success, -1 if the payload length is odd.
int NETEQTEST_RTPpacket::splitStereo(NETEQTEST_RTPpacket& slaveRtp, enum stereoModes mode)
{
    // if mono, do nothing
    if (mode == stereoModeMono)
    {
        return 0;
    }

    // check that the RTP header info is parsed
    parseHeader();

    // start by copying the main rtp packet
    slaveRtp = *this;

    if(_payloadLen == 0)
    {
        // do no more
        return 0;
    }

    if(_payloadLen%2 != 0)
    {
        // length must be a factor of 2
        return -1;
    }

    switch(mode)
    {
    case stereoModeSample1:
        {
            // sample based codec with 1-byte samples
            splitStereoSample(slaveRtp, 1 /* 1 byte/sample */);
            break;
        }
    case stereoModeSample2:
        {
            // sample based codec with 2-byte samples
            splitStereoSample(slaveRtp, 2 /* 2 bytes/sample */);
            break;
        }
    case stereoModeFrame:
        {
            // frame based codec
            splitStereoFrame(slaveRtp);
            break;
        }
    }

    return 0;
}
// Write a 12-byte RTP header (version 2, no padding/extension/CSRC) into
// rtp_data in network byte order.
// Fix: the marker bit belongs in the MSB of the second byte, sharing it with
// the 7-bit payload type (RFC 3550) -- the same position this class's
// parseRTPheader() reads it from. The old code set bit 0 of the FIRST byte,
// which corrupts the CSRC count and is never seen by the parser.
void NETEQTEST_RTPpacket::makeRTPheader(unsigned char* rtp_data, WebRtc_UWord8 payloadType, WebRtc_UWord16 seqNo, WebRtc_UWord32 timestamp, WebRtc_UWord32 ssrc, WebRtc_UWord8 markerBit) const
{
    rtp_data[0] = (unsigned char) 0x80; // V=2, P=0, X=0, CC=0
    rtp_data[1] = (unsigned char) (payloadType & 0x7F);
    if (markerBit)
    {
        rtp_data[1] |= 0x80; // set the M bit
    }
    rtp_data[2]=(unsigned char)((seqNo>>8)&0xFF);
    rtp_data[3]=(unsigned char)((seqNo)&0xFF);
    rtp_data[4]=(unsigned char)((timestamp>>24)&0xFF);
    rtp_data[5]=(unsigned char)((timestamp>>16)&0xFF);
    rtp_data[6]=(unsigned char)((timestamp>>8)&0xFF);
    rtp_data[7]=(unsigned char)(timestamp & 0xFF);
    rtp_data[8]=(unsigned char)((ssrc>>24)&0xFF);
    rtp_data[9]=(unsigned char)((ssrc>>16)&0xFF);
    rtp_data[10]=(unsigned char)((ssrc>>8)&0xFF);
    rtp_data[11]=(unsigned char)(ssrc & 0xFF);
}
// Parse a raw RTP header. Fills RTPinfo with the header fields and, if
// payloadPtr is non-NULL, points *payloadPtr at the first payload byte
// (after the CSRC list and any header extension). Returns the payload
// length in bytes with trailing padding removed.
// NOTE(review): the header is accessed through 16-bit words, so the bit
// positions below assume a little-endian host -- verify before porting.
WebRtc_UWord16 NETEQTEST_RTPpacket::parseRTPheader(const WebRtc_UWord8 *datagram, int datagramLen, WebRtcNetEQ_RTPInfo *RTPinfo, WebRtc_UWord8 **payloadPtr) const
{
    WebRtc_Word16 *rtp_data = (WebRtc_Word16 *) datagram;
    int i_P, i_X, i_CC, i_extlength=-1, i_padlength=0, i_startPosition;

    i_P=(((WebRtc_UWord16)(rtp_data[0] & 0x20))>>5); /* Extract the P bit */
    i_X=(((WebRtc_UWord16)(rtp_data[0] & 0x10))>>4); /* Extract the X bit */
    i_CC=(WebRtc_UWord16)(rtp_data[0] & 0xF); /* Get the CC number */
    RTPinfo->markerBit = (WebRtc_UWord8) ((rtp_data[0] >> 15) & 0x01); /* Get the marker bit */
    RTPinfo->payloadType = (WebRtc_UWord8) ((rtp_data[0] >> 8) & 0x7F); /* Get the coder type */
    RTPinfo->sequenceNumber = ((( ((WebRtc_UWord16)rtp_data[1]) >> 8) & 0xFF) |
        ( ((WebRtc_UWord16)(rtp_data[1] & 0xFF)) << 8)); /* Get the packet number */
    RTPinfo->timeStamp = ((((WebRtc_UWord16)rtp_data[2]) & 0xFF) << 24) |
        ((((WebRtc_UWord16)rtp_data[2]) & 0xFF00) << 8) |
        ((((WebRtc_UWord16)rtp_data[3]) >> 8) & 0xFF) |
        ((((WebRtc_UWord16)rtp_data[3]) & 0xFF) << 8); /* Get timestamp */
    RTPinfo->SSRC=((((WebRtc_UWord16)rtp_data[4]) & 0xFF) << 24) |
        ((((WebRtc_UWord16)rtp_data[4]) & 0xFF00) << 8) |
        ((((WebRtc_UWord16)rtp_data[5]) >> 8) & 0xFF) |
        ((((WebRtc_UWord16)rtp_data[5]) & 0xFF) << 8); /* Get the SSRC */

    if (i_X==1) {
        /* Extention header exists. Find out how many WebRtc_Word32 it consists of */
        i_extlength=((( ((WebRtc_UWord16)rtp_data[7+2*i_CC]) >> 8) & 0xFF) |
            ( ((WebRtc_UWord16)(rtp_data[7+2*i_CC]&0xFF)) << 8));
    }
    if (i_P==1) {
        /* Padding exists. Find out how many bytes the padding consists of */
        if (datagramLen & 0x1) {
            /* odd number of bytes => last byte in higher byte */
            i_padlength=(rtp_data[datagramLen>>1] & 0xFF);
        } else {
            /* even number of bytes => last byte in lower byte */
            i_padlength=(((WebRtc_UWord16)rtp_data[(datagramLen>>1)-1]) >> 8);
        }
    }

    /* payload starts after the fixed header, CSRC list and extension */
    i_startPosition=12+4*(i_extlength+1)+4*i_CC;

    if (payloadPtr) {
        *payloadPtr = (WebRtc_UWord8*) &rtp_data[i_startPosition>>1];
    }

    return (WebRtc_UWord16) (datagramLen-i_startPosition-i_padlength);
}
//void NETEQTEST_RTPpacket::splitStereoSample(WebRtc_UWord8 *data, WebRtc_UWord16 *lenBytes, WebRtc_UWord8 *slaveData, WebRtc_UWord16 *slaveLenBytes, int stride)
// De-interleave a sample-interleaved stereo payload in place: master samples
// are compacted at the front of this packet's payload, slave samples are
// copied into slaveRtp's payload. 'stride' is the number of bytes per
// sample. Both payload lengths are halved. No-op if either packet lacks a
// payload buffer or slaveRtp's buffer is smaller than ours.
void NETEQTEST_RTPpacket::splitStereoSample(NETEQTEST_RTPpacket& slaveRtp, int stride)
{
    if(!_payloadPtr || !slaveRtp._payloadPtr
        || _payloadLen <= 0 || slaveRtp._memSize < _memSize)
    {
        return;
    }

    WebRtc_UWord8 *readDataPtr = _payloadPtr;
    WebRtc_UWord8 *writeDataPtr = _payloadPtr;
    WebRtc_UWord8 *slaveData = slaveRtp._payloadPtr;

    while (readDataPtr - _payloadPtr < _payloadLen)
    {
        // master data
        for (int ix = 0; ix < stride; ix++) {
            *writeDataPtr = *readDataPtr;
            writeDataPtr++;
            readDataPtr++;
        }

        // slave data
        for (int ix = 0; ix < stride; ix++) {
            *slaveData = *readDataPtr;
            slaveData++;
            readDataPtr++;
        }
    }

    _payloadLen /= 2;
    slaveRtp._payloadLen = _payloadLen;
}
//void NETEQTEST_RTPpacket::splitStereoFrame(WebRtc_UWord8 *data, WebRtc_UWord16 *lenBytes, WebRtc_UWord8 *slaveData, WebRtc_UWord16 *slaveLenBytes)
// Split a frame-interleaved stereo payload: the first half (master frame)
// stays in this packet, the second half is copied into slaveRtp. Both
// payload lengths are halved. No-op if either packet lacks a payload buffer
// or slaveRtp's buffer is smaller than ours.
void NETEQTEST_RTPpacket::splitStereoFrame(NETEQTEST_RTPpacket& slaveRtp)
{
    if(!_payloadPtr || !slaveRtp._payloadPtr
        || _payloadLen <= 0 || slaveRtp._memSize < _memSize)
    {
        return;
    }

    memmove(slaveRtp._payloadPtr, _payloadPtr + _payloadLen/2, _payloadLen/2);

    _payloadLen /= 2;
    slaveRtp._payloadLen = _payloadLen;
}

View File

@ -1,86 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef NETEQTEST_RTPPACKET_H
#define NETEQTEST_RTPPACKET_H
#include <map>
#include <stdio.h>
#include "typedefs.h"
#include "webrtc_neteq_internal.h"
// How a stereo payload is interleaved, for splitStereo().
enum stereoModes {
    stereoModeMono,    // single channel; nothing to split
    stereoModeSample1, // sample-interleaved, 1 byte per sample
    stereoModeSample2, // sample-interleaved, 2 bytes per sample
    stereoModeFrame    // frame-interleaved (master frame first)
};

// One RTP packet for NetEQ testing: owns a raw datagram buffer, lazily
// parses the RTP header, and can read/write rtpplay-format dump files.
// Data members are public so test programs can poke at them directly.
class NETEQTEST_RTPpacket
{
public:
    NETEQTEST_RTPpacket();
    NETEQTEST_RTPpacket(const NETEQTEST_RTPpacket& copyFromMe);
    NETEQTEST_RTPpacket & operator = (const NETEQTEST_RTPpacket & other);
    // True when the packet holds no valid datagram.
    bool operator !() const { return (dataLen() < 0); };
    ~NETEQTEST_RTPpacket();

    // Release the datagram buffer and return to the invalid state.
    void reset();
    int readFromFile(FILE *fp);               // read next rtpplay record
    int readFixedFromFile(FILE *fp, int len); // read fixed-size raw record
    int writeToFile(FILE *fp);                // write as rtpplay record
    void blockPT(WebRtc_UWord8 pt);           // skip packets with this PT
    //WebRtc_Word16 payloadType();
    void parseHeader();                       // parse + cache header fields
    void parseHeader(WebRtcNetEQ_RTPInfo & rtpInfo);
    WebRtcNetEQ_RTPInfo const * RTPinfo() const;
    WebRtc_UWord8 * datagram() const;
    WebRtc_UWord8 * payload() const;
    WebRtc_Word16 payloadLen() const;
    WebRtc_Word16 dataLen() const;
    bool isParsed() const;
    bool isLost() const;
    WebRtc_UWord32 time() const { return _receiveTime; };

    // Header-field getters (parse the raw datagram on each call).
    WebRtc_UWord8  payloadType() const;
    WebRtc_UWord16 sequenceNumber() const;
    WebRtc_UWord32 timeStamp() const;
    WebRtc_UWord32 SSRC() const;
    WebRtc_UWord8  markerBit() const;

    // Header-field setters (write directly into the raw datagram).
    int setPayloadType(WebRtc_UWord8 pt);
    int setSequenceNumber(WebRtc_UWord16 sn);
    int setTimeStamp(WebRtc_UWord32 ts);
    int setSSRC(WebRtc_UWord32 ssrc);
    int setMarkerBit(WebRtc_UWord8 mb);
    void setTime(WebRtc_UWord32 receiveTime) { _receiveTime = receiveTime; };
    int setRTPheader(const WebRtcNetEQ_RTPInfo *RTPinfo);

    // Split a stereo payload between this (master) and slaveRtp (slave).
    int splitStereo(NETEQTEST_RTPpacket& slaveRtp, enum stereoModes mode);

    WebRtc_UWord8 *     _datagram;    // raw packet buffer, owned (new[])
    WebRtc_UWord8 *     _payloadPtr;  // points into _datagram after parsing
    int                 _memSize;     // allocated size of _datagram
    WebRtc_Word16       _datagramLen; // valid bytes in _datagram; -1 = invalid
    WebRtc_Word16       _payloadLen;  // payload bytes (set by parseHeader)
    WebRtcNetEQ_RTPInfo _rtpInfo;     // cached header fields
    bool                _rtpParsed;   // true once parseHeader() has run
    WebRtc_UWord32      _receiveTime; // receive-time offset from the dump file
    bool                _lost;        // packet flagged as lost
    std::map<WebRtc_UWord8, bool> _blockList; // payload types to skip on read

private:
    void makeRTPheader(unsigned char* rtp_data, WebRtc_UWord8 payloadType, WebRtc_UWord16 seqNo, WebRtc_UWord32 timestamp, WebRtc_UWord32 ssrc, WebRtc_UWord8 markerBit) const;
    WebRtc_UWord16 parseRTPheader(const WebRtc_UWord8 *datagram, int datagramLen, WebRtcNetEQ_RTPInfo *RTPinfo, WebRtc_UWord8 **payloadPtr = NULL) const;
    void splitStereoSample(NETEQTEST_RTPpacket& slaveRtp, int stride);
    void splitStereoFrame(NETEQTEST_RTPpacket& slaveRtp);
};
#endif //NETEQTEST_RTPPACKET_H

File diff suppressed because it is too large Load Diff

View File

@ -1,76 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/* PayloadTypes.h */
/* Used by NetEqRTPplay application */
/* RTP defined codepoints */
/* Statically assigned RTP payload types (see RFC 3551). */
#define NETEQ_CODEC_PCMU_PT 0
#define NETEQ_CODEC_GSMFR_PT 3
#define NETEQ_CODEC_G723_PT 4
#define NETEQ_CODEC_DVI4_PT 125 // 8 kHz version
//#define NETEQ_CODEC_DVI4_16_PT 6 // 16 kHz version
#define NETEQ_CODEC_PCMA_PT 8
#define NETEQ_CODEC_G722_PT 9
#define NETEQ_CODEC_CN_PT 13
//#define NETEQ_CODEC_G728_PT 15
//#define NETEQ_CODEC_DVI4_11_PT 16 // 11.025 kHz version
//#define NETEQ_CODEC_DVI4_22_PT 17 // 22.050 kHz version
#define NETEQ_CODEC_G729_PT 18
/* Dynamic RTP codepoints as defined in VoiceEngine (file VEAPI.cpp) */
/* NOTE(review): every active value below must be unique within this file and
   is assumed to match the VoiceEngine defaults -- verify there when changing. */
#define NETEQ_CODEC_IPCMWB_PT 97
#define NETEQ_CODEC_SPEEX8_PT 98
#define NETEQ_CODEC_SPEEX16_PT 99
#define NETEQ_CODEC_EG711U_PT 100
#define NETEQ_CODEC_EG711A_PT 101
#define NETEQ_CODEC_ILBC_PT 102
#define NETEQ_CODEC_ISAC_PT 103
#define NETEQ_CODEC_ISACLC_PT 119
#define NETEQ_CODEC_ISACSWB_PT 104
#define NETEQ_CODEC_AVT_PT 106
#define NETEQ_CODEC_G722_1_16_PT 108
#define NETEQ_CODEC_G722_1_24_PT 109
#define NETEQ_CODEC_G722_1_32_PT 110
#define NETEQ_CODEC_SC3_PT 111
#define NETEQ_CODEC_AMR_PT 112
#define NETEQ_CODEC_GSMEFR_PT 113
//#define NETEQ_CODEC_ILBCRCU_PT 114
#define NETEQ_CODEC_G726_16_PT 115
#define NETEQ_CODEC_G726_24_PT 116
#define NETEQ_CODEC_G726_32_PT 121
#define NETEQ_CODEC_RED_PT 117
#define NETEQ_CODEC_G726_40_PT 118
//#define NETEQ_CODEC_ENERGY_PT 120
#define NETEQ_CODEC_CN_WB_PT 105
#define NETEQ_CODEC_CN_SWB_PT 126
#define NETEQ_CODEC_G729_1_PT 107
#define NETEQ_CODEC_G729D_PT 123
#define NETEQ_CODEC_MELPE_PT 124
/* Extra dynamic codepoints */
#define NETEQ_CODEC_AMRWB_PT 120
#define NETEQ_CODEC_PCM16B_PT 93
#define NETEQ_CODEC_PCM16B_WB_PT 94
#define NETEQ_CODEC_PCM16B_SWB32KHZ_PT 95
#define NETEQ_CODEC_PCM16B_SWB48KHZ_PT 96
#define NETEQ_CODEC_MPEG4AAC_PT 122
/* Not default in VoiceEngine */
#define NETEQ_CODEC_G722_1C_24_PT 84
#define NETEQ_CODEC_G722_1C_32_PT 85
#define NETEQ_CODEC_G722_1C_48_PT 86
#define NETEQ_CODEC_SILK_8_PT 80
#define NETEQ_CODEC_SILK_12_PT 81
#define NETEQ_CODEC_SILK_16_PT 82
#define NETEQ_CODEC_SILK_24_PT 83

View File

@ -1,64 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <stdio.h>
#include <vector>
#include "NETEQTEST_RTPpacket.h"
/*********************/
/* Misc. definitions */
/*********************/
#define FIRSTLINELEN 40
int main(int argc, char* argv[])
{
    // RTPanalyze: convert an rtpplay-format RTP dump to a text table with one
    // line per packet: sequence number, RTP timestamp, arrival time, size.
    if (argc != 3)
    {
        // Original code dereferenced argv[1]/argv[2] without checking argc.
        printf("Usage: %s in.rtp out.txt\n", argv[0]);
        return (-1);
    }
    FILE *inFile = fopen(argv[1], "rb");
    if (!inFile)
    {
        printf("Cannot open input file %s\n", argv[1]);
        return (-1);
    }
    printf("Input file: %s\n", argv[1]);
    FILE *outFile = fopen(argv[2], "wt");
    if (!outFile)
    {
        printf("Cannot open output file %s\n", argv[2]);
        fclose(inFile);
        return (-1);
    }
    printf("Output file: %s\n\n", argv[2]);
    // print file header
    fprintf(outFile, "SeqNo TimeStamp SendTime Size\n");
    // skip the rtpplay text header line, then the 16-byte binary header:
    // start_sec + start_usec + source + port + padding
    char firstline[FIRSTLINELEN];
    if (fgets(firstline, FIRSTLINELEN, inFile) == NULL ||
        fread(firstline, 4 + 4 + 4 + 2 + 2, 1, inFile) != 1)
    {
        printf("Error: input file %s is too short\n", argv[1]);
        fclose(inFile);
        fclose(outFile);
        return (-1);
    }
    NETEQTEST_RTPpacket packet;
    while (packet.readFromFile(inFile) >= 0)
    {
        // Cast explicitly so the format specifiers match the promoted
        // argument types on both 32- and 64-bit platforms (WebRtc_UWord32 is
        // not necessarily unsigned long, which "%lu" requires).
        fprintf(outFile, "%5hu %10lu %10lu %5i\n",
                packet.sequenceNumber(),
                (unsigned long) packet.timeStamp(),
                (unsigned long) packet.time(),
                (int) packet.dataLen());
    }
    fclose(inFile);
    fclose(outFile);
    return 0;
}

View File

@ -1,87 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <algorithm>
#include <stdio.h>
#include <vector>
#include "NETEQTEST_RTPpacket.h"
/*********************/
/* Misc. definitions */
/*********************/
#define FIRSTLINELEN 40
int main(int argc, char* argv[])
{
    // RTPcat: concatenate several rtpplay-format RTP dump files into one.
    // The global file header is copied from the first input; the headers of
    // the remaining inputs are skipped.
    if (argc < 3) {
        printf("Usage: RTPcat in1.rtp int2.rtp [...] out.rtp\n");
        exit(1);
    }
    FILE *inFile = fopen(argv[1], "rb");
    if (!inFile) {
        printf("Cannot open input file %s\n", argv[1]);
        return (-1);
    }
    FILE *outFile = fopen(argv[argc - 1], "wb"); // last parameter is output file
    if (!outFile) {
        printf("Cannot open output file %s\n", argv[argc - 1]);
        return (-1);
    }
    printf("Output RTP file: %s\n\n", argv[argc - 1]);
    // read file header and write directly to output file
    // (the reads were previously unchecked; a truncated file caused garbage
    // to be copied to the output)
    char firstline[FIRSTLINELEN];
    if (fgets(firstline, FIRSTLINELEN, inFile) == NULL) {
        printf("Error: input file %s is too short\n", argv[1]);
        return (-1);
    }
    fputs(firstline, outFile);
    // start_sec + start_usec + source + port + padding
    if (fread(firstline, 4 + 4 + 4 + 2 + 2, 1, inFile) != 1) {
        printf("Error: input file %s is too short\n", argv[1]);
        return (-1);
    }
    fwrite(firstline, 4 + 4 + 4 + 2 + 2, 1, outFile);
    // close input file and re-open it later (easier to write the loop below)
    fclose(inFile);
    for (int i = 1; i < argc - 1; i++) {
        inFile = fopen(argv[i], "rb");
        if (!inFile) {
            printf("Cannot open input file %s\n", argv[i]);
            return (-1);
        }
        printf("Input RTP file: %s\n", argv[i]);
        // skip this file's headers; the ones from the first file were copied
        if (fgets(firstline, FIRSTLINELEN, inFile) == NULL ||
            fread(firstline, 4 + 4 + 4 + 2 + 2, 1, inFile) != 1) {
            printf("Error: input file %s is too short\n", argv[i]);
            return (-1);
        }
        NETEQTEST_RTPpacket packet;
        int packLen = packet.readFromFile(inFile);
        if (packLen < 0) {
            exit(1);
        }
        while (packLen >= 0) {
            packet.writeToFile(outFile);
            packLen = packet.readFromFile(inFile);
        }
        fclose(inFile);
    }
    fclose(outFile);
    return 0;
}

View File

@ -1,138 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <stdio.h>

#include <algorithm>
#include <map>
#include <utility>
#include <vector>

#include "NETEQTEST_RTPpacket.h"
/*********************/
/* Misc. definitions */
/*********************/
#define FIRSTLINELEN 40
/* Comparison predicate for std::sort: orders RTP packet pointers by their
   recorded send/arrival time, ascending (strict weak ordering). */
bool pktCmp (NETEQTEST_RTPpacket *a, NETEQTEST_RTPpacket *b)
{
    if (a->time() < b->time())
    {
        return true;
    }
    return false;
}
int main(int argc, char* argv[])
{
FILE *inFile=fopen(argv[1],"rb");
if (!inFile)
{
printf("Cannot open input file %s\n", argv[1]);
return(-1);
}
printf("Input RTP file: %s\n",argv[1]);
FILE *statFile=fopen(argv[2],"rt");
if (!statFile)
{
printf("Cannot open timing file %s\n", argv[2]);
return(-1);
}
printf("Timing file: %s\n",argv[2]);
FILE *outFile=fopen(argv[3],"wb");
if (!outFile)
{
printf("Cannot open output file %s\n", argv[3]);
return(-1);
}
printf("Output RTP file: %s\n\n",argv[3]);
// read all statistics and insert into map
// read first line
char tempStr[100];
fgets(tempStr, 100, statFile);
// define map
std::map<std::pair<WebRtc_UWord16, WebRtc_UWord32>, WebRtc_UWord32>
packetStats;
WebRtc_UWord16 seqNo;
WebRtc_UWord32 ts;
WebRtc_UWord32 sendTime;
while(fscanf(statFile, "%u %u %u %*i\n", &seqNo, &ts, &sendTime) == 3)
{
std::pair<WebRtc_UWord16, WebRtc_UWord32> tempPair =
std::pair<WebRtc_UWord16, WebRtc_UWord32>(seqNo, ts);
packetStats[tempPair] = sendTime;
}
fclose(statFile);
// read file header and write directly to output file
char firstline[FIRSTLINELEN];
fgets(firstline, FIRSTLINELEN, inFile);
fputs(firstline, outFile);
fread(firstline, 4+4+4+2+2, 1, inFile); // start_sec + start_usec + source + port + padding
fwrite(firstline, 4+4+4+2+2, 1, outFile);
std::vector<NETEQTEST_RTPpacket *> packetVec;
int i = 0;
while (1)
{
// insert in vector
NETEQTEST_RTPpacket *newPacket = new NETEQTEST_RTPpacket();
if (newPacket->readFromFile(inFile) < 0)
{
// end of file
break;
}
// look for new send time in statistics vector
std::pair<WebRtc_UWord16, WebRtc_UWord32> tempPair =
std::pair<WebRtc_UWord16, WebRtc_UWord32>(newPacket->sequenceNumber(), newPacket->timeStamp());
WebRtc_UWord32 newSendTime = packetStats[tempPair];
if (newSendTime >= 0)
{
newPacket->setTime(newSendTime); // set new send time
packetVec.push_back(newPacket); // insert in vector
}
else
{
// negative value represents lost packet
// don't insert, but delete packet object
delete newPacket;
}
}
// sort the vector according to send times
std::sort(packetVec.begin(), packetVec.end(), pktCmp);
std::vector<NETEQTEST_RTPpacket *>::iterator it;
for (it = packetVec.begin(); it != packetVec.end(); it++)
{
// write to out file
if ((*it)->writeToFile(outFile) < 0)
{
printf("Error writing to file\n");
return(-1);
}
// delete packet
delete *it;
}
fclose(inFile);
fclose(outFile);
return 0;
}

File diff suppressed because it is too large Load Diff

View File

@ -1,198 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/* header includes */
#include "typedefs.h"
#include <stdio.h>
#include <stdlib.h>
#ifdef WIN32
#include <winsock2.h>
#include <io.h>
#endif
#ifdef WEBRTC_LINUX
#include <netinet/in.h>
#endif
#include <search.h>
#include <float.h>
/*********************/
/* Misc. definitions */
/*********************/
#define FIRSTLINELEN 40
#define CHECK_ZERO(a) {int errCode = a; if((errCode)!=0){fprintf(stderr,"\n %s \n line: %d \n error at %s\n Error Code = %d\n",__FILE__,__LINE__,#a, WebRtcNetEQ_GetErrorCode(inst)); exit(0);}}
#define CHECK_NOT_NULL(a) if((a)==NULL){fprintf(stderr,"\n %s \n line: %d \nerror at %s\n",__FILE__,__LINE__,#a );return(-1);}
/********************/
/* Global variables */
/********************/
/* Input RTP dump, output RTP dump, and packet arrival-time (.dat) file. */
FILE *in_file;
FILE *out_file;
FILE *dat_file;

/* One packet arrival time (in ms, as read from the dat file) paired with the
   packet's original index in the input file, so each time can be matched back
   to its packet after the array is sorted by time. */
struct arr_time {
    float time;
    WebRtc_UWord32 ix;
};
/* Return the total size of an open file in bytes, leaving the current file
   position unchanged. Returns -1 if fid is NULL or the position cannot be
   saved. */
int filelen(FILE *fid)
{
    fpos_t saved_pos;

    if (fid == NULL) {
        return (-1);
    }
    if (fgetpos(fid, &saved_pos) != 0) {
        return (-1);
    }
    fseek(fid, 0, SEEK_END);
    int total_bytes = (int) ftell(fid);
    fsetpos(fid, &saved_pos);
    return (total_bytes);
}
int compare_arr_time(const void *x, const void *y);
int main(int argc, char* argv[])
{
    // RTPjitter: apply the packet timing described in a dat file to an
    // rtpplay-format RTP dump by overwriting each packet's arrival-time field
    // (bytes 4..7 of its record) with the corresponding time from the dat file.
    unsigned int dat_len, rtp_len, Npack, k;
    arr_time *time_vec = NULL; // NULL so the cleanup path can free() safely
    char firstline[FIRSTLINELEN];
    unsigned char *rtp_vec = NULL, **packet_ptr = NULL, *temp_packet;
    WebRtc_UWord16 len;
    WebRtc_UWord32 *offset;

    /* check number of parameters */
    if (argc != 4) {
        /* print help text and exit */
        printf("Apply jitter on RTP stream.\n");
        printf("The program reads an RTP stream and packet timing from two files.\n");
        printf("The RTP stream is modified to have the same jitter as described in the timing files.\n");
        printf("The format of the RTP stream file should be the same as for rtpplay,\n");
        printf("and can be obtained e.g., from Ethereal by using\n");
        printf("Statistics -> RTP -> Show All Streams -> [select a stream] -> Save As\n\n");
        printf("Usage:\n\n");
        printf("%s RTP_infile dat_file RTP_outfile\n", argv[0]);
        printf("where:\n");
        printf("RTP_infile : RTP stream input file\n\n");
        printf("dat_file : file with packet arrival times in ms\n\n");
        printf("RTP_outfile : RTP stream output file\n\n");
        return(0);
    }

    in_file=fopen(argv[1],"rb");
    CHECK_NOT_NULL(in_file);
    printf("Input file: %s\n",argv[1]);
    dat_file=fopen(argv[2],"rb");
    CHECK_NOT_NULL(dat_file);
    printf("Dat-file: %s\n",argv[2]);
    out_file=fopen(argv[3],"wb");
    CHECK_NOT_NULL(out_file);
    printf("Output file: %s\n\n",argv[3]);

    // add 1000 bytes to avoid (rare) strange error
    time_vec = (arr_time *) malloc(sizeof(arr_time)*(filelen(dat_file)/sizeof(float)) + 1000);
    if (time_vec==NULL) {
        fprintf(stderr, "Error: could not allocate memory for reading dat file\n");
        // NOTE: rtp_vec/packet_ptr are initialized to NULL above; previously
        // this path called free() on uninitialized pointers (UB).
        goto closing;
    }

    // read all arrival times, remembering each one's original packet index
    dat_len=0;
    while(fread(&(time_vec[dat_len].time),sizeof(float),1,dat_file)>0) {
        time_vec[dat_len].ix=dat_len;
        dat_len++;
    }
    // sort by arrival time; .ix still identifies the packet each time belongs to
    qsort(time_vec,dat_len,sizeof(arr_time),compare_arr_time);

    rtp_vec = (unsigned char *) malloc(sizeof(unsigned char)*filelen(in_file));
    if (rtp_vec==NULL) {
        fprintf(stderr,"Error: could not allocate memory for reading rtp file\n");
        goto closing;
    }

    // read file header and write directly to output file
    if (fgets(firstline, FIRSTLINELEN, in_file) == NULL) {
        fprintf(stderr, "Error: input file too short\n");
        goto closing;
    }
    fputs(firstline, out_file);
    // start_sec + start_usec + source + port + padding
    if (fread(firstline, 4+4+4+2+2, 1, in_file) != 1) {
        fprintf(stderr, "Error: input file too short\n");
        goto closing;
    }
    fwrite(firstline, 4+4+4+2+2, 1, out_file);

    // read all RTP packets into one contiguous vector; each record starts
    // with its 16-bit total length in network byte order
    rtp_len=0;
    Npack=0;
    len=(WebRtc_UWord16) fread(&rtp_vec[rtp_len], sizeof(unsigned char), 2, in_file); // read length of first packet
    while(len==2) {
        len = ntohs(*((WebRtc_UWord16 *)(rtp_vec + rtp_len)));
        if (len < 2) {
            // a record shorter than its own length field is corrupt; the old
            // code computed len-2 below, which underflowed to a huge size_t
            fprintf(stderr,"Error: corrupt packet length\n");
            goto closing;
        }
        rtp_len += 2;
        if(fread(&rtp_vec[rtp_len], sizeof(unsigned char), len-2, in_file)!=(unsigned) (len-2)) {
            fprintf(stderr,"Error: corrupt packet length\n");
            goto closing;
        }
        rtp_len += len-2;
        Npack++;
        len=(WebRtc_UWord16) fread(&rtp_vec[rtp_len], sizeof(unsigned char), 2, in_file); // read length of next packet
    }

    if (Npack > 0) {
        // build a table of pointers to the start of each packet record
        packet_ptr = (unsigned char **) malloc(Npack*sizeof(unsigned char*));
        if (packet_ptr==NULL) { // previously unchecked
            fprintf(stderr,"Error: could not allocate memory for packet table\n");
            goto closing;
        }
        packet_ptr[0]=rtp_vec;
        k=1;
        while(k<Npack) {
            len = ntohs(*((WebRtc_UWord16 *) packet_ptr[k-1]));
            packet_ptr[k]=packet_ptr[k-1]+len;
            k++;
        }
    }

    // overwrite each packet's arrival-time field with the dat-file time and
    // write the packet to the output, in sorted time order
    for(k=0; k<dat_len && k<Npack; k++) {
        if(time_vec[k].time < FLT_MAX && time_vec[k].ix < Npack){
            temp_packet = packet_ptr[time_vec[k].ix];
            offset = (WebRtc_UWord32 *) (temp_packet+4);
            if ( time_vec[k].time >= 0 ) {
                *offset = htonl((WebRtc_UWord32) time_vec[k].time);
            }
            else {
                *offset = htonl((WebRtc_UWord32) 0);
                fprintf(stderr, "Warning: negative receive time in dat file transformed to 0.\n");
            }
            // write packet to file
            fwrite(temp_packet, sizeof(unsigned char), ntohs(*((WebRtc_UWord16*) temp_packet)), out_file);
        }
    }

closing:
    free(time_vec);
    free(rtp_vec);
    free(packet_ptr); // was leaked before; free(NULL) is a no-op
    fclose(in_file);
    fclose(dat_file);
    fclose(out_file);
    return(0);
}
int compare_arr_time(const void *xp, const void *yp) {
if(((arr_time *)xp)->time == ((arr_time *)yp)->time)
return(0);
else if(((arr_time *)xp)->time > ((arr_time *)yp)->time)
return(1);
return(-1);
}

View File

@ -1,95 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <algorithm>
#include <stdio.h>
#include <vector>
#include "NETEQTEST_RTPpacket.h"
/*********************/
/* Misc. definitions */
/*********************/
#define FIRSTLINELEN 40
int main(int argc, char* argv[])
{
    // RTPtimeshift: rewrite an rtpplay-format RTP dump so that the first
    // packet gets the given RTP timestamp (and optionally sequence number and
    // arrival time); all later packets are shifted by the same offsets.
    if(argc < 4 || argc > 6)
    {
        printf("Usage: RTPtimeshift in.rtp out.rtp newStartTS [newStartSN [newStartArrTime]]\n");
        exit(1);
    }
    FILE *inFile=fopen(argv[1],"rb");
    if (!inFile)
    {
        printf("Cannot open input file %s\n", argv[1]);
        return(-1);
    }
    printf("Input RTP file: %s\n",argv[1]);
    FILE *outFile=fopen(argv[2],"wb");
    if (!outFile)
    {
        printf("Cannot open output file %s\n", argv[2]);
        return(-1);
    }
    printf("Output RTP file: %s\n\n",argv[2]);
    // read file header and write directly to output file
    char firstline[FIRSTLINELEN];
    if (fgets(firstline, FIRSTLINELEN, inFile) == NULL)
    {
        printf("Error: input file %s is too short\n", argv[1]);
        return(-1);
    }
    fputs(firstline, outFile);
    // start_sec + start_usec + source + port + padding
    if (fread(firstline, 4+4+4+2+2, 1, inFile) != 1)
    {
        printf("Error: input file %s is too short\n", argv[1]);
        return(-1);
    }
    fwrite(firstline, 4+4+4+2+2, 1, outFile);
    NETEQTEST_RTPpacket packet;
    int packLen = packet.readFromFile(inFile);
    if (packLen < 0)
    {
        exit(1);
    }
    // get new start TS and start SeqNo from arguments; offsets are derived
    // from the first packet so it ends up at the requested start values
    WebRtc_UWord32 TSdiff = atoi(argv[3]) - packet.timeStamp();
    WebRtc_UWord16 SNdiff = 0;
    WebRtc_UWord32 ATdiff = 0;
    if (argc > 4)
    {
        // BUG FIX: the original compared the argv pointer itself with 0
        // ("argv[4] >= 0"), which is always true; the numeric value must be
        // tested. A negative argument means "leave this field unchanged".
        if (atoi(argv[4]) >= 0)
            SNdiff = atoi(argv[4]) - packet.sequenceNumber();
        if (argc > 5)
        {
            if (atoi(argv[5]) >= 0)
                ATdiff = atoi(argv[5]) - packet.time();
        }
    }
    while (packLen >= 0)
    {
        packet.setTimeStamp(packet.timeStamp() + TSdiff);
        packet.setSequenceNumber(packet.sequenceNumber() + SNdiff);
        packet.setTime(packet.time() + ATdiff);
        packet.writeToFile(outFile);
        packLen = packet.readFromFile(inFile);
    }
    fclose(inFile);
    fclose(outFile);
    return 0;
}

View File

@ -1,191 +0,0 @@
function outStruct = parse_delay_file(file)
% parse_delay_file  Read a binary NetEQ delay-logging file (format 2.0).
%
% Returns a struct with per-packet vectors (RTP timestamp, sequence number,
% payload type, payload length, arrival and decode clock times, playout
% delay, optimal buffer level), sample-rate change history, and accumulated
% expand/accelerate/preemptive-expand totals.

fid = fopen(file, 'rb');
if fid == -1
    error('Cannot open file %s', file);
end

% check the file-type marker and format version in the first text line
textline = fgetl(fid);
if ~strncmp(textline, '#!NetEQ_Delay_Logging', 21)
    error('Wrong file format');
end
ver = sscanf(textline, '#!NetEQ_Delay_Logging%d.%d');
if ~all(ver == [2; 0])
    error('Wrong version of delay logging function')
end

% verify the simulation terminated normally ("End of file" trailer)
start_pos = ftell(fid);
fseek(fid, -12, 'eof');
textline = fgetl(fid);
% NOTE(review): the comparison length 21 exceeds the 11 characters of
% 'End of file' (it looks copied from the header check above) -- confirm
% this trailer check behaves as intended.
if ~strncmp(textline, 'End of file', 21)
    error('File ending is not correct. Seems like the simulation ended abnormally.');
end

% total packet count is stored as an int32 immediately before the trailer
fseek(fid,-12-4, 'eof');
Npackets = fread(fid, 1, 'int32');
fseek(fid, start_pos, 'bof');

% per-packet vectors; NaN marks "no time recorded yet"
rtpts = zeros(Npackets, 1);
seqno = zeros(Npackets, 1);
pt = zeros(Npackets, 1);
plen = zeros(Npackets, 1);
recin_t = nan*ones(Npackets, 1);
decode_t = nan*ones(Npackets, 1);
playout_delay = zeros(Npackets, 1);
optbuf = zeros(Npackets, 1);

fs_ix = 1;          % next free slot in the sample-rate change arrays
clock = 0;          % current clock value, updated by signal 3 records
ts_ix = 1;          % next free packet slot
ended = 0;
late_packets = 0;   % packets decoded (one-descriptor) before being received
fs_now = 8000;      % current sample rate (Hz)
last_decode_k = 0;  % packet index of the most recent decode event
tot_expand = 0;
tot_accelerate = 0;
tot_preemptive = 0;

% the file body is a stream of (int32 signal, payload) records
while not(ended)
    signal = fread(fid, 1, '*int32');
    switch signal
        case 3 % NETEQ_DELAY_LOGGING_SIGNAL_CLOCK
            clock = fread(fid, 1, '*float32');
            % keep on reading batches of M until the signal is no longer "3"
            % read int32 + float32 in one go
            % this is to save execution time
            temp = [3; 0];
            M = 120;
            while all(temp(1,:) == 3)
                fp = ftell(fid);
                temp = fread(fid, [2 M], '*int32');
            end
            % back up to last clock event
            fseek(fid, fp - ftell(fid) + ...
                (find(temp(1,:) ~= 3, 1 ) - 2) * 2 * 4 + 4, 'cof');
            % read the last clock value
            clock = fread(fid, 1, '*float32');
        case 1 % NETEQ_DELAY_LOGGING_SIGNAL_RECIN
            temp_ts = fread(fid, 1, 'uint32');
            if late_packets > 0
                % this packet may already occupy a slot created when it was
                % decoded before arriving; search backwards for its timestamp
                temp_ix = ts_ix - 1;
                while (temp_ix >= 1) && (rtpts(temp_ix) ~= temp_ts)
                    % TODO(hlundin): use matlab vector search instead?
                    temp_ix = temp_ix - 1;
                end
                if temp_ix >= 1
                    % the ts was found in the vector
                    late_packets = late_packets - 1;
                else
                    temp_ix = ts_ix;
                    ts_ix = ts_ix + 1;
                end
            else
                temp_ix = ts_ix;
                ts_ix = ts_ix + 1;
            end
            rtpts(temp_ix) = temp_ts;
            seqno(temp_ix) = fread(fid, 1, 'uint16');
            pt(temp_ix) = fread(fid, 1, 'int32');
            plen(temp_ix) = fread(fid, 1, 'int16');
            recin_t(temp_ix) = clock;
        case 2 % NETEQ_DELAY_LOGGING_SIGNAL_FLUSH
            % do nothing
        case 4 % NETEQ_DELAY_LOGGING_SIGNAL_EOF
            ended = 1;
        case 5 % NETEQ_DELAY_LOGGING_SIGNAL_DECODE
            last_decode_ts = fread(fid, 1, 'uint32');
            temp_delay = fread(fid, 1, 'uint16');
            k = find(rtpts(1:(ts_ix - 1))==last_decode_ts,1,'last');
            if ~isempty(k)
                decode_t(k) = clock;
                playout_delay(k) = temp_delay + ...
                    5 * fs_now / 8000; % add overlap length
                last_decode_k = k;
            end
        case 6 % NETEQ_DELAY_LOGGING_SIGNAL_CHANGE_FS
            fsvec(fs_ix) = fread(fid, 1, 'uint16');
            fschange_ts(fs_ix) = last_decode_ts;
            fs_now = fsvec(fs_ix);
            fs_ix = fs_ix + 1;
        case 7 % NETEQ_DELAY_LOGGING_SIGNAL_MERGE_INFO
            playout_delay(last_decode_k) = playout_delay(last_decode_k) ...
                + fread(fid, 1, 'int32');
        case 8 % NETEQ_DELAY_LOGGING_SIGNAL_EXPAND_INFO
            temp = fread(fid, 1, 'int32');
            if last_decode_k ~= 0
                tot_expand = tot_expand + temp / (fs_now / 1000);
            end
        case 9 % NETEQ_DELAY_LOGGING_SIGNAL_ACCELERATE_INFO
            temp = fread(fid, 1, 'int32');
            if last_decode_k ~= 0
                tot_accelerate = tot_accelerate + temp / (fs_now / 1000);
            end
        case 10 % NETEQ_DELAY_LOGGING_SIGNAL_PREEMPTIVE_INFO
            temp = fread(fid, 1, 'int32');
            if last_decode_k ~= 0
                tot_preemptive = tot_preemptive + temp / (fs_now / 1000);
            end
        case 11 % NETEQ_DELAY_LOGGING_SIGNAL_OPTBUF
            optbuf(last_decode_k) = fread(fid, 1, 'int32');
        case 12 % NETEQ_DELAY_LOGGING_SIGNAL_DECODE_ONE_DESC
            % decode of a packet that has not necessarily been received yet
            last_decode_ts = fread(fid, 1, 'uint32');
            k = ts_ix - 1;
            while (k >= 1) && (rtpts(k) ~= last_decode_ts)
                % TODO(hlundin): use matlab vector search instead?
                k = k - 1;
            end
            if k < 1
                % packet not received yet
                k = ts_ix;
                rtpts(ts_ix) = last_decode_ts;
                late_packets = late_packets + 1;
            end
            decode_t(k) = clock;
            playout_delay(k) = fread(fid, 1, 'uint16') + ...
                5 * fs_now / 8000; % add overlap length
            last_decode_k = k;
    end
end

fclose(fid);

outStruct = struct(...
    'ts', rtpts, ...
    'sn', seqno, ...
    'pt', pt,...
    'plen', plen,...
    'arrival', recin_t,...
    'decode', decode_t,...
    'fs', fsvec(:),...
    'fschange_ts', fschange_ts(:),...
    'playout_delay', playout_delay,...
    'tot_expand', tot_expand,...
    'tot_accelerate', tot_accelerate,...
    'tot_preemptive', tot_preemptive,...
    'optbuf', optbuf);

View File

@ -1,187 +0,0 @@
function [delay_struct, delayvalues] = plot_neteq_delay(delayfile, varargin)
% InfoStruct = plot_neteq_delay(delayfile)
% InfoStruct = plot_neteq_delay(delayfile, 'skipdelay', skip_seconds)
%
% Henrik Lundin, 2006-11-17
% Henrik Lundin, 2011-05-17
%
% Parses a NetEQ delay-logging file and plots/collects network and buffer
% delay statistics. Optional switches:
%   'skipdelay'/'delayskip' <sec> : skip the first <sec> seconds when
%                                   computing average delays
%   'noplot'                      : compute statistics only, no figure
%   'get_delay'/'getdelay' <vec>  : also return interpolated delay values
%                                   at the given send times (second output)

try
    s = parse_delay_file(delayfile);
catch
    error(lasterr);
end

delayskip=0;
noplot=0;
arg_ptr=1;
delaypoints=[];

s.sn=unwrap_seqno(s.sn);

% parse optional arguments
while arg_ptr+1 <= nargin
    switch lower(varargin{arg_ptr})
        case {'skipdelay', 'delayskip'}
            % skip a number of seconds in the beginning when calculating delays
            delayskip = varargin{arg_ptr+1};
            arg_ptr = arg_ptr + 2;
        case 'noplot'
            noplot=1;
            arg_ptr = arg_ptr + 1;
        case {'get_delay', 'getdelay'}
            % return a vector of delay values for the points in the given vector
            delaypoints = varargin{arg_ptr+1};
            arg_ptr = arg_ptr + 2;
        otherwise
            warning('Unknown switch %s\n', varargin{arg_ptr});
            arg_ptr = arg_ptr + 1;
    end
end

% find lost frames that were covered by one-descriptor decoding
one_desc_ix=find(isnan(s.arrival));
for k=1:length(one_desc_ix)
    ix=find(s.ts==max(s.ts(s.ts(one_desc_ix(k))>s.ts)));
    s.sn(one_desc_ix(k))=s.sn(ix)+1;
    s.pt(one_desc_ix(k))=s.pt(ix);
    s.arrival(one_desc_ix(k))=s.arrival(ix)+s.decode(one_desc_ix(k))-s.decode(ix);
end

% remove duplicate received frames that were never decoded (RED codec)
if length(unique(s.ts(isfinite(s.ts)))) < length(s.ts(isfinite(s.ts)))
    ix=find(isfinite(s.decode));
    s.sn=s.sn(ix);
    s.ts=s.ts(ix);
    s.arrival=s.arrival(ix);
    s.playout_delay=s.playout_delay(ix);
    s.pt=s.pt(ix);
    s.optbuf=s.optbuf(ix);
    % BUG FIX: was "plen=plen(ix);", which referenced an undefined variable
    % instead of filtering the struct field like the other vectors
    s.plen=s.plen(ix);
    s.decode=s.decode(ix);
end

% find non-unique sequence numbers
[~,un_ix]=unique(s.sn);
nonun_ix=setdiff(1:length(s.sn),un_ix);
if ~isempty(nonun_ix)
    warning('RTP sequence numbers are in error');
end

% sort vectors
[s.sn,sort_ix]=sort(s.sn);
s.ts=s.ts(sort_ix);
s.arrival=s.arrival(sort_ix);
s.decode=s.decode(sort_ix);
s.playout_delay=s.playout_delay(sort_ix);
s.pt=s.pt(sort_ix);

send_t=s.ts-s.ts(1);
if length(s.fs)<1
    warning('No info about sample rate found in file. Using default 8000.');
    s.fs(1)=8000;
    s.fschange_ts(1)=min(s.ts);
elseif s.fschange_ts(1)>min(s.ts)
    s.fschange_ts(1)=min(s.ts);
end

% convert timestamp units to ms, one sample-rate region at a time
% (walking from the last region backwards)
end_ix=length(send_t);
for k=length(s.fs):-1:1
    start_ix=find(s.ts==s.fschange_ts(k));
    send_t(start_ix:end_ix)=send_t(start_ix:end_ix)/s.fs(k)*1000;
    s.playout_delay(start_ix:end_ix)=s.playout_delay(start_ix:end_ix)/s.fs(k)*1000;
    s.optbuf(start_ix:end_ix)=s.optbuf(start_ix:end_ix)/s.fs(k)*1000;
    end_ix=start_ix-1;
end

tot_time=max(send_t)-min(send_t);

seq_ix=s.sn-min(s.sn)+1;
send_t=send_t+max(min(s.arrival-send_t),0);

plot_send_t=nan*ones(max(seq_ix),1);
plot_send_t(seq_ix)=send_t;
plot_nw_delay=nan*ones(max(seq_ix),1);
plot_nw_delay(seq_ix)=s.arrival-send_t;

cng_ix=find(s.pt~=13); % find those packets that are not CNG/SID

if noplot==0
    h=plot(plot_send_t/1000,plot_nw_delay);
    set(h,'color',0.75*[1 1 1]);
    hold on
    if any(s.optbuf~=0)
        peak_ix=find(s.optbuf(cng_ix)<0); % peak mode is labeled with negative values
        no_peak_ix=find(s.optbuf(cng_ix)>0); %setdiff(1:length(cng_ix),peak_ix);
        h1=plot(send_t(cng_ix(peak_ix))/1000,...
            s.arrival(cng_ix(peak_ix))+abs(s.optbuf(cng_ix(peak_ix)))-send_t(cng_ix(peak_ix)),...
            'r.');
        h2=plot(send_t(cng_ix(no_peak_ix))/1000,...
            s.arrival(cng_ix(no_peak_ix))+abs(s.optbuf(cng_ix(no_peak_ix)))-send_t(cng_ix(no_peak_ix)),...
            'g.');
        set([h1, h2],'markersize',1)
    end
    %h=plot(send_t(seq_ix)/1000,s.decode+s.playout_delay-send_t(seq_ix));
    h=plot(send_t(cng_ix)/1000,s.decode(cng_ix)+s.playout_delay(cng_ix)-send_t(cng_ix));
    set(h,'linew',1.5);
    hold off
    ax1=axis;
    axis tight
    ax2=axis;
    axis([ax2(1:3) ax1(4)])
end

% calculate delays and other parameters
delayskip_ix = find(send_t-send_t(1)>=delayskip*1000, 1 );

use_ix = intersect(cng_ix,... % use those that are not CNG/SID frames...
    intersect(find(isfinite(s.decode)),... % ... that did arrive ...
    (delayskip_ix:length(s.decode))')); % ... and are sent after delayskip seconds

mean_delay = mean(s.decode(use_ix)+s.playout_delay(use_ix)-send_t(use_ix));
neteq_delay = mean(s.decode(use_ix)+s.playout_delay(use_ix)-s.arrival(use_ix));

Npack=max(s.sn(delayskip_ix:end))-min(s.sn(delayskip_ix:end))+1;
nw_lossrate=(Npack-length(s.sn(delayskip_ix:end)))/Npack;
neteq_lossrate=(length(s.sn(delayskip_ix:end))-length(use_ix))/Npack;

delay_struct=struct('mean_delay',mean_delay,'neteq_delay',neteq_delay,...
    'nw_lossrate',nw_lossrate,'neteq_lossrate',neteq_lossrate,...
    'tot_expand',round(s.tot_expand),'tot_accelerate',round(s.tot_accelerate),...
    'tot_preemptive',round(s.tot_preemptive),'tot_time',tot_time,...
    'filename',delayfile,'units','ms','fs',unique(s.fs));

if not(isempty(delaypoints))
    delayvalues=interp1(send_t(cng_ix),...
        s.decode(cng_ix)+s.playout_delay(cng_ix)-send_t(cng_ix),...
        delaypoints,'nearest',NaN);
else
    delayvalues=[];
end
% SUBFUNCTIONS %
function y=unwrap_seqno(x)
% unwrap_seqno  Undo 16-bit wrap-around in an RTP sequence-number vector:
% whenever consecutive values jump by more than 65000, the tail of the
% vector is shifted by +/-65536 so the sequence becomes continuous.
jumps=find(abs((diff(x)-1))>65000);
while ~isempty(jumps)
    n=jumps(1);
    if x(n+1)-x(n) < 0
        % negative jump
        x(n+1:end)=x(n+1:end)+65536;
    else
        % positive jump
        x(n+1:end)=x(n+1:end)-65536;
    end
    % NOTE(review): the indices returned here are relative to the tail
    % x(n+1:end), but the next iteration uses jumps(1) as an index into the
    % full vector -- confirm the intended behavior when more than one
    % wrap-around is present.
    jumps=find(abs((diff(x(n+1:end))-1))>65000);
end
y=x;
return;

View File

@ -1,47 +0,0 @@
pcmu 0
gsmfr 3
g723 4
dvi4 125
pcma 8
g722 9
cn 13
g729 18
ipcmwb 97
speex8 98
speex16 99
eg711u 100
eg711a 101
ilbc 102
isac 103
isaclc 119
isacswb 104
avt 106
g722_1_16 108
g722_1_24 109
g722_1_32 110
g722_1c_24 84
g722_1c_32 85
g722_1c_48 86
//sc3 111
amr 112
gsmefr 113
g726_16 115
g726_24 116
g726_32 121
red 117
g726_40 118
cn_wb 105
cn_swb32 126
g729_1 107
//g729d 123
amrwb 120
pcm16b 93
pcm16b_wb 94
pcm16b_swb32khz 95
//pcm16b_swb48khz 96
//mpeg4aac 122
silk8 80
silk12 81
silk16 82
silk24 83
melpe 124

View File

@ -1,236 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_MAIN_INTERFACE_WEBRTC_CNG_H_
#define WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_MAIN_INTERFACE_WEBRTC_CNG_H_
#include "typedefs.h"
#ifdef __cplusplus
extern "C" {
#endif
#define WEBRTC_CNG_MAX_LPC_ORDER 12
#define WEBRTC_CNG_MAX_OUTSIZE_ORDER 640
/* Define Error codes */
/* 6100 Encoder */
#define CNG_ENCODER_MEMORY_ALLOCATION_FAILED 6110
#define CNG_ENCODER_NOT_INITIATED 6120
#define CNG_DISALLOWED_LPC_ORDER 6130
#define CNG_DISALLOWED_FRAME_SIZE 6140
#define CNG_DISALLOWED_SAMPLING_FREQUENCY 6150
/* 6200 Decoder */
#define CNG_DECODER_MEMORY_ALLOCATION_FAILED 6210
#define CNG_DECODER_NOT_INITIATED 6220
typedef struct WebRtcCngEncInst CNG_enc_inst;
typedef struct WebRtcCngDecInst CNG_dec_inst;
/****************************************************************************
* WebRtcCng_Version(...)
*
* This function returns the version name (the string buffer must be at
* least 500 characters long)
*
* Output:
* - version : Pointer to character string
*
* Return value : 0 - Ok
* -1 - Error
*/
WebRtc_Word16 WebRtcCng_Version(WebRtc_Word8 *version);
/****************************************************************************
* WebRtcCng_AssignSizeEnc/Dec(...)
*
* These functions get the size needed for storing the instance for encoder
* and decoder, respectively
*
* Input/Output:
* - sizeinbytes : Pointer to integer where the size is returned
*
* Return value : 0
*/
WebRtc_Word16 WebRtcCng_AssignSizeEnc(int *sizeinbytes);
WebRtc_Word16 WebRtcCng_AssignSizeDec(int *sizeinbytes);
/****************************************************************************
* WebRtcCng_AssignEnc/Dec(...)
*
* These functions assign memory for the instances.
*
* Input:
* - CNG_inst_Addr : Address of the memory to assign
* Output:
* - inst : Pointer to the instance that should be created
*
* Return value : 0 - Ok
* -1 - Error
*/
WebRtc_Word16 WebRtcCng_AssignEnc(CNG_enc_inst **inst, void *CNG_inst_Addr);
WebRtc_Word16 WebRtcCng_AssignDec(CNG_dec_inst **inst, void *CNG_inst_Addr);
/****************************************************************************
* WebRtcCng_CreateEnc/Dec(...)
*
* These functions create an instance to the specified structure
*
* Input:
* - XXX_inst : Pointer to created instance that should be created
*
* Return value : 0 - Ok
* -1 - Error
*/
WebRtc_Word16 WebRtcCng_CreateEnc(CNG_enc_inst **cng_inst);
WebRtc_Word16 WebRtcCng_CreateDec(CNG_dec_inst **cng_inst);
/****************************************************************************
* WebRtcCng_InitEnc/Dec(...)
*
* This function initializes an instance
*
* Input:
* - cng_inst : Instance that should be initialized
*
* - fs : 8000 for narrowband and 16000 for wideband
* - interval : generate SID data every interval ms
* - quality : Number of refl. coefs, maximum allowed is 12
*
* Output:
* - cng_inst : Initialized instance
*
* Return value : 0 - Ok
* -1 - Error
*/
WebRtc_Word16 WebRtcCng_InitEnc(CNG_enc_inst *cng_inst,
WebRtc_Word16 fs,
WebRtc_Word16 interval,
WebRtc_Word16 quality);
WebRtc_Word16 WebRtcCng_InitDec(CNG_dec_inst *cng_dec_inst);
/****************************************************************************
* WebRtcCng_FreeEnc/Dec(...)
*
* These functions frees the dynamic memory of a specified instance
*
* Input:
* - cng_inst : Pointer to created instance that should be freed
*
* Return value : 0 - Ok
* -1 - Error
*/
WebRtc_Word16 WebRtcCng_FreeEnc(CNG_enc_inst *cng_inst);
WebRtc_Word16 WebRtcCng_FreeDec(CNG_dec_inst *cng_inst);
/****************************************************************************
* WebRtcCng_Encode(...)
*
* These functions analyze background noise
*
* Input:
* - cng_inst : Pointer to created instance
* - speech : Signal to be analyzed
* - nrOfSamples : Size of speech vector
* - forceSID : not zero to force SID frame and reset
*
* Output:
* - bytesOut : Nr of bytes to transmit, might be 0
*
* Return value : 0 - Ok
* -1 - Error
*/
WebRtc_Word16 WebRtcCng_Encode(CNG_enc_inst *cng_inst,
WebRtc_Word16 *speech,
WebRtc_Word16 nrOfSamples,
WebRtc_UWord8* SIDdata,
WebRtc_Word16 *bytesOut,
WebRtc_Word16 forceSID);
/****************************************************************************
* WebRtcCng_UpdateSid(...)
*
* These functions update the CN state when a new SID packet arrives
*
* Input:
* - cng_inst : Pointer to created instance that should be freed
* - SID : SID packet, all headers removed
* - length : Length in bytes of SID packet
*
* Return value : 0 - Ok
* -1 - Error
*/
WebRtc_Word16 WebRtcCng_UpdateSid(CNG_dec_inst *cng_inst,
WebRtc_UWord8 *SID,
WebRtc_Word16 length);
/****************************************************************************
* WebRtcCng_Generate(...)
*
* These functions generate CN data when needed
*
* Input:
* - cng_inst : Pointer to created instance that should be freed
* - outData : pointer to area to write CN data
* - nrOfSamples : How much data to generate
* - new_period : >0 if a new period of CNG, will reset history
*
* Return value : 0 - Ok
* -1 - Error
*/
WebRtc_Word16 WebRtcCng_Generate(CNG_dec_inst *cng_inst,
WebRtc_Word16 * outData,
WebRtc_Word16 nrOfSamples,
WebRtc_Word16 new_period);
/*****************************************************************************
* WebRtcCng_GetErrorCodeEnc/Dec(...)
*
* These functions can be used to check the error code of a CNG instance. When
* a function returns -1, an error code is set for that instance. The
* functions below extract the code of the last error that occurred in the
* specified instance.
*
* Input:
* - CNG_inst : CNG enc/dec instance
*
* Return value : Error code
*/
WebRtc_Word16 WebRtcCng_GetErrorCodeEnc(CNG_enc_inst *cng_inst);
WebRtc_Word16 WebRtcCng_GetErrorCodeDec(CNG_dec_inst *cng_inst);
#ifdef __cplusplus
}
#endif
#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_MAIN_INTERFACE_WEBRTC_CNG_H_

View File

@ -1,51 +0,0 @@
# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
# Builds libwebrtc_cng (comfort noise generation codec) as an Android
# static library.
LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)

LOCAL_ARM_MODE := arm
LOCAL_MODULE_CLASS := STATIC_LIBRARIES
LOCAL_MODULE := libwebrtc_cng
LOCAL_MODULE_TAGS := optional
LOCAL_GENERATED_SOURCES :=
LOCAL_SRC_FILES := webrtc_cng.c \
    cng_helpfuns.c

# Flags passed to both C and C++ files.
MY_CFLAGS :=
MY_CFLAGS_C :=
MY_DEFS := '-DNO_TCMALLOC' \
    '-DNO_HEAPCHECKER' \
    '-DWEBRTC_TARGET_PC' \
    '-DWEBRTC_LINUX' \
    '-DWEBRTC_ANDROID' \
    '-DANDROID'
LOCAL_CFLAGS := $(MY_CFLAGS_C) $(MY_CFLAGS) $(MY_DEFS)

# Include paths placed before CFLAGS/CPPFLAGS
LOCAL_C_INCLUDES := $(LOCAL_PATH)/../../../../../.. \
    $(LOCAL_PATH)/../interface \
    $(LOCAL_PATH)/../../../../../../common_audio/signal_processing_library/main/interface

# Flags passed to only C++ (and not C) files.
LOCAL_CPPFLAGS :=
LOCAL_LDFLAGS :=
LOCAL_STATIC_LIBRARIES :=
LOCAL_SHARED_LIBRARIES := libdl \
    libstlport
LOCAL_ADDITIONAL_DEPENDENCIES :=

# When building inside the full Android source tree, pull in STLport and emit
# the static library; the NDK build (MY_WEBRTC_NDK_BUILD=true) handles this
# itself.
ifneq ($(MY_WEBRTC_NDK_BUILD),true)
include external/stlport/libstlport.mk
include $(BUILD_STATIC_LIBRARY)
endif

View File

@ -1,42 +0,0 @@
# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.

# GYP build description for the CNG codec library. Depends on the shared
# signal-processing library (spl) and exports its interface directory to
# dependent targets.
{
  'includes': [
    '../../../../../../common_settings.gypi', # Common settings
  ],
  'targets': [
    {
      'target_name': 'CNG',
      'type': '<(library)',
      'dependencies': [
        '../../../../../../common_audio/signal_processing_library/main/source/spl.gyp:spl',
      ],
      'include_dirs': [
        '../interface',
      ],
      # Anything that links against CNG also gets the public headers.
      'direct_dependent_settings': {
        'include_dirs': [
          '../interface',
        ],
      },
      'sources': [
        '../interface/webrtc_cng.h',
        'webrtc_cng.c',
        'cng_helpfuns.c',
        'cng_helpfuns.h',
      ],
    },
  ],
}
# Local Variables:
# tab-width:2
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=2 shiftwidth=2:

View File

@ -1,64 +0,0 @@
/*
 * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */
#include "webrtc_cng.h"
#include "signal_processing_library.h"
#include "typedefs.h"
#include "cng_helpfuns.h"

#ifdef __cplusplus
extern "C" {
#endif

/*
 * Convert reflection coefficients to LPC (direct-form) polynomial
 * coefficients using the step-up (Levinson) recursion, in fixed point.
 *
 * Input:
 *  - k        : reflection coefficients, Q15
 *  - useOrder : number of reflection coefficients to use
 * Output:
 *  - a        : useOrder+1 polynomial coefficients, Q12 (a[0] == 4096 == 1.0)
 *
 * Each new coefficient is the previous-order coefficient plus the mirrored
 * coefficient scaled by the current reflection coefficient, with rounding
 * (+16384 before >>15 on a Q15*Q12 product).
 */
void WebRtcCng_K2a16(
    WebRtc_Word16 *k, /* Q15. */
    int useOrder,
    WebRtc_Word16 *a /* Q12. */
)
{
    /* 'any' holds the next-order coefficient set while it is being built
     * from 'a', so the in-place update does not corrupt its own inputs. */
    WebRtc_Word16 any[WEBRTC_SPL_MAX_LPC_ORDER+1];
    WebRtc_Word16 *aptr, *aptr2, *anyptr;
    G_CONST WebRtc_Word16 *kptr;
    int m, i;

    kptr = k;
    *a = 4096; /* i.e., (Word16_MAX >> 3)+1 */
    *any = *a;
    /* a[1] = k[0] rounded from Q15 down to Q12. */
    a[1] = (*k+4) >> 3;
    for( m=1; m<useOrder; m++ )
    {
        kptr++;
        aptr = a;
        aptr++;             /* Start at a[1]; a[0] stays 1.0 (4096). */
        aptr2 = &a[m];      /* Walks backwards: the mirrored coefficient. */
        anyptr = any;
        anyptr++;
        /* New highest-order coefficient is k[m] rounded Q15 -> Q12. */
        any[m+1] = (*kptr+4) >> 3;
        for( i=0; i<m; i++ ) {
            /* any[j] = a[j] + round(a[m-j+1] * k[m]) with Q15 rounding. */
            *anyptr++ = (*aptr++) + (WebRtc_Word16)( (( (WebRtc_Word32)(*aptr2--) * (WebRtc_Word32)*kptr )+16384) >> 15);
        }
        /* Copy the m+2 coefficients of the new order back into 'a'. */
        aptr = a;
        anyptr = any;
        for( i=0; i<(m+2); i++ ){
            *aptr++ = *anyptr++;
        }
    }
}

#ifdef __cplusplus
}
#endif

View File

@ -1,28 +0,0 @@
/*
 * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

/*
 * Internal helper declarations for the CNG codec.
 * NOTE: this header relies on WebRtc_Word16/WebRtc_Word32 and
 * WEBRTC_CNG_MAX_LPC_ORDER being in scope (include webrtc_cng.h /
 * typedefs.h first).
 */
#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_MAIN_SOURCE_CNG_HELPFUNS_H_
#define WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_MAIN_SOURCE_CNG_HELPFUNS_H_

#ifdef __cplusplus
extern "C" {
#endif

/* BUG FIX: this declaration previously sat outside the extern "C" block, so
 * C++ translation units would look for a C++-mangled symbol that the C
 * definition never provides. Moved inside to give it C linkage everywhere. */
extern WebRtc_Word32 lpc_lagwinTbl_fixw32[WEBRTC_CNG_MAX_LPC_ORDER + 1];

/* Reflection-to-LPC coefficient conversion; see cng_helpfuns.c. */
void WebRtcCng_K2a16(WebRtc_Word16 *k, int useOrder, WebRtc_Word16 *a);

#ifdef __cplusplus
}
#endif

#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_MAIN_SOURCE_CNG_HELPFUNS_H_

View File

@ -1,735 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <string.h>
#include <stdlib.h>
#include "webrtc_cng.h"
#include "signal_processing_library.h"
#include "cng_helpfuns.h"
#include "stdio.h"
/* Decoder state. Field order and sizes must not change: the AssignSize/
 * Assign functions expose the raw size of this struct to callers. */
typedef struct WebRtcCngDecInst_t_ {
    WebRtc_UWord32 dec_seed;          /* PRNG state for the noise excitation. */
    WebRtc_Word32 dec_target_energy;  /* Energy decoded from the latest SID. */
    WebRtc_Word32 dec_used_energy;    /* Smoothed energy actually in use. */
    WebRtc_Word16 dec_target_reflCoefs[WEBRTC_CNG_MAX_LPC_ORDER+1]; /* Q15. */
    WebRtc_Word16 dec_used_reflCoefs[WEBRTC_CNG_MAX_LPC_ORDER+1];   /* Q15, smoothed. */
    WebRtc_Word16 dec_filtstate[WEBRTC_CNG_MAX_LPC_ORDER+1];        /* AR filter state. */
    WebRtc_Word16 dec_filtstateLow[WEBRTC_CNG_MAX_LPC_ORDER+1];     /* Low word of filter state. */
    WebRtc_Word16 dec_Efiltstate[WEBRTC_CNG_MAX_LPC_ORDER+1];
    WebRtc_Word16 dec_EfiltstateLow[WEBRTC_CNG_MAX_LPC_ORDER+1];
    WebRtc_Word16 dec_order;          /* LPC order from the latest SID (length-1). */
    WebRtc_Word16 dec_target_scale_factor; /*Q29*/ /* NOTE(review): WebRtcCng_Generate
                                              shifts these Q13->Q15; the Q29 tag
                                              looks stale -- confirm. */
    WebRtc_Word16 dec_used_scale_factor; /*Q29*/
    WebRtc_Word16 target_scale_factor; /* Q13 */
    WebRtc_Word16 errorcode;          /* Last error, read via GetErrorCodeDec. */
    WebRtc_Word16 initflag;           /* 1 once InitDec has run. */
} WebRtcCngDecInst_t;

/* Encoder state; same layout caveat as the decoder struct above. */
typedef struct WebRtcCngEncInst_t_ {
    WebRtc_Word16 enc_nrOfCoefs;      /* LPC order ("quality" from InitEnc). */
    WebRtc_Word16 enc_sampfreq;       /* Sampling frequency in Hz. */
    WebRtc_Word16 enc_interval;       /* SID emission interval in ms. */
    WebRtc_Word16 enc_msSinceSID;     /* ms elapsed since the last SID was sent. */
    WebRtc_Word32 enc_Energy;         /* Smoothed frame energy. */
    WebRtc_Word16 enc_reflCoefs[WEBRTC_CNG_MAX_LPC_ORDER+1];  /* Q15, smoothed. */
    WebRtc_Word32 enc_corrVector[WEBRTC_CNG_MAX_LPC_ORDER+1];
    WebRtc_Word16 enc_filtstate[WEBRTC_CNG_MAX_LPC_ORDER+1];
    WebRtc_Word16 enc_filtstateLow[WEBRTC_CNG_MAX_LPC_ORDER+1];
    WebRtc_UWord32 enc_seed;
    WebRtc_Word16 errorcode;          /* Last error, read via GetErrorCodeEnc. */
    WebRtc_Word16 initflag;           /* 1 once InitEnc has run. */
} WebRtcCngEncInst_t;

/* Energy for each dBov level 0..93 (94 entries); WebRtcCng_Encode linearly
 * searches this descending table to quantize the frame energy into the SID
 * payload's first byte. */
const WebRtc_Word32 WebRtcCng_kDbov[94]={
    1081109975, 858756178, 682134279, 541838517, 430397633, 341876992,
    271562548, 215709799, 171344384, 136103682, 108110997, 85875618,
    68213428, 54183852, 43039763, 34187699, 27156255, 21570980,
    17134438, 13610368, 10811100, 8587562, 6821343, 5418385,
    4303976, 3418770, 2715625, 2157098, 1713444, 1361037,
    1081110, 858756, 682134, 541839, 430398, 341877,
    271563, 215710, 171344, 136104, 108111, 85876,
    68213, 54184, 43040, 34188, 27156, 21571,
    17134, 13610, 10811, 8588, 6821, 5418,
    4304, 3419, 2716, 2157, 1713, 1361,
    1081, 859, 682, 542, 430, 342,
    272, 216, 171, 136, 108, 86,
    68, 54, 43, 34, 27, 22,
    17, 14, 11, 9, 7, 5,
    4, 3, 3, 2, 2, 1,
    1, 1, 1, 1
};

/* Q15 bandwidth-expansion window applied per-lag to the autocorrelation in
 * WebRtcCng_Encode before Levinson-Durbin. */
const WebRtc_Word16 WebRtcCng_kCorrWindow[WEBRTC_CNG_MAX_LPC_ORDER] = {
    32702, 32636, 32570, 32505, 32439, 32374,
    32309, 32244, 32179, 32114, 32049, 31985
};
/****************************************************************************
 * WebRtcCng_Version(...)
 *
 * Writes the codec version string into the caller-supplied buffer. The
 * buffer must be large enough (the contract asks for at least 500
 * characters).
 *
 * Output:
 *      - version   : NUL-terminated version string ("1.2.0\n")
 *
 * Return value     : 0 - Ok
 *                   -1 - Error
 */
WebRtc_Word16 WebRtcCng_Version(WebRtc_Word8 *version)
{
    /* Copy the literal including its terminating NUL byte. */
    static const char kVersion[] = "1.2.0\n";
    memcpy((char *)version, kVersion, sizeof(kVersion));
    return 0;
}
/****************************************************************************
 * WebRtcCng_AssignSizeEnc/Dec(...)
 *
 * Report the number of bytes the caller must provide to WebRtcCng_AssignEnc
 * or WebRtcCng_AssignDec, respectively.
 *
 * Input/Output:
 *      - sizeinbytes : receives the required size in bytes
 *
 * Return value       : 0
 */
WebRtc_Word16 WebRtcCng_AssignSizeEnc(int *sizeinbytes)
{
    /* sizeof(WebRtc_Word16) == 2, so this reduces to
     * sizeof(WebRtcCngEncInst_t); the expression is kept to mirror the
     * historical contract. */
    *sizeinbytes = (int)(sizeof(WebRtcCngEncInst_t) * 2 / sizeof(WebRtc_Word16));
    return 0;
}

WebRtc_Word16 WebRtcCng_AssignSizeDec(int *sizeinbytes)
{
    *sizeinbytes = (int)(sizeof(WebRtcCngDecInst_t) * 2 / sizeof(WebRtc_Word16));
    return 0;
}
/****************************************************************************
 * WebRtcCng_AssignEnc/Dec(...)
 *
 * Place an encoder/decoder instance into caller-provided memory (at least
 * as large as reported by the AssignSize functions).
 *
 * Input:
 *      - CNG_inst_Addr : caller-provided memory block
 * Output:
 *      - inst          : set to point at the new instance
 *
 * Return value         : 0 - Ok
 *                       -1 - Error (no memory provided)
 */
WebRtc_Word16 WebRtcCng_AssignEnc(CNG_enc_inst **inst, void *CNG_inst_Addr)
{
    WebRtcCngEncInst_t *enc;

    if (CNG_inst_Addr == NULL) {
        /* The memory could not be allocated. */
        return -1;
    }
    *inst = (CNG_enc_inst *)CNG_inst_Addr;
    enc = (WebRtcCngEncInst_t *)*inst;
    enc->errorcode = 0;
    enc->initflag = 0;   /* Caller must still run WebRtcCng_InitEnc. */
    return 0;
}

WebRtc_Word16 WebRtcCng_AssignDec(CNG_dec_inst **inst, void *CNG_inst_Addr)
{
    WebRtcCngDecInst_t *dec;

    if (CNG_inst_Addr == NULL) {
        /* The memory could not be allocated. */
        return -1;
    }
    *inst = (CNG_dec_inst *)CNG_inst_Addr;
    dec = (WebRtcCngDecInst_t *)*inst;
    dec->errorcode = 0;
    dec->initflag = 0;   /* Caller must still run WebRtcCng_InitDec. */
    return 0;
}
/****************************************************************************
 * WebRtcCng_CreateEnc/Dec(...)
 *
 * Heap-allocate an encoder/decoder instance. The instance must still be
 * initialized (WebRtcCng_InitEnc/Dec) before use and released with
 * WebRtcCng_FreeEnc/Dec.
 *
 * Output:
 *      - cng_inst : receives the new instance on success; unchanged on error
 *
 * Return value    : 0 - Ok
 *                  -1 - Error (out of memory)
 */
WebRtc_Word16 WebRtcCng_CreateEnc(CNG_enc_inst **cng_inst)
{
    WebRtcCngEncInst_t *enc =
        (WebRtcCngEncInst_t *)malloc(sizeof(WebRtcCngEncInst_t));
    /* BUG FIX: the original tested 'cng_inst' (the out-parameter, which is
     * never NULL here) instead of the malloc() result, so an allocation
     * failure stored NULL and still returned success. */
    if (enc == NULL) {
        /* The memory could not be allocated. */
        return -1;
    }
    enc->errorcode = 0;
    enc->initflag = 0;
    *cng_inst = (CNG_enc_inst *)enc;
    return 0;
}

WebRtc_Word16 WebRtcCng_CreateDec(CNG_dec_inst **cng_inst)
{
    WebRtcCngDecInst_t *dec =
        (WebRtcCngDecInst_t *)malloc(sizeof(WebRtcCngDecInst_t));
    /* BUG FIX: same malloc-result check as in WebRtcCng_CreateEnc. */
    if (dec == NULL) {
        /* The memory could not be allocated. */
        return -1;
    }
    dec->errorcode = 0;
    dec->initflag = 0;
    *cng_inst = (CNG_dec_inst *)dec;
    return 0;
}
/****************************************************************************
 * WebRtcCng_InitEnc(...)
 *
 * Initializes an encoder instance.
 *
 * Input:
 *      - cng_inst : instance to initialize
 *      - fs       : sampling frequency in Hz (8000 narrowband,
 *                   16000 wideband)
 *      - interval : generate SID data every 'interval' ms
 *      - quality  : LPC order, 0..WEBRTC_CNG_MAX_LPC_ORDER
 *
 * Output:
 *      - cng_inst : initialized instance
 *
 * Return value    : 0 - Ok
 *                  -1 - Error (error code stored in the instance)
 */
WebRtc_Word16 WebRtcCng_InitEnc(CNG_enc_inst *cng_inst,
                                WebRtc_Word16 fs,
                                WebRtc_Word16 interval,
                                WebRtc_Word16 quality)
{
    WebRtcCngEncInst_t *inst = (WebRtcCngEncInst_t *)cng_inst;

    /* Zero the whole state (this also clears enc_reflCoefs, enc_corrVector,
     * enc_msSinceSID and enc_Energy, so no per-field zeroing is needed). */
    memset(inst, 0, sizeof(WebRtcCngEncInst_t));

    /* BUG FIX: the original only rejected quality > max, letting a negative
     * LPC order through to the analysis loops and SID sizing. */
    if (quality > WEBRTC_CNG_MAX_LPC_ORDER || quality < 0) {
        inst->errorcode = CNG_DISALLOWED_LPC_ORDER;
        return -1;
    }
    if (fs <= 0) {
        inst->errorcode = CNG_DISALLOWED_SAMPLING_FREQUENCY;
        return -1;
    }

    inst->enc_sampfreq = fs;
    inst->enc_interval = interval;
    inst->enc_nrOfCoefs = quality;
    inst->enc_seed = 7777;  /* Fixed seed, kept for reproducible debugging. */
    inst->initflag = 1;
    return 0;
}
/*
 * Initializes a decoder instance: clears all state and sets the default
 * LPC order and PRNG seed.
 *
 * Return value : 0 - Ok
 */
WebRtc_Word16 WebRtcCng_InitDec(CNG_dec_inst *cng_inst)
{
    WebRtcCngDecInst_t *self = (WebRtcCngDecInst_t *)cng_inst;

    /* Zero the whole state; this covers every coefficient array, both
     * scale factors and the energy fields. */
    memset(self, 0, sizeof(WebRtcCngDecInst_t));

    self->dec_seed = 7777;  /* Fixed seed, kept for reproducible debugging. */
    self->dec_order = 5;    /* Default order until a SID packet arrives. */
    self->initflag = 1;
    return 0;
}
/****************************************************************************
 * WebRtcCng_FreeEnc/Dec(...)
 *
 * Release an instance previously allocated with WebRtcCng_CreateEnc/Dec.
 * Must not be used on memory handed in via the Assign functions.
 *
 * Input:
 *      - cng_inst : instance to free
 *
 * Return value    : 0 - Ok
 */
WebRtc_Word16 WebRtcCng_FreeEnc(CNG_enc_inst *cng_inst)
{
    free(cng_inst);
    return 0;
}

WebRtc_Word16 WebRtcCng_FreeDec(CNG_dec_inst *cng_inst)
{
    free(cng_inst);
    return 0;
}
/****************************************************************************
 * WebRtcCng_Encode(...)
 *
 * Analyzes a frame of background noise: estimates its energy and LPC
 * (reflection) coefficients, smooths them into the encoder state, and —
 * when the SID interval has elapsed or forceSID is set — quantizes them
 * into an RFC 3389-style SID payload.
 *
 * Input:
 *      - cng_inst    : initialized encoder instance
 *      - speech      : signal (noise) to be analyzed
 *      - nrOfSamples : size of speech vector (max WEBRTC_CNG_MAX_OUTSIZE_ORDER)
 *      - forceSID    : non-zero forces a SID this frame and uses
 *                      instantaneous (unsmoothed) parameters
 *
 * Output:
 *      - SIDdata     : SID payload (energy byte + quantized coefficients)
 *      - bytesOut    : number of bytes written to SIDdata, may be 0
 *
 * Return value       : number of SID bytes (0 if no SID produced)
 *                     -1 - Error
 */
WebRtc_Word16 WebRtcCng_Encode(CNG_enc_inst *cng_inst,
                               WebRtc_Word16 *speech,
                               WebRtc_Word16 nrOfSamples,
                               WebRtc_UWord8* SIDdata,
                               WebRtc_Word16* bytesOut,
                               WebRtc_Word16 forceSID)
{
    WebRtcCngEncInst_t* inst = (WebRtcCngEncInst_t*)cng_inst;
    WebRtc_Word16 arCoefs[WEBRTC_CNG_MAX_LPC_ORDER+1];
    WebRtc_Word32 corrVector[WEBRTC_CNG_MAX_LPC_ORDER+1];
    WebRtc_Word16 refCs[WEBRTC_CNG_MAX_LPC_ORDER+1];
    WebRtc_Word16 hanningW[WEBRTC_CNG_MAX_OUTSIZE_ORDER];
    WebRtc_Word16 ReflBeta = 19661;     /* 0.6 in Q15: weight on history. */
    WebRtc_Word16 ReflBetaComp = 13107; /* 0.4 in Q15: weight on new frame. */
    WebRtc_Word32 outEnergy;
    int outShifts;
    int i, stab;
    int acorrScale;
    int index;
    WebRtc_Word32 diff;
    WebRtc_Word16 ind, factor;
    WebRtc_Word32 *bptr, blo, bhi;
    WebRtc_Word16 negate;
    const WebRtc_Word16 *aptr;
    WebRtc_Word16 speechBuf[WEBRTC_CNG_MAX_OUTSIZE_ORDER];

    /* Check if encoder initiated. */
    if (inst->initflag != 1) {
        inst->errorcode = CNG_ENCODER_NOT_INITIATED;
        return (-1);
    }
    /* Check framesize. */
    if (nrOfSamples > WEBRTC_CNG_MAX_OUTSIZE_ORDER) {
        inst->errorcode = CNG_DISALLOWED_FRAME_SIZE;
        return (-1);
    }

    /* Work on a local copy; the windowing below modifies the buffer. */
    for (i = 0; i < nrOfSamples; i++) {
        speechBuf[i] = speech[i];
    }

    factor = nrOfSamples;

    /* Calculate energy and a coefficients. The shift normalization keeps
     * the energy in range before dividing by the (shift-adjusted) sample
     * count. */
    outEnergy = WebRtcSpl_Energy(speechBuf, nrOfSamples, &outShifts);
    while (outShifts > 0) {
        if (outShifts > 5) { /* We can only do 5 shifts without destroying accuracy in division factor. */
            outEnergy <<= (outShifts - 5);
            outShifts = 5;
        }
        else {
            factor /= 2;
            outShifts--;
        }
    }
    outEnergy = WebRtcSpl_DivW32W16(outEnergy, factor);

    if (outEnergy > 1) {
        /* Create Hanning Window (compute the first half, mirror the rest). */
        WebRtcSpl_GetHanningWindow(hanningW, nrOfSamples/2);
        for (i = 0; i < (nrOfSamples/2); i++)
            hanningW[nrOfSamples-i-1] = hanningW[i];

        WebRtcSpl_ElementwiseVectorMult(speechBuf, hanningW, speechBuf, nrOfSamples, 14);

        WebRtcSpl_AutoCorrelation(speechBuf, nrOfSamples, inst->enc_nrOfCoefs, corrVector, &acorrScale);

        /* Guard against a zero zeroth lag (all-zero windowed frame). */
        if (*corrVector == 0)
            *corrVector = WEBRTC_SPL_WORD16_MAX;

        /* Adds the bandwidth expansion. */
        aptr = WebRtcCng_kCorrWindow;
        bptr = corrVector;

        // (zzz) lpc16_1 = 17+1+820+2+2 = 842 (ordo2=700)
        for (ind = 0; ind < inst->enc_nrOfCoefs; ind++)
        {
            // The below code multiplies the 16 b corrWindow values (Q15) with
            // the 32 b corrvector (Q0) and shifts the result down 15 steps.
            negate = *bptr < 0;
            if (negate)
                *bptr = -*bptr;

            blo = (WebRtc_Word32)*aptr * (*bptr & 0xffff);
            bhi = ((blo >> 16) & 0xffff) + ((WebRtc_Word32)(*aptr++) * ((*bptr >> 16) & 0xffff));
            blo = (blo & 0xffff) | ((bhi & 0xffff) << 16);

            *bptr = (( (bhi >> 16) & 0x7fff) << 17) | ((WebRtc_UWord32)blo >> 15);
            if (negate)
                *bptr = -*bptr;
            bptr++;
        }
        // end of bandwidth expansion

        stab = WebRtcSpl_LevinsonDurbin(corrVector, arCoefs, refCs, inst->enc_nrOfCoefs);

        if (!stab) {
            // disregard from this frame
            *bytesOut = 0;
            return (0);
        }
    }
    else {
        /* Frame is (near) silent: treat all reflection coefficients as 0. */
        for (i = 0; i < inst->enc_nrOfCoefs; i++)
            refCs[i] = 0;
    }

    if (forceSID) {
        /* Read instantaneous values instead of averaged. */
        for (i = 0; i < inst->enc_nrOfCoefs; i++)
            inst->enc_reflCoefs[i] = refCs[i];
        inst->enc_Energy = outEnergy;
    }
    else {
        /* Average history with new values: coefs 0.6/0.4, energy 0.75/0.25. */
        for (i = 0; i < (inst->enc_nrOfCoefs); i++) {
            inst->enc_reflCoefs[i] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(inst->enc_reflCoefs[i], ReflBeta, 15);
            inst->enc_reflCoefs[i] += (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(refCs[i], ReflBetaComp, 15);
        }
        inst->enc_Energy = (outEnergy >> 2) + (inst->enc_Energy >> 1) + (inst->enc_Energy >> 2);
    }

    /* Floor the energy so the dBov search below always terminates sanely. */
    if (inst->enc_Energy < 1) {
        inst->enc_Energy = 1;
    }

    if ((inst->enc_msSinceSID > (inst->enc_interval - 1)) || forceSID) {
        /* Search for best dbov value. */
        /* Clumsy linear search that can be optimized since database is sorted. */
        index = 0;
        diff = WEBRTC_SPL_ABS_W32(inst->enc_Energy - WebRtcCng_kDbov[index]);
        for (i = 1; i < 93; i++) {
            /* Always round downwards. */
            if ((inst->enc_Energy - WebRtcCng_kDbov[i]) > 0) {
                index = i;
                break;
            }
        }
        /* NOTE(review): if the loop completes (i == 93) with no match, this
         * emits index 94, one past the last table entry; the decoder clamps
         * SID[0] to 93 on receipt -- confirm this is the intended encoding
         * of "quieter than the table minimum". */
        if ((i == 93) && (index == 0))
            index = 94;
        SIDdata[0] = index;

        /* Quantize coefs with tweak for WebRtc implementation of RFC3389:
         * the +127 offset is only applied for orders below the maximum. */
        if (inst->enc_nrOfCoefs == WEBRTC_CNG_MAX_LPC_ORDER) {
            for (i = 0; i < inst->enc_nrOfCoefs; i++) {
                SIDdata[i+1] = ((inst->enc_reflCoefs[i] + 128) >> 8); /* Q15 to Q7*/ /* +127 */
            }
        } else {
            for (i = 0; i < inst->enc_nrOfCoefs; i++) {
                SIDdata[i+1] = (127 + ((inst->enc_reflCoefs[i] + 128) >> 8)); /* Q15 to Q7*/ /* +127 */
            }
        }
        inst->enc_msSinceSID = 0;
        *bytesOut = inst->enc_nrOfCoefs + 1;

        inst->enc_msSinceSID += (1000 * nrOfSamples) / inst->enc_sampfreq;
        return (inst->enc_nrOfCoefs + 1);
    } else {
        inst->enc_msSinceSID += (1000 * nrOfSamples) / inst->enc_sampfreq;
        *bytesOut = 0;
        return (0);
    }
}
/****************************************************************************
 * WebRtcCng_UpdateSid(...)
 *
 * Updates the comfort-noise target state from a newly received SID packet:
 * decodes the energy byte through the dBov table (scaled to 75%) and
 * de-quantizes the reflection coefficients from Q7 back to Q15.
 *
 * Input:
 *      - cng_inst : initialized decoder instance
 *      - SID      : SID packet, all headers removed.
 *                   NOTE(review): SID[0] is clamped in place, so the
 *                   caller's buffer may be modified -- confirm callers
 *                   tolerate this.
 *      - length   : length in bytes of SID packet
 *
 * Return value    : 0 - Ok
 *                  -1 - Error
 */
WebRtc_Word16 WebRtcCng_UpdateSid(CNG_dec_inst *cng_inst,
                                  WebRtc_UWord8 *SID,
                                  WebRtc_Word16 length)
{
    WebRtcCngDecInst_t* inst = (WebRtcCngDecInst_t*)cng_inst;
    WebRtc_Word16 refCs[WEBRTC_CNG_MAX_LPC_ORDER];
    WebRtc_Word32 targetEnergy;
    int i;

    if (inst->initflag != 1) {
        inst->errorcode = CNG_DECODER_NOT_INITIATED;
        return (-1);
    }

    /* Throw away reflection coefficients of higher order than we can handle. */
    if (length > (WEBRTC_CNG_MAX_LPC_ORDER + 1))
        length = WEBRTC_CNG_MAX_LPC_ORDER + 1;

    inst->dec_order = length - 1;

    /* Clamp the energy index to the last valid dBov table entry. */
    if (SID[0] > 93)
        SID[0] = 93;
    targetEnergy = WebRtcCng_kDbov[SID[0]];

    /* Take down target energy to 75% (>>1 gives 50%, +>>2 adds 25%). */
    targetEnergy = targetEnergy >> 1;
    targetEnergy += targetEnergy >> 2;

    inst->dec_target_energy = targetEnergy;

    /* Reconstruct coeffs with tweak for WebRtc implementation of RFC3389:
     * the 127 offset is only removed for orders below the maximum (mirrors
     * the encoder-side quantization). */
    if (inst->dec_order == WEBRTC_CNG_MAX_LPC_ORDER) {
        for (i = 0; i < (inst->dec_order); i++) {
            refCs[i] = SID[i+1] << 8; /* Q7 to Q15*/
            inst->dec_target_reflCoefs[i] = refCs[i];
        }
    } else {
        for (i = 0; i < (inst->dec_order); i++) {
            refCs[i] = (SID[i+1] - 127) << 8; /* Q7 to Q15*/
            inst->dec_target_reflCoefs[i] = refCs[i];
        }
    }

    /* Zero any coefficients above the received order. */
    for (i = (inst->dec_order); i < WEBRTC_CNG_MAX_LPC_ORDER; i++) {
        refCs[i] = 0;
        inst->dec_target_reflCoefs[i] = refCs[i];
    }
    return (0);
}
/****************************************************************************
 * WebRtcCng_Generate(...)
 *
 * Generates comfort-noise samples: smooths the used parameters toward the
 * SID targets, builds the LPC polynomial, creates scaled random excitation
 * and runs it through the AR synthesis filter.
 *
 * Input:
 *      - cng_inst    : initialized decoder instance
 *      - nrOfSamples : number of samples to generate
 *                      (max WEBRTC_CNG_MAX_OUTSIZE_ORDER)
 *      - new_period  : non-zero right after a new SID; uses faster
 *                      smoothing (0.6/0.4 instead of 0.8/0.2)
 *
 * Output:
 *      - outData     : generated comfort-noise samples
 *
 * Return value       : 0 - Ok
 *                     -1 - Error
 */
WebRtc_Word16 WebRtcCng_Generate(CNG_dec_inst *cng_inst,
                                 WebRtc_Word16 *outData,
                                 WebRtc_Word16 nrOfSamples,
                                 WebRtc_Word16 new_period)
{
    WebRtcCngDecInst_t* inst = (WebRtcCngDecInst_t*)cng_inst;
    int i;
    WebRtc_Word16 excitation[WEBRTC_CNG_MAX_OUTSIZE_ORDER];
    WebRtc_Word16 low[WEBRTC_CNG_MAX_OUTSIZE_ORDER];
    WebRtc_Word16 lpPoly[WEBRTC_CNG_MAX_LPC_ORDER+1];
    WebRtc_Word16 ReflBetaStd = 26214;      /* 0.8 in q15. */
    WebRtc_Word16 ReflBetaCompStd = 6553;   /* 0.2 in q15. */
    WebRtc_Word16 ReflBetaNewP = 19661;     /* 0.6 in q15. */
    WebRtc_Word16 ReflBetaCompNewP = 13107; /* 0.4 in q15. */
    WebRtc_Word16 Beta, BetaC, tmp1, tmp2, tmp3;
    WebRtc_Word32 targetEnergy;
    WebRtc_Word16 En;
    WebRtc_Word16 temp16;

    if (nrOfSamples > WEBRTC_CNG_MAX_OUTSIZE_ORDER) {
        inst->errorcode = CNG_DISALLOWED_FRAME_SIZE;
        return (-1);
    }

    /* Pick smoothing weights; converge faster right after a new SID. */
    if (new_period) {
        inst->dec_used_scale_factor = inst->dec_target_scale_factor;
        Beta = ReflBetaNewP;
        BetaC = ReflBetaCompNewP;
    } else {
        Beta = ReflBetaStd;
        BetaC = ReflBetaCompStd;
    }

    /* Smooth the scale factor toward its target in Q15, then back to Q13.
     * Here we use a 0.5 weighting, should possibly be modified to 0.6. */
    tmp1 = inst->dec_used_scale_factor << 2;   /* Q13->Q15 */
    tmp2 = inst->dec_target_scale_factor << 2; /* Q13->Q15 */
    tmp3 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(tmp1, Beta, 15);
    tmp3 += (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(tmp2, BetaC, 15);
    inst->dec_used_scale_factor = tmp3 >> 2; /* Q15->Q13 */

    /* Energy: fixed 50/50 mix of used and target. */
    inst->dec_used_energy = inst->dec_used_energy >> 1;
    inst->dec_used_energy += inst->dec_target_energy >> 1;

    /* Do the same smoothing for the reflection coeffs. */
    for (i = 0; i < WEBRTC_CNG_MAX_LPC_ORDER; i++) {
        inst->dec_used_reflCoefs[i] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(inst->dec_used_reflCoefs[i], Beta, 15);
        inst->dec_used_reflCoefs[i] += (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(inst->dec_target_reflCoefs[i], BetaC, 15);
    }

    /* Compute the polynomial coefficients (K -> A, Q15 -> Q12). */
    WebRtcCng_K2a16(inst->dec_used_reflCoefs, WEBRTC_CNG_MAX_LPC_ORDER, lpPoly);

    /***/

    targetEnergy = inst->dec_used_energy;

    // Calculate scaling factor based on filter energy:
    // E = prod(1 - k_i^2) in Q13, accumulated one reflection coef at a time.
    En = 8192; // 1.0 in Q13
    for (i = 0; i < (WEBRTC_CNG_MAX_LPC_ORDER); i++) {
        // Floating point value for reference:
        // E *= 1.0 - ((float)k[i]/32768.0)^2;
        // Same in fixed point:
        temp16 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(inst->dec_used_reflCoefs[i], inst->dec_used_reflCoefs[i], 15); // K(i).^2 in Q15
        temp16 = 0x7fff - temp16; // 1 - K(i).^2 in Q15
        En = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(En, temp16, 15);
    }

    // float scaling = sqrt(E * inst->dec_target_energy / (1 << 24));
    // Calculate sqrt(En * target_energy / excitation energy).
    // NOTE(review): the Q bookkeeping here (shift by 6, *3>>1 as a sqrt(2)
    // estimate, final >>12) is carried over verbatim -- confirm against the
    // float reference above before changing anything.
    targetEnergy = WebRtcSpl_Sqrt(inst->dec_used_energy);

    En = (WebRtc_Word16)WebRtcSpl_Sqrt(En) << 6; // We are missing a factor sqrt(2) here.
    En = (En * 3) >> 1; // 1.5 estimates sqrt(2).
    inst->dec_used_scale_factor = (WebRtc_Word16)((En * targetEnergy) >> 12);

    /***/

    /* Generate excitation. */
    /* Excitation energy per sample is 2.^24 - Q13 N(0,1). */
    for (i = 0; i < nrOfSamples; i++) {
        excitation[i] = WebRtcSpl_RandN(&inst->dec_seed) >> 1;
    }

    /* Scale to correct energy. */
    WebRtcSpl_ScaleVector(excitation, excitation, inst->dec_used_scale_factor, nrOfSamples, 13);

    /* AR synthesis filter; 'low' carries the low-word filter state. */
    WebRtcSpl_FilterAR(
        lpPoly,      /* Coefficients in Q12 */
        WEBRTC_CNG_MAX_LPC_ORDER+1,
        excitation,  /* Speech samples */
        nrOfSamples,
        inst->dec_filtstate, /* State preservation */
        WEBRTC_CNG_MAX_LPC_ORDER,
        inst->dec_filtstateLow, /* State preservation */
        WEBRTC_CNG_MAX_LPC_ORDER,
        outData,     /* Filtered speech samples */
        low,
        nrOfSamples
    );
    return (0);
}
/****************************************************************************
 * WebRtcCng_GetErrorCodeEnc/Dec(...)
 *
 * Check the error code of a CNG instance. When a function returns -1,
 * an error code is stored in the instance; these accessors return the
 * code of the last error that occurred.
 *
 * Input:
 *      - cng_inst : CNG enc/dec instance
 *
 * Return value    : error code
 */
WebRtc_Word16 WebRtcCng_GetErrorCodeEnc(CNG_enc_inst *cng_inst)
{
    return ((WebRtcCngEncInst_t *)cng_inst)->errorcode;
}

WebRtc_Word16 WebRtcCng_GetErrorCodeDec(CNG_dec_inst *cng_inst)
{
    return ((WebRtcCngDecInst_t *)cng_inst)->errorcode;
}

View File

@ -1,225 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* CNG.cpp : Defines the entry point for the console application.
*/
#include <stdlib.h>
#include <string.h>
#include "stdafx.h"
#include "webrtc_cng.h"
#include "webrtc_vad.h"
CNG_enc_inst *e_inst;
CNG_dec_inst *d_inst;
VadInst *vinst;
//#define ASSIGN
short anaSpeech[WEBRTC_CNG_MAX_OUTSIZE_ORDER], genSpeech[WEBRTC_CNG_MAX_OUTSIZE_ORDER], state[WEBRTC_CNG_MAX_OUTSIZE_ORDER];
unsigned char SIDpkt[114];
int main(int argc, char* argv[])
{
FILE * infile, *outfile, *statefile;
short res=0,errtype;
/*float time=0.0;*/
WebRtcVad_Create(&vinst);
WebRtcVad_Init(vinst);
short size;
int samps=0;
if (argc < 5){
printf("Usage:\n CNG.exe infile outfile samplingfreq(Hz) interval(ms) order\n\n");
return(0);
}
infile=fopen(argv[1],"rb");
if (infile==NULL){
printf("file %s does not exist\n",argv[1]);
return(0);
}
outfile=fopen(argv[2],"wb");
statefile=fopen("CNGVAD.d","wb");
if (outfile==NULL){
printf("file %s could not be created\n",argv[2]);
return(0);
}
unsigned int fs=16000;
short frameLen=fs/50;
#ifndef ASSIGN
res=WebRtcCng_CreateEnc(&e_inst);
if (res < 0) {
/* exit if returned with error */
errtype=WebRtcCng_GetErrorCodeEnc(e_inst);
fprintf(stderr,"\n\n Error in initialization: %d.\n\n", errtype);
exit(EXIT_FAILURE);
}
res=WebRtcCng_CreateDec(&d_inst);
if (res < 0) {
/* exit if returned with error */
errtype=WebRtcCng_GetErrorCodeDec(d_inst);
fprintf(stderr,"\n\n Error in initialization: %d.\n\n", errtype);
exit(EXIT_FAILURE);
}
#else
// Test the Assign-functions
int Esize, Dsize;
void *Eaddr, *Daddr;
res=WebRtcCng_AssignSizeEnc(&Esize);
res=WebRtcCng_AssignSizeDec(&Dsize);
Eaddr=malloc(Esize);
Daddr=malloc(Dsize);
res=WebRtcCng_AssignEnc(&e_inst, Eaddr);
if (res < 0) {
/* exit if returned with error */
errtype=WebRtcCng_GetErrorCodeEnc(e_inst);
fprintf(stderr,"\n\n Error in initialization: %d.\n\n", errtype);
exit(EXIT_FAILURE);
}
res=WebRtcCng_AssignDec(&d_inst, Daddr);
if (res < 0) {
/* exit if returned with error */
errtype=WebRtcCng_GetErrorCodeDec(d_inst);
fprintf(stderr,"\n\n Error in initialization: %d.\n\n", errtype);
exit(EXIT_FAILURE);
}
#endif
res=WebRtcCng_InitEnc(e_inst,atoi(argv[3]),atoi(argv[4]),atoi(argv[5]));
if (res < 0) {
/* exit if returned with error */
errtype=WebRtcCng_GetErrorCodeEnc(e_inst);
fprintf(stderr,"\n\n Error in initialization: %d.\n\n", errtype);
exit(EXIT_FAILURE);
}
res=WebRtcCng_InitDec(d_inst);
if (res < 0) {
/* exit if returned with error */
errtype=WebRtcCng_GetErrorCodeDec(d_inst);
fprintf(stderr,"\n\n Error in initialization: %d.\n\n", errtype);
exit(EXIT_FAILURE);
}
static bool firstSilent=true;
int numSamp=0;
int speech=0;
int silent=0;
long cnt=0;
while(fread(anaSpeech,2,frameLen,infile)==frameLen){
cnt++;
if (cnt==60){
cnt=60;
}
/* time+=(float)frameLen/fs;
numSamp+=frameLen;
float temp[640];
for(unsigned int j=0;j<frameLen;j++)
temp[j]=(float)anaSpeech[j]; */
// if(!WebRtcVad_Process(vinst, fs, anaSpeech, frameLen)){
if(1){ // Do CNG coding of entire file
// if(!((anaSpeech[0]==0)&&(anaSpeech[1]==0)&&(anaSpeech[2]==0))){
if(firstSilent){
res = WebRtcCng_Encode(e_inst, anaSpeech, frameLen/2, SIDpkt,&size,1);
if (res < 0) {
/* exit if returned with error */
errtype=WebRtcCng_GetErrorCodeEnc(e_inst);
fprintf(stderr,"\n\n Error in encoder: %d.\n\n", errtype);
exit(EXIT_FAILURE);
}
firstSilent=false;
res=WebRtcCng_Encode(e_inst, &anaSpeech[frameLen/2], frameLen/2, SIDpkt,&size,1);
if (res < 0) {
/* exit if returned with error */
errtype=WebRtcCng_GetErrorCodeEnc(e_inst);
fprintf(stderr,"\n\n Error in encoder: %d.\n\n", errtype);
exit(EXIT_FAILURE);
}
}
else{
res=WebRtcCng_Encode(e_inst, anaSpeech, frameLen/2, SIDpkt,&size,0);
if (res < 0) {
/* exit if returned with error */
errtype=WebRtcCng_GetErrorCodeEnc(e_inst);
fprintf(stderr,"\n\n Error in encoder: %d.\n\n", errtype);
exit(EXIT_FAILURE);
}
res=WebRtcCng_Encode(e_inst, &anaSpeech[frameLen/2], frameLen/2, SIDpkt,&size,0);
if (res < 0) {
/* exit if returned with error */
errtype=WebRtcCng_GetErrorCodeEnc(e_inst);
fprintf(stderr,"\n\n Error in encoder: %d.\n\n", errtype);
exit(EXIT_FAILURE);
}
}
if(size>0){
res=WebRtcCng_UpdateSid(d_inst,SIDpkt, size);
if (res < 0) {
/* exit if returned with error */
errtype=WebRtcCng_GetErrorCodeDec(d_inst);
fprintf(stderr,"\n\n Error in decoder: %d.\n\n", errtype);
exit(EXIT_FAILURE);
}
}
res=WebRtcCng_Generate(d_inst,genSpeech, frameLen,0);
if (res < 0) {
/* exit if returned with error */
errtype=WebRtcCng_GetErrorCodeDec(d_inst);
fprintf(stderr,"\n\n Error in decoder: %d.\n\n", errtype);
exit(EXIT_FAILURE);
}
memcpy(state,anaSpeech,2*frameLen);
}
else{
firstSilent=true;
memcpy(genSpeech,anaSpeech,2*frameLen);
memset(anaSpeech,0,frameLen*2);
memset(state,0,frameLen*2);
}
fwrite(genSpeech,2,frameLen,outfile);
fwrite(state,2,frameLen,statefile);
}
fclose(infile);
fclose(outfile);
fclose(statefile);
return 0;
}

View File

@ -1,18 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// stdafx.cpp : source file that includes just the standard includes
// CNG.pch will be the pre-compiled header
// stdafx.obj will contain the pre-compiled type information
#include "stdafx.h"
// TODO: reference any additional headers you need in STDAFX.H
// and not in this file

View File

@ -1,32 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// stdafx.h : include file for standard system include files,
// or project specific include files that are used frequently, but
// are changed infrequently
//
#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_MAIN_TEST_STDAFX_H_
#define WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_MAIN_TEST_STDAFX_H_
#if _MSC_VER > 1000
#pragma once
#endif // _MSC_VER > 1000
#define WIN32_LEAN_AND_MEAN // Exclude rarely-used stuff from Windows headers
#include <stdio.h>
// TODO: reference additional headers your program requires here
//{{AFX_INSERT_LOCATION}}
// Microsoft Visual C++ will insert additional declarations immediately before the previous line.
#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_MAIN_TEST_STDAFX_H_

View File

@ -1,148 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef MODULES_AUDIO_CODING_CODECS_G711_MAIN_INTERFACE_G711_INTERFACE_H_
#define MODULES_AUDIO_CODING_CODECS_G711_MAIN_INTERFACE_G711_INTERFACE_H_
#include "typedefs.h"
// Comfort noise constants
#define G711_WEBRTC_SPEECH 1
#define G711_WEBRTC_CNG 2
#ifdef __cplusplus
extern "C" {
#endif
/****************************************************************************
* WebRtcG711_EncodeA(...)
*
* This function encodes a G711 A-law frame and inserts it into a packet.
* Input speech can be of any length.
*
* Input:
* - state : Dummy state to make this codec look more like
* other codecs
* - speechIn : Input speech vector
* - len : Samples in speechIn
*
* Output:
* - encoded : The encoded data vector
*
* Return value : >0 - Length (in bytes) of coded data
* -1 - Error
*/
WebRtc_Word16 WebRtcG711_EncodeA(void *state,
WebRtc_Word16 *speechIn,
WebRtc_Word16 len,
WebRtc_Word16 *encoded);
/****************************************************************************
* WebRtcG711_EncodeU(...)
*
* This function encodes a G711 U-law frame and inserts it into a packet.
* Input speech can be of any length.
*
* Input:
* - state : Dummy state to make this codec look more like
* other codecs
* - speechIn : Input speech vector
* - len : Samples in speechIn
*
* Output:
* - encoded : The encoded data vector
*
* Return value : >0 - Length (in bytes) of coded data
* -1 - Error
*/
WebRtc_Word16 WebRtcG711_EncodeU(void *state,
WebRtc_Word16 *speechIn,
WebRtc_Word16 len,
WebRtc_Word16 *encoded);
/****************************************************************************
* WebRtcG711_DecodeA(...)
*
* This function decodes a packet G711 A-law frame.
*
* Input:
* - state : Dummy state to make this codec look more like
* other codecs
* - encoded : Encoded data
* - len : Bytes in encoded vector
*
* Output:
* - decoded : The decoded vector
* - speechType : 1 normal, 2 CNG (for G711 it should
* always return 1 since G711 does not have a
* built-in DTX/CNG scheme)
*
* Return value : >0 - Samples in decoded vector
* -1 - Error
*/
WebRtc_Word16 WebRtcG711_DecodeA(void *state,
WebRtc_Word16 *encoded,
WebRtc_Word16 len,
WebRtc_Word16 *decoded,
WebRtc_Word16 *speechType);
/****************************************************************************
* WebRtcG711_DecodeU(...)
*
* This function decodes a packet G711 U-law frame.
*
* Input:
* - state : Dummy state to make this codec look more like
* other codecs
* - encoded : Encoded data
* - len : Bytes in encoded vector
*
* Output:
* - decoded : The decoded vector
* - speechType : 1 normal, 2 CNG (for G711 it should
* always return 1 since G711 does not have a
* built-in DTX/CNG scheme)
*
* Return value : >0 - Samples in decoded vector
* -1 - Error
*/
WebRtc_Word16 WebRtcG711_DecodeU(void *state,
WebRtc_Word16 *encoded,
WebRtc_Word16 len,
WebRtc_Word16 *decoded,
WebRtc_Word16 *speechType);
/**********************************************************************
* WebRtcG711_Version(...)
*
* This function gives the version string of the G.711 codec.
*
* Input:
* - lenBytes: the size of Allocated space (in Bytes) where
* the version number is written to (in string format).
*
* Output:
* - version: Pointer to a buffer where the version number is
* written to.
*
*/
WebRtc_Word16 WebRtcG711_Version(char* version, WebRtc_Word16 lenBytes);
#ifdef __cplusplus
}
#endif
#endif /* MODULES_AUDIO_CODING_CODECS_G711_MAIN_INTERFACE_G711_INTERFACE_H_ */

View File

@ -1,47 +0,0 @@
# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
#
# Android build rules for the static G.711 codec library
# (libwebrtc_g711). License header added for consistency with the
# sibling G.722 Android.mk.
# This file is generated by gyp; do not edit. This means you!
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_ARM_MODE := arm
LOCAL_MODULE_CLASS := STATIC_LIBRARIES
LOCAL_MODULE := libwebrtc_g711
LOCAL_MODULE_TAGS := optional
LOCAL_GENERATED_SOURCES :=
LOCAL_SRC_FILES := g711_interface.c \
g711.c
# Flags passed to both C and C++ files.
MY_CFLAGS :=
MY_CFLAGS_C :=
MY_DEFS := '-DNO_TCMALLOC' \
'-DNO_HEAPCHECKER' \
'-DWEBRTC_TARGET_PC' \
'-DWEBRTC_LINUX' \
'-DWEBRTC_ANDROID' \
'-DANDROID'
LOCAL_CFLAGS := $(MY_CFLAGS_C) $(MY_CFLAGS) $(MY_DEFS)
# Include paths placed before CFLAGS/CPPFLAGS
LOCAL_C_INCLUDES := $(LOCAL_PATH)/../../../../../.. \
$(LOCAL_PATH)/../interface
# Flags passed to only C++ (and not C) files.
LOCAL_CPPFLAGS :=
LOCAL_LDFLAGS :=
LOCAL_STATIC_LIBRARIES :=
# Duplicate the static libraries to fix circular references
LOCAL_STATIC_LIBRARIES += $(LOCAL_STATIC_LIBRARIES)
LOCAL_SHARED_LIBRARIES := libcutils \
libdl \
libstlport
LOCAL_ADDITIONAL_DEPENDENCIES :=
ifneq ($(MY_WEBRTC_NDK_BUILD),true)
include external/stlport/libstlport.mk
include $(BUILD_STATIC_LIBRARY)
endif

View File

@ -1,83 +0,0 @@
/*
* SpanDSP - a series of DSP components for telephony
*
* g711.c - A-law and u-law transcoding routines
*
* Written by Steve Underwood <steveu@coppice.org>
*
* Copyright (C) 2006 Steve Underwood
*
* Despite my general liking of the GPL, I place this code in the
* public domain for the benefit of all mankind - even the slimy
* ones who might try to proprietize my work and use it to my
* detriment.
*
* $Id: g711.c,v 1.1 2006/06/07 15:46:39 steveu Exp $
*
* Modifications for WebRtc, 2011/04/28, by tlegrand:
* -Removed unused include files
* -Changed to use WebRtc types
* -Added option to run encoder bitexact with ITU-T reference implementation
*/
/*! \file */
#include "g711.h"
#include "typedefs.h"
/* Copied from the CCITT G.711 specification */
/* Copied from the CCITT G.711 specification. Indexed by the 8-bit u-law
 * code; yields the best-matching A-law code. Do not edit the values:
 * the transcoding procedure is normative. */
static const WebRtc_UWord8 ulaw_to_alaw_table[256] =
{
42, 43, 40, 41, 46, 47, 44, 45, 34, 35, 32, 33, 38, 39, 36, 37,
58, 59, 56, 57, 62, 63, 60, 61, 50, 51, 48, 49, 54, 55, 52, 53,
10, 11, 8, 9, 14, 15, 12, 13, 2, 3, 0, 1, 6, 7, 4, 26,
27, 24, 25, 30, 31, 28, 29, 18, 19, 16, 17, 22, 23, 20, 21, 106,
104, 105, 110, 111, 108, 109, 98, 99, 96, 97, 102, 103, 100, 101, 122, 120,
126, 127, 124, 125, 114, 115, 112, 113, 118, 119, 116, 117, 75, 73, 79, 77,
66, 67, 64, 65, 70, 71, 68, 69, 90, 91, 88, 89, 94, 95, 92, 93,
82, 82, 83, 83, 80, 80, 81, 81, 86, 86, 87, 87, 84, 84, 85, 85,
170, 171, 168, 169, 174, 175, 172, 173, 162, 163, 160, 161, 166, 167, 164, 165,
186, 187, 184, 185, 190, 191, 188, 189, 178, 179, 176, 177, 182, 183, 180, 181,
138, 139, 136, 137, 142, 143, 140, 141, 130, 131, 128, 129, 134, 135, 132, 154,
155, 152, 153, 158, 159, 156, 157, 146, 147, 144, 145, 150, 151, 148, 149, 234,
232, 233, 238, 239, 236, 237, 226, 227, 224, 225, 230, 231, 228, 229, 250, 248,
254, 255, 252, 253, 242, 243, 240, 241, 246, 247, 244, 245, 203, 201, 207, 205,
194, 195, 192, 193, 198, 199, 196, 197, 218, 219, 216, 217, 222, 223, 220, 221,
210, 210, 211, 211, 208, 208, 209, 209, 214, 214, 215, 215, 212, 212, 213, 213
};
/* These transcoding tables are copied from the CCITT G.711 specification. To achieve
optimal results, do not change them. */
/* These transcoding tables are copied from the CCITT G.711 specification. To achieve
 optimal results, do not change them. Indexed by the 8-bit A-law code;
 yields the best-matching u-law code. */
static const WebRtc_UWord8 alaw_to_ulaw_table[256] =
{
42, 43, 40, 41, 46, 47, 44, 45, 34, 35, 32, 33, 38, 39, 36, 37,
57, 58, 55, 56, 61, 62, 59, 60, 49, 50, 47, 48, 53, 54, 51, 52,
10, 11, 8, 9, 14, 15, 12, 13, 2, 3, 0, 1, 6, 7, 4, 5,
26, 27, 24, 25, 30, 31, 28, 29, 18, 19, 16, 17, 22, 23, 20, 21,
98, 99, 96, 97, 102, 103, 100, 101, 93, 93, 92, 92, 95, 95, 94, 94,
116, 118, 112, 114, 124, 126, 120, 122, 106, 107, 104, 105, 110, 111, 108, 109,
72, 73, 70, 71, 76, 77, 74, 75, 64, 65, 63, 63, 68, 69, 66, 67,
86, 87, 84, 85, 90, 91, 88, 89, 79, 79, 78, 78, 82, 83, 80, 81,
170, 171, 168, 169, 174, 175, 172, 173, 162, 163, 160, 161, 166, 167, 164, 165,
185, 186, 183, 184, 189, 190, 187, 188, 177, 178, 175, 176, 181, 182, 179, 180,
138, 139, 136, 137, 142, 143, 140, 141, 130, 131, 128, 129, 134, 135, 132, 133,
154, 155, 152, 153, 158, 159, 156, 157, 146, 147, 144, 145, 150, 151, 148, 149,
226, 227, 224, 225, 230, 231, 228, 229, 221, 221, 220, 220, 223, 223, 222, 222,
244, 246, 240, 242, 252, 254, 248, 250, 234, 235, 232, 233, 238, 239, 236, 237,
200, 201, 198, 199, 204, 205, 202, 203, 192, 193, 191, 191, 196, 197, 194, 195,
214, 215, 212, 213, 218, 219, 216, 217, 207, 207, 206, 206, 210, 211, 208, 209
};
WebRtc_UWord8 alaw_to_ulaw(WebRtc_UWord8 alaw)
{
    /* G.711-specified transcoding: direct lookup on the 8-bit code. */
    const WebRtc_UWord8 ulaw_code = alaw_to_ulaw_table[alaw];
    return ulaw_code;
}
/*- End of function --------------------------------------------------------*/
WebRtc_UWord8 ulaw_to_alaw(WebRtc_UWord8 ulaw)
{
    /* G.711-specified transcoding: direct lookup on the 8-bit code. */
    const WebRtc_UWord8 alaw_code = ulaw_to_alaw_table[ulaw];
    return alaw_code;
}
/*- End of function --------------------------------------------------------*/
/*- End of file ------------------------------------------------------------*/

View File

@ -1,57 +0,0 @@
# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
# Build targets for the G.711 codec static library and its test binary.
{
'includes': [
'../../../../../../common_settings.gypi', # Common settings
],
'targets': [
{
'target_name': 'G711',
'type': '<(library)',
'include_dirs': [
'../interface',
],
'direct_dependent_settings': {
'include_dirs': [
'../interface',
],
},
'sources': [
'../interface/g711_interface.h',
'g711_interface.c',
'g711.c',
'g711.h',
],
},
{
'target_name': 'g711_test',
'type': 'executable',
'dependencies': [
'G711',
],
'sources': [
'../testG711/testG711.cpp',
],
# NOTE(review): the exception-enabling condition below is commented out
# here but active in the sibling g722 gyp file; confirm intentional.
# 'conditions': [
# ['OS=="linux"', {
# 'cflags': [
# '-fexceptions', # enable exceptions
# ],
# }],
# ],
},
],
}
# Local Variables:
# tab-width:2
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=2 shiftwidth=2:

View File

@ -1,382 +0,0 @@
/*
* SpanDSP - a series of DSP components for telephony
*
* g711.h - In line A-law and u-law conversion routines
*
* Written by Steve Underwood <steveu@coppice.org>
*
* Copyright (C) 2001 Steve Underwood
*
* Despite my general liking of the GPL, I place this code in the
* public domain for the benefit of all mankind - even the slimy
* ones who might try to proprietize my work and use it to my
* detriment.
*
* $Id: g711.h,v 1.1 2006/06/07 15:46:39 steveu Exp $
*
* Modifications for WebRtc, 2011/04/28, by tlegrand:
* -Changed to use WebRtc types
* -Changed __inline__ to __inline
* -Two changes to make implementation bitexact with ITU-T reference implementation
*/
/*! \file */
/*! \page g711_page A-law and mu-law handling
Lookup tables for A-law and u-law look attractive, until you consider the impact
on the CPU cache. If it causes a substantial area of your processor cache to get
hit too often, cache sloshing will severely slow things down. The main reason
these routines are slow in C, is the lack of direct access to the CPU's "find
the first 1" instruction. A little in-line assembler fixes that, and the
conversion routines can be faster than lookup tables, in most real world usage.
A "find the first 1" instruction is available on most modern CPUs, and is a
much underused feature.
If an assembly language method of bit searching is not available, these routines
revert to a method that can be a little slow, so the cache thrashing might not
seem so bad :(
Feel free to submit patches to add fast "find the first 1" support for your own
favourite processor.
Look up tables are used for transcoding between A-law and u-law, since it is
difficult to achieve the precise transcoding procedure laid down in the G.711
specification by other means.
*/
#if !defined(_G711_H_)
#define _G711_H_
#ifdef __cplusplus
extern "C" {
#endif
#include "typedefs.h"
#if defined(__i386__)
/*! \brief Find the bit position of the highest set bit in a word
    \param bits The word to be searched
    \return The bit number of the highest set bit, or -1 if the word is zero. */
static __inline__ int top_bit(unsigned int bits)
{
    int res;

    /* %edx is preloaded with -1 so that a zero input yields -1.
       NOTE(review): this relies on bsr leaving the destination unchanged
       for a zero source, which the ISA documents as undefined but
       mainstream x86 CPUs honor -- confirm for target CPUs. */
    __asm__ __volatile__(" movl $-1,%%edx;\n"
                         " bsrl %%eax,%%edx;\n"
                         : "=d" (res)
                         : "a" (bits));
    return res;
}
/*- End of function --------------------------------------------------------*/
/*! \brief Find the bit position of the lowest set bit in a word
    \param bits The word to be searched
    \return The bit number of the lowest set bit, or -1 if the word is zero. */
static __inline__ int bottom_bit(unsigned int bits)
{
    int res;

    /* %edx is preloaded with -1 so that a zero input yields -1; see the
       NOTE on top_bit about bsf/bsr zero-source behavior. */
    __asm__ __volatile__(" movl $-1,%%edx;\n"
                         " bsfl %%eax,%%edx;\n"
                         : "=d" (res)
                         : "a" (bits));
    return res;
}
/*- End of function --------------------------------------------------------*/
#elif defined(__x86_64__)
/*! \brief Find the bit position of the highest set bit in a word (x86_64).
    \param bits The word to be searched
    \return The bit number of the highest set bit, or -1 if the word is zero. */
static __inline__ int top_bit(unsigned int bits)
{
    int res;

    /* %rdx is preloaded with -1 so that a zero input yields -1; relies on
       the same de-facto bsr zero-source behavior as the i386 variant. */
    __asm__ __volatile__(" movq $-1,%%rdx;\n"
                         " bsrq %%rax,%%rdx;\n"
                         : "=d" (res)
                         : "a" (bits));
    return res;
}
/*- End of function --------------------------------------------------------*/
/*! \brief Find the bit position of the lowest set bit in a word (x86_64).
    \param bits The word to be searched
    \return The bit number of the lowest set bit, or -1 if the word is zero. */
static __inline__ int bottom_bit(unsigned int bits)
{
    int res;

    /* %rdx is preloaded with -1 so that a zero input yields -1; relies on
       the same de-facto bsf zero-source behavior as the i386 variant. */
    __asm__ __volatile__(" movq $-1,%%rdx;\n"
                         " bsfq %%rax,%%rdx;\n"
                         : "=d" (res)
                         : "a" (bits));
    return res;
}
/*- End of function --------------------------------------------------------*/
#else
/*! \brief Find the bit position of the highest set bit in a word
    (portable fallback for CPUs without an asm bit-scan path).
    \param bits The word to be searched
    \return The bit number of the highest set bit, or -1 if the word is zero. */
static __inline int top_bit(unsigned int bits)
{
    int position = -1;

    /* Shift the word down until it is exhausted; the number of shifts
       performed is the index of the most significant set bit, and a zero
       input never enters the loop, leaving the -1 sentinel. */
    while (bits != 0)
    {
        bits >>= 1;
        position++;
    }
    return position;
}
/*- End of function --------------------------------------------------------*/
/*! \brief Find the bit position of the lowest set bit in a word
    (portable fallback for CPUs without an asm bit-scan path).
    \param bits The word to be searched
    \return The bit number of the lowest set bit, or -1 if the word is zero.

    Bug fix: the accumulator previously started at 32, which reported every
    result one too high (e.g. bottom_bit(1) == 1, bottom_bit(0x80000000) == 32)
    and disagreed with the bsf-based assembly variants above, which return the
    0-based bit index. Starting from 31 makes all implementations agree. */
static __inline int bottom_bit(unsigned int bits)
{
    int i;

    if (bits == 0)
        return -1;
    /* Binary search downwards: each mask keeps only the lower half of the
       remaining candidate bit positions. */
    i = 31;
    if (bits & 0x0000FFFF)
    {
        bits &= 0x0000FFFF;
        i -= 16;
    }
    if (bits & 0x00FF00FF)
    {
        bits &= 0x00FF00FF;
        i -= 8;
    }
    if (bits & 0x0F0F0F0F)
    {
        bits &= 0x0F0F0F0F;
        i -= 4;
    }
    if (bits & 0x33333333)
    {
        bits &= 0x33333333;
        i -= 2;
    }
    if (bits & 0x55555555)
    {
        bits &= 0x55555555;
        i -= 1;
    }
    return i;
}
/*- End of function --------------------------------------------------------*/
#endif
/* N.B. It is tempting to use look-up tables for A-law and u-law conversion.
* However, you should consider the cache footprint.
*
* A 64K byte table for linear to x-law and a 512 byte table for x-law to
* linear sound like peanuts these days, and shouldn't an array lookup be
* real fast? No! When the cache sloshes as badly as this one will, a tight
* calculation may be better. The messiest part is normally finding the
* segment, but a little inline assembly can fix that on an i386, x86_64 and
* many other modern processors.
*/
/*
* Mu-law is basically as follows:
*
* Biased Linear Input Code Compressed Code
* ------------------------ ---------------
* 00000001wxyza 000wxyz
* 0000001wxyzab 001wxyz
* 000001wxyzabc 010wxyz
* 00001wxyzabcd 011wxyz
* 0001wxyzabcde 100wxyz
* 001wxyzabcdef 101wxyz
* 01wxyzabcdefg 110wxyz
* 1wxyzabcdefgh 111wxyz
*
* Each biased linear code has a leading 1 which identifies the segment
* number. The value of the segment number is equal to 7 minus the number
* of leading 0's. The quantization interval is directly available as the
* four bits wxyz. * The trailing bits (a - h) are ignored.
*
* Ordinarily the complement of the resulting code word is used for
* transmission, and so the code word is complemented before it is returned.
*
* For further information see John C. Bellamy's Digital Telephony, 1982,
* John Wiley & Sons, pps 98-111 and 472-476.
*/
//#define ULAW_ZEROTRAP /* turn on the trap as per the MIL-STD */
#define ULAW_BIAS 0x84 /* Bias for linear code. */
/*! \brief Encode a linear sample to u-law
    \param linear The sample to encode.
    \return The u-law value.
*/
static __inline WebRtc_UWord8 linear_to_ulaw(int linear)
{
    WebRtc_UWord8 code;
    int xor_mask;
    int segment;

    /* Fold in the bias and record the sign in the complement mask. */
    if (linear >= 0)
    {
        linear = ULAW_BIAS + linear;
        xor_mask = 0xFF;
    }
    else
    {
        /* WebRtc, tlegrand: -1 added to get bitexact to reference implementation */
        linear = ULAW_BIAS - linear - 1;
        xor_mask = 0x7F;
    }
    segment = top_bit(linear | 0xFF) - 7;
    /*
     * Combine the sign, segment, quantization bits,
     * and complement the code word.
     */
    if (segment >= 8)
    {
        code = (WebRtc_UWord8) (0x7F ^ xor_mask);
    }
    else
    {
        code = (WebRtc_UWord8) (((segment << 4) | ((linear >> (segment + 3)) & 0xF)) ^ xor_mask);
    }
#ifdef ULAW_ZEROTRAP
    /* Optional ITU trap */
    if (code == 0)
        code = 0x02;
#endif
    return code;
}
/*- End of function --------------------------------------------------------*/
/*! \brief Decode an u-law sample to a linear value.
    \param ulaw The u-law sample to decode.
    \return The linear value.
*/
static __inline WebRtc_Word16 ulaw_to_linear(WebRtc_UWord8 ulaw)
{
    int magnitude;

    /* Complement to obtain normal u-law value. */
    ulaw = ~ulaw;
    /* Rebuild the biased magnitude from the quantization bits, scale it
       up by the segment number, then remove the bias below. */
    magnitude = (((ulaw & 0x0F) << 3) + ULAW_BIAS) << (((int) ulaw & 0x70) >> 4);
    if (ulaw & 0x80)
        return (WebRtc_Word16) (ULAW_BIAS - magnitude);
    return (WebRtc_Word16) (magnitude - ULAW_BIAS);
}
/*- End of function --------------------------------------------------------*/
/*
* A-law is basically as follows:
*
* Linear Input Code Compressed Code
* ----------------- ---------------
* 0000000wxyza 000wxyz
* 0000001wxyza 001wxyz
* 000001wxyzab 010wxyz
* 00001wxyzabc 011wxyz
* 0001wxyzabcd 100wxyz
* 001wxyzabcde 101wxyz
* 01wxyzabcdef 110wxyz
* 1wxyzabcdefg 111wxyz
*
* For further information see John C. Bellamy's Digital Telephony, 1982,
* John Wiley & Sons, pps 98-111 and 472-476.
*/
#define ALAW_AMI_MASK 0x55
/*! \brief Encode a linear sample to A-law
    \param linear The sample to encode.
    \return The A-law value.
*/
static __inline WebRtc_UWord8 linear_to_alaw(int linear)
{
    int sign_mask;
    int segment;

    if (linear < 0)
    {
        /* Sign (bit 7) bit = 0 */
        sign_mask = ALAW_AMI_MASK;
        /* WebRtc, tlegrand: Changed from -8 to -1 to get bitexact to reference
         * implementation */
        linear = -linear - 1;
    }
    else
    {
        /* Sign (bit 7) bit = 1 */
        sign_mask = ALAW_AMI_MASK | 0x80;
    }
    /* Convert the scaled magnitude to segment number. */
    segment = top_bit(linear | 0xFF) - 7;
    if (segment >= 8)
    {
        if (linear >= 0)
        {
            /* Out of range. Return maximum value. */
            return (WebRtc_UWord8) (0x7F ^ sign_mask);
        }
        /* We must be just a tiny step below zero */
        return (WebRtc_UWord8) (0x00 ^ sign_mask);
    }
    /* Combine the sign, segment, and quantization bits. */
    return (WebRtc_UWord8) (((segment << 4)
                             | ((linear >> ((segment) ? (segment + 3) : 4)) & 0x0F))
                            ^ sign_mask);
}
/*- End of function --------------------------------------------------------*/
/*! \brief Decode an A-law sample to a linear value.
    \param alaw The A-law sample to decode.
    \return The linear value.
*/
static __inline WebRtc_Word16 alaw_to_linear(WebRtc_UWord8 alaw)
{
    int magnitude;
    int segment;

    /* Strip the alternate-mark-inversion scrambling. */
    alaw ^= ALAW_AMI_MASK;
    magnitude = ((alaw & 0x0F) << 4);
    segment = (((int) alaw & 0x70) >> 4);
    if (segment == 0)
        magnitude += 8;
    else
        magnitude = (magnitude + 0x108) << (segment - 1);
    return (WebRtc_Word16) ((alaw & 0x80) ? magnitude : -magnitude);
}
/*- End of function --------------------------------------------------------*/
/*! \brief Transcode from A-law to u-law, using the procedure defined in G.711.
    \param alaw The A-law sample to transcode.
    \return The best matching u-law value.
*/
WebRtc_UWord8 alaw_to_ulaw(WebRtc_UWord8 alaw);
/*! \brief Transcode from u-law to A-law, using the procedure defined in G.711.
    \param ulaw The u-law sample to transcode.
    \return The best matching A-law value.
*/
WebRtc_UWord8 ulaw_to_alaw(WebRtc_UWord8 ulaw);
#ifdef __cplusplus
}
#endif
#endif
/*- End of file ------------------------------------------------------------*/

View File

@ -1,171 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <string.h>
#include "g711.h"
#include "g711_interface.h"
#include "typedefs.h"
/* Encode a block of linear PCM samples to G.711 A-law.
 * Bug fix: removed the dead local tempVal2, which was written on every
 * iteration but never read (the sibling WebRtcG711_EncodeU has no such
 * variable).
 *
 * state    - unused; G.711 is stateless (kept for a codec-uniform API)
 * speechIn - input speech vector
 * len      - samples in speechIn (must be >= 0)
 * encoded  - output; two 8-bit A-law codes packed per 16-bit word
 * Returns the number of encoded bytes (== len), or -1 on error. */
WebRtc_Word16 WebRtcG711_EncodeA(void *state,
                                 WebRtc_Word16 *speechIn,
                                 WebRtc_Word16 len,
                                 WebRtc_Word16 *encoded)
{
    int n;
    WebRtc_UWord16 tempVal;
    // Set to avoid getting warnings
    state = state;
    // Sanity check of input length
    if (len < 0) {
        return (-1);
    }
    // Loop over all samples, packing two 8-bit codes per 16-bit word
    for (n = 0; n < len; n++) {
        tempVal = (WebRtc_UWord16) linear_to_alaw(speechIn[n]);
#ifdef WEBRTC_BIG_ENDIAN
        if ((n & 0x1) == 1) {
            encoded[n >> 1] |= ((WebRtc_UWord16) tempVal);
        } else {
            encoded[n >> 1] = ((WebRtc_UWord16) tempVal) << 8;
        }
#else
        if ((n & 0x1) == 1) {
            encoded[n >> 1] |= ((WebRtc_UWord16) tempVal) << 8;
        } else {
            encoded[n >> 1] = ((WebRtc_UWord16) tempVal);
        }
#endif
    }
    return (len);
}
/* Encode a block of linear PCM samples to G.711 u-law.
 * Returns the number of encoded bytes (== len), or -1 on error. */
WebRtc_Word16 WebRtcG711_EncodeU(void *state,
                                 WebRtc_Word16 *speechIn,
                                 WebRtc_Word16 len,
                                 WebRtc_Word16 *encoded)
{
    int sample;
    WebRtc_UWord16 code;

    /* G.711 is stateless; the argument is referenced only to silence
     * unused-parameter warnings. */
    state = state;

    /* Negative lengths are invalid. */
    if (len < 0) {
        return (-1);
    }

    /* Convert each linear sample to an 8-bit u-law code and pack two
     * codes into every 16-bit output word. */
    for (sample = 0; sample < len; sample++) {
        code = (WebRtc_UWord16) linear_to_ulaw(speechIn[sample]);
#ifdef WEBRTC_BIG_ENDIAN
        if ((sample & 0x1) == 0) {
            encoded[sample >> 1] = ((WebRtc_UWord16) code) << 8;
        } else {
            encoded[sample >> 1] |= ((WebRtc_UWord16) code);
        }
#else
        if ((sample & 0x1) == 0) {
            encoded[sample >> 1] = ((WebRtc_UWord16) code);
        } else {
            encoded[sample >> 1] |= ((WebRtc_UWord16) code) << 8;
        }
#endif
    }
    return (len);
}
/* Decode a block of G.711 A-law codes to linear PCM.
 * Returns the number of decoded samples (== len), or -1 on error;
 * *speechType is always set to 1 (G.711 has no DTX/CNG). */
WebRtc_Word16 WebRtcG711_DecodeA(void *state,
                                 WebRtc_Word16 *encoded,
                                 WebRtc_Word16 len,
                                 WebRtc_Word16 *decoded,
                                 WebRtc_Word16 *speechType)
{
    int sample;
    WebRtc_UWord16 code;

    /* G.711 is stateless; the argument is referenced only to silence
     * unused-parameter warnings. */
    state = state;

    /* Negative lengths are invalid. */
    if (len < 0) {
        return (-1);
    }

    /* Each 16-bit input word carries two 8-bit A-law codes; unpack the
     * byte that belongs to this sample and expand it to linear PCM. */
    for (sample = 0; sample < len; sample++) {
#ifdef WEBRTC_BIG_ENDIAN
        if ((sample & 0x1) == 0) {
            code = ((WebRtc_UWord16) encoded[sample >> 1] >> 8);
        } else {
            code = ((WebRtc_UWord16) encoded[sample >> 1] & 0xFF);
        }
#else
        if ((sample & 0x1) == 0) {
            code = (encoded[sample >> 1] & 0xFF);
        } else {
            code = (encoded[sample >> 1] >> 8);
        }
#endif
        decoded[sample] = (WebRtc_Word16) alaw_to_linear(code);
    }

    *speechType = 1;
    return (len);
}
/* Decode a block of G.711 u-law codes to linear PCM.
 * Returns the number of decoded samples (== len), or -1 on error;
 * *speechType is always set to 1 (G.711 has no DTX/CNG). */
WebRtc_Word16 WebRtcG711_DecodeU(void *state,
                                 WebRtc_Word16 *encoded,
                                 WebRtc_Word16 len,
                                 WebRtc_Word16 *decoded,
                                 WebRtc_Word16 *speechType)
{
    int sample;
    WebRtc_UWord16 code;

    /* G.711 is stateless; the argument is referenced only to silence
     * unused-parameter warnings. */
    state = state;

    /* Negative lengths are invalid. */
    if (len < 0) {
        return (-1);
    }

    /* Each 16-bit input word carries two 8-bit u-law codes; unpack the
     * byte that belongs to this sample and expand it to linear PCM. */
    for (sample = 0; sample < len; sample++) {
#ifdef WEBRTC_BIG_ENDIAN
        if ((sample & 0x1) == 0) {
            code = ((WebRtc_UWord16) encoded[sample >> 1] >> 8);
        } else {
            code = ((WebRtc_UWord16) encoded[sample >> 1] & 0xFF);
        }
#else
        if ((sample & 0x1) == 0) {
            code = (encoded[sample >> 1] & 0xFF);
        } else {
            code = (encoded[sample >> 1] >> 8);
        }
#endif
        decoded[sample] = (WebRtc_Word16) ulaw_to_linear(code);
    }

    *speechType = 1;
    return (len);
}
/* Write the codec version string ("2.0.0") into the caller's buffer.
 * Bug fix: strncpy did not NUL-terminate the output when
 * lenBytes <= strlen("2.0.0"), leaving an unterminated string while still
 * reporting success. The call now fails cleanly (-1) when the buffer is
 * too small or NULL, and returns 0 on success. */
WebRtc_Word16 WebRtcG711_Version(char* version, WebRtc_Word16 lenBytes)
{
    static const char kVersion[] = "2.0.0";
    if ((version == NULL) || (lenBytes < (WebRtc_Word16) sizeof(kVersion))) {
        return -1;
    }
    memcpy(version, kVersion, sizeof(kVersion));
    return 0;
}

View File

@ -1,171 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* testG711.cpp : Defines the entry point for the console application.
*/
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
/* include API */
#include "g711_interface.h"
/* Runtime statistics */
#include <time.h>
#define CLOCKS_PER_SEC_G711 1000
/* Read up to `length` 16-bit samples from a PCM file into `data`,
 * zero-padding the tail on a short read.
 * Returns 0 when a full frame was read, 1 when the file ran out. */
int readframe(WebRtc_Word16 *data, FILE *inp, int length) {
    int i;
    int samples_read = (int) fread(data, sizeof(WebRtc_Word16), length, inp);
    if (samples_read >= length) {
        return 0;
    }
    /* Short read: pad the remainder of the frame with silence. */
    for (i = samples_read; i < length; i++) {
        data[i] = 0;
    }
    return 1;
}
/* G.711 test application: encodes a PCM file with A- or u-law, optionally
 * dumps the bitstream, then decodes it back and writes the decoded speech.
 * Fixes: strcpy into the two-byte `law` buffer overflowed for arguments
 * longer than one character; sscanf "%s" could overflow the fixed-size
 * file-name buffers; the optional bitstream file was never closed; the
 * unused `decoded2` buffer was removed. */
int main(int argc, char* argv[])
{
    char inname[80], outname[40], bitname[40];
    FILE *inp, *outp, *bitp = NULL;
    int framecnt, endfile;
    WebRtc_Word16 framelength = 80;
    int err;
    /* Runtime statistics */
    double starttime;
    double runtime;
    double length_file;
    WebRtc_Word16 stream_len = 0;
    WebRtc_Word16 shortdata[480];
    WebRtc_Word16 decoded[480];
    WebRtc_Word16 streamdata[500];
    WebRtc_Word16 speechType[1];
    char law[2];
    char versionNumber[40];
    /* handling wrong input arguments in the command line */
    if ((argc!=5) && (argc!=6)) {
        printf("\n\nWrong number of arguments or flag values.\n\n");
        printf("\n");
        printf("\nG.711 test application\n\n");
        printf("Usage:\n\n");
        printf("./testG711.exe framelength law infile outfile \n\n");
        printf("framelength: Framelength in samples.\n");
        printf("law : Coding law, A och u.\n");
        printf("infile : Normal speech input file\n");
        printf("outfile : Speech output file\n\n");
        printf("outbits : Output bitstream file [optional]\n\n");
        exit(0);
    }
    /* Get version and print */
    WebRtcG711_Version(versionNumber, 40);
    printf("-----------------------------------\n");
    printf("G.711 version: %s\n\n", versionNumber);
    /* Get frame length */
    framelength = atoi(argv[1]);
    /* Get compression law. Bug fix: strcpy(law, argv[2]) overflowed the
     * two-byte buffer whenever the argument was longer than one
     * character; only the law character itself is needed. */
    law[0] = argv[2][0];
    law[1] = '\0';
    /* Get Input and Output files (bounded copies; the original
     * sscanf "%s" could overflow the fixed-size name buffers). */
    snprintf(inname, sizeof(inname), "%s", argv[3]);
    snprintf(outname, sizeof(outname), "%s", argv[4]);
    if (argc==6) {
        snprintf(bitname, sizeof(bitname), "%s", argv[5]);
        if ((bitp = fopen(bitname,"wb")) == NULL) {
            printf(" G.711: Cannot read file %s.\n", bitname);
            exit(1);
        }
    }
    if ((inp = fopen(inname,"rb")) == NULL) {
        printf(" G.711: Cannot read file %s.\n", inname);
        exit(1);
    }
    if ((outp = fopen(outname,"wb")) == NULL) {
        printf(" G.711: Cannot write file %s.\n", outname);
        exit(1);
    }
    printf("\nInput: %s\nOutput: %s\n", inname, outname);
    if (argc==6) {
        printf("\nBitfile: %s\n", bitname);
    }
    starttime = clock()/(double)CLOCKS_PER_SEC_G711; /* Runtime statistics */
    /* Initialize encoder and decoder */
    framecnt= 0;
    endfile = 0;
    while (endfile == 0) {
        framecnt++;
        /* Read speech block */
        endfile = readframe(shortdata, inp, framelength);
        /* G.711 encoding */
        if (!strcmp(law,"A")) {
            /* A-law encoding */
            stream_len = WebRtcG711_EncodeA(NULL, shortdata, framelength, streamdata);
            if (argc==6){
                /* Write bits to file */
                fwrite(streamdata,sizeof(unsigned char),stream_len,bitp);
            }
            err = WebRtcG711_DecodeA(NULL, streamdata, stream_len, decoded, speechType);
        } else if (!strcmp(law,"u")){
            /* u-law encoding */
            stream_len = WebRtcG711_EncodeU(NULL, shortdata, framelength, streamdata);
            if (argc==6){
                /* Write bits to file */
                fwrite(streamdata,sizeof(unsigned char),stream_len,bitp);
            }
            err = WebRtcG711_DecodeU(NULL, streamdata, stream_len, decoded, speechType);
        } else {
            printf("Wrong law mode\n");
            exit (1);
        }
        if (stream_len < 0 || err < 0) {
            /* exit if returned with error */
            printf("Error in encoder/decoder\n");
        } else {
            /* Write coded speech to file */
            fwrite(decoded,sizeof(short),framelength,outp);
        }
    }
    runtime = (double)(clock()/(double)CLOCKS_PER_SEC_G711-starttime);
    length_file = ((double)framecnt*(double)framelength/8000);
    printf("\n\nLength of speech file: %.1f s\n", length_file);
    printf("Time to run G.711: %.2f s (%.2f %% of realtime)\n\n", runtime, (100*runtime/length_file));
    printf("---------------------END----------------------\n");
    fclose(inp);
    fclose(outp);
    /* Bug fix: the optional bitstream file was never closed. */
    if (bitp != NULL) {
        fclose(bitp);
    }
    return 0;
}

View File

@ -1,190 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef MODULES_AUDIO_CODING_CODECS_G722_MAIN_INTERFACE_G722_INTERFACE_H_
#define MODULES_AUDIO_CODING_CODECS_G722_MAIN_INTERFACE_G722_INTERFACE_H_
#include "typedefs.h"
/*
* Solution to support multiple instances
*/
typedef struct WebRtcG722EncInst G722EncInst;
typedef struct WebRtcG722DecInst G722DecInst;
/*
* Comfort noise constants
*/
#define G722_WEBRTC_SPEECH 1
#define G722_WEBRTC_CNG 2
#ifdef __cplusplus
extern "C" {
#endif
/****************************************************************************
 * WebRtcG722_CreateEncoder(...)
 *
 * Allocate the memory used for a G722 encoder instance.
 *
 * Input:
 *     - G722enc_inst         : G722 instance for encoder
 *
 * Return value               :  0 - Ok
 *                              -1 - Error
 */
WebRtc_Word16 WebRtcG722_CreateEncoder(G722EncInst **G722enc_inst);
/****************************************************************************
 * WebRtcG722_EncoderInit(...)
 *
 * This function initializes a G722 encoder instance.
 *
 * Input:
 *     - G722enc_inst         : G722 instance, i.e. the encoder that should
 *                              be initialized
 *
 * Return value               :  0 - Ok
 *                              -1 - Error
 */
WebRtc_Word16 WebRtcG722_EncoderInit(G722EncInst *G722enc_inst);
/****************************************************************************
 * WebRtcG722_FreeEncoder(...)
 *
 * Free the memory used for a G722 encoder instance.
 *
 * Input:
 *     - G722enc_inst         : G722 instance for encoder
 *
 * Return value               :  0 - Ok
 *                              -1 - Error
 */
WebRtc_Word16 WebRtcG722_FreeEncoder(G722EncInst *G722enc_inst);
/****************************************************************************
 * WebRtcG722_Encode(...)
 *
 * This function encodes speech samples to G722 encoded data.
 *
 * Input:
 *     - G722enc_inst         : G722 instance, i.e. the encoder that should
 *                              encode a packet
 *     - speechIn             : Input speech vector
 *     - len                  : Samples in speechIn
 *
 * Output:
 *     - encoded              : The encoded data vector
 *
 * Return value               : >0 - Length (in bytes) of coded data
 *                              -1 - Error
 */
WebRtc_Word16 WebRtcG722_Encode(G722EncInst *G722enc_inst,
                                WebRtc_Word16 *speechIn,
                                WebRtc_Word16 len,
                                WebRtc_Word16 *encoded);
/****************************************************************************
 * WebRtcG722_CreateDecoder(...)
 *
 * Allocate the memory used for a G722 decoder instance.
 * (Fixed copy-paste error: the old comment said "encoder".)
 *
 * Input:
 *     - G722dec_inst         : G722 instance for decoder
 *
 * Return value               :  0 - Ok
 *                              -1 - Error
 */
WebRtc_Word16 WebRtcG722_CreateDecoder(G722DecInst **G722dec_inst);
/****************************************************************************
 * WebRtcG722_DecoderInit(...)
 *
 * This function initializes a G722 decoder instance.
 * (Fixed copy-paste error: the old comment referred to G729.)
 *
 * Input:
 *     - G722dec_inst         : G722 instance, i.e. the decoder that should
 *                              be initialized
 *
 * Return value               :  0 - Ok
 *                              -1 - Error
 */
WebRtc_Word16 WebRtcG722_DecoderInit(G722DecInst *G722dec_inst);
/****************************************************************************
 * WebRtcG722_FreeDecoder(...)
 *
 * Free the memory used for G722 decoder
 *
 * Input:
 *     - G722dec_inst         : G722 instance for decoder
 *
 * Return value               :  0 - Ok
 *                              -1 - Error
 */
WebRtc_Word16 WebRtcG722_FreeDecoder(G722DecInst *G722dec_inst);
/****************************************************************************
 * WebRtcG722_Decode(...)
 *
 * This function decodes a packet with G722 frame(s).
 * (Fixed copy-paste error: the old comment referred to G729 frames.
 * NOTE(review): the original "multiple of 80 samples (80*frames/packet)"
 * figure also came from the G.729 template; confirm the real G.722
 * output length against the implementation before relying on it.)
 *
 * Input:
 *     - G722dec_inst         : G722 instance, i.e. the user that should decode
 *                              a packet
 *     - encoded              : Encoded G722 frame(s)
 *     - len                  : Bytes in encoded vector
 *
 * Output:
 *     - decoded              : The decoded vector
 *     - speechType           : 1 normal, 2 CNG (Since G722 does not have its own
 *                              DTX/CNG scheme it should always return 1)
 *
 * Return value               : >0 - Samples in decoded vector
 *                              -1 - Error
 */
WebRtc_Word16 WebRtcG722_Decode(G722DecInst *G722dec_inst,
                                WebRtc_Word16 *encoded,
                                WebRtc_Word16 len,
                                WebRtc_Word16 *decoded,
                                WebRtc_Word16 *speechType);
/****************************************************************************
 * WebRtcG722_Version(...)
 *
 * Get a string with the current version of the codec.
 *
 * Input:
 *     - len                  : Size in bytes of the buffer pointed to by
 *                              versionStr
 * Output:
 *     - versionStr           : Buffer that receives the version string
 */
WebRtc_Word16 WebRtcG722_Version(char *versionStr, short len);
#ifdef __cplusplus
}
#endif
#endif /* MODULES_AUDIO_CODING_CODECS_G722_MAIN_INTERFACE_G722_INTERFACE_H_ */

View File

@ -1,53 +0,0 @@
# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
# Android build rules for the static G.722 codec library (libwebrtc_g722).
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_ARM_MODE := arm
LOCAL_MODULE_CLASS := STATIC_LIBRARIES
LOCAL_MODULE := libwebrtc_g722
LOCAL_MODULE_TAGS := optional
LOCAL_GENERATED_SOURCES :=
LOCAL_SRC_FILES := g722_interface.c \
g722_encode.c \
g722_decode.c
# Flags passed to both C and C++ files.
MY_CFLAGS :=
MY_CFLAGS_C :=
MY_DEFS := '-DNO_TCMALLOC' \
'-DNO_HEAPCHECKER' \
'-DWEBRTC_TARGET_PC' \
'-DWEBRTC_LINUX' \
'-DWEBRTC_THREAD_RR' \
'-DWEBRTC_ANDROID' \
'-DANDROID'
LOCAL_CFLAGS := $(MY_CFLAGS_C) $(MY_CFLAGS) $(MY_DEFS)
# Include paths placed before CFLAGS/CPPFLAGS
LOCAL_C_INCLUDES := $(LOCAL_PATH)/../../../../../.. \
$(LOCAL_PATH)/../interface
# Flags passed to only C++ (and not C) files.
LOCAL_CPPFLAGS :=
LOCAL_LDFLAGS :=
LOCAL_STATIC_LIBRARIES :=
LOCAL_SHARED_LIBRARIES := libcutils \
libdl \
libstlport
LOCAL_ADDITIONAL_DEPENDENCIES :=
ifneq ($(MY_WEBRTC_NDK_BUILD),true)
include external/stlport/libstlport.mk
include $(BUILD_STATIC_LIBRARY)
endif

View File

@ -1,56 +0,0 @@
# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
# Build targets for the G.722 codec static library and its test application.
{
'includes': [
'../../../../../../common_settings.gypi', # Common settings
],
'targets': [
{
'target_name': 'G722',
'type': '<(library)',
'include_dirs': [
'../interface',
],
'direct_dependent_settings': {
'include_dirs': [
'../interface',
],
},
'sources': [
'../interface/g722_interface.h',
'g722_interface.c',
'g722_encode.c',
'g722_decode.c',
'g722_enc_dec.h',
],
},
{
'target_name': 'G722Test',
'type': 'executable',
'dependencies': [
'G722',
],
'sources': [
'../testG722/testG722.cpp',
],
'conditions': [
['OS=="linux"', {
'cflags': [
'-fexceptions', # enable exceptions
],
}],
],
},
],
}
# Local Variables:
# tab-width:2
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=2 shiftwidth=2:

View File

@ -1,407 +0,0 @@
/*
* SpanDSP - a series of DSP components for telephony
*
* g722_decode.c - The ITU G.722 codec, decode part.
*
* Written by Steve Underwood <steveu@coppice.org>
*
* Copyright (C) 2005 Steve Underwood
*
* Despite my general liking of the GPL, I place my own contributions
* to this code in the public domain for the benefit of all mankind -
* even the slimy ones who might try to proprietize my work and use it
* to my detriment.
*
* Based in part on a single channel G.722 codec which is:
*
* Copyright (c) CMU 1993
* Computer Science, Speech Group
* Chengxiang Lu and Alex Hauptmann
*
* $Id: g722_decode.c,v 1.15 2006/07/07 16:37:49 steveu Exp $
*
* Modifications for WebRtc, 2011/04/28, by tlegrand:
* -Removed usage of inttypes.h and tgmath.h
* -Changed to use WebRtc types
* -Changed __inline__ to __inline
* -Added saturation check on output
*/
/*! \file */
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#include <stdio.h>
#include <memory.h>
#include <stdlib.h>
#include "typedefs.h"
#include "g722_enc_dec.h"
#if !defined(FALSE)
#define FALSE 0
#endif
#if !defined(TRUE)
#define TRUE (!FALSE)
#endif
/* Clamp a 32-bit amplitude into the 16-bit sample range. */
static __inline WebRtc_Word16 saturate(WebRtc_Word32 amp)
{
    if (amp > WEBRTC_INT16_MAX)
        return WEBRTC_INT16_MAX;
    if (amp < WEBRTC_INT16_MIN)
        return WEBRTC_INT16_MIN;
    /* In range: pass the value through unchanged. */
    return (WebRtc_Word16) amp;
}
/*- End of function --------------------------------------------------------*/
/* ITU-T G.722 "Block 4": update the adaptive predictor state of one
 * sub-band (band 0 = low, band 1 = high) from the latest quantized
 * difference signal d. Statement order follows the specification and is
 * significant for bit exactness - do not reorder. */
static void block4(g722_decode_state_t *s, int band, int d);

static void block4(g722_decode_state_t *s, int band, int d)
{
    int wd1;
    int wd2;
    int wd3;
    int i;

    /* Block 4, RECONS: reconstructed signal = predictor output + difference. */
    s->band[band].d[0] = d;
    s->band[band].r[0] = saturate(s->band[band].s + d);
    /* Block 4, PARREC: partially reconstructed signal (zero section only). */
    s->band[band].p[0] = saturate(s->band[band].sz + d);
    /* Block 4, UPPOL2: update second pole coefficient. */
    for (i = 0; i < 3; i++)
        s->band[band].sg[i] = s->band[band].p[i] >> 15;
    wd1 = saturate(s->band[band].a[1] << 2);
    wd2 = (s->band[band].sg[0] == s->band[band].sg[1]) ? -wd1 : wd1;
    if (wd2 > 32767)
        wd2 = 32767;
    wd3 = (s->band[band].sg[0] == s->band[band].sg[2]) ? 128 : -128;
    wd3 += (wd2 >> 7);
    /* Leak factor 32512/32768 keeps the adaptation stable. */
    wd3 += (s->band[band].a[2]*32512) >> 15;
    if (wd3 > 12288)
        wd3 = 12288;
    else if (wd3 < -12288)
        wd3 = -12288;
    s->band[band].ap[2] = wd3;
    /* Block 4, UPPOL1: update first pole coefficient, bounded by ap[2]. */
    s->band[band].sg[0] = s->band[band].p[0] >> 15;
    s->band[band].sg[1] = s->band[band].p[1] >> 15;
    wd1 = (s->band[band].sg[0] == s->band[band].sg[1]) ? 192 : -192;
    wd2 = (s->band[band].a[1]*32640) >> 15;
    s->band[band].ap[1] = saturate(wd1 + wd2);
    wd3 = saturate(15360 - s->band[band].ap[2]);
    if (s->band[band].ap[1] > wd3)
        s->band[band].ap[1] = wd3;
    else if (s->band[band].ap[1] < -wd3)
        s->band[band].ap[1] = -wd3;
    /* Block 4, UPZERO: update the six zero coefficients. */
    wd1 = (d == 0) ? 0 : 128;
    s->band[band].sg[0] = d >> 15;
    for (i = 1; i < 7; i++)
    {
        s->band[band].sg[i] = s->band[band].d[i] >> 15;
        wd2 = (s->band[band].sg[i] == s->band[band].sg[0]) ? wd1 : -wd1;
        wd3 = (s->band[band].b[i]*32640) >> 15;
        s->band[band].bp[i] = saturate(wd2 + wd3);
    }
    /* Block 4, DELAYA: shift the difference/reconstruction histories and
     * commit the updated coefficients. */
    for (i = 6; i > 0; i--)
    {
        s->band[band].d[i] = s->band[band].d[i - 1];
        s->band[band].b[i] = s->band[band].bp[i];
    }
    for (i = 2; i > 0; i--)
    {
        s->band[band].r[i] = s->band[band].r[i - 1];
        s->band[band].p[i] = s->band[band].p[i - 1];
        s->band[band].a[i] = s->band[band].ap[i];
    }
    /* Block 4, FILTEP: two-pole section output. */
    wd1 = saturate(s->band[band].r[1] + s->band[band].r[1]);
    wd1 = (s->band[band].a[1]*wd1) >> 15;
    wd2 = saturate(s->band[band].r[2] + s->band[band].r[2]);
    wd2 = (s->band[band].a[2]*wd2) >> 15;
    s->band[band].sp = saturate(wd1 + wd2);
    /* Block 4, FILTEZ: six-zero section output. */
    s->band[band].sz = 0;
    for (i = 6; i > 0; i--)
    {
        wd1 = saturate(s->band[band].d[i] + s->band[band].d[i]);
        s->band[band].sz += (s->band[band].b[i]*wd1) >> 15;
    }
    s->band[band].sz = saturate(s->band[band].sz);
    /* Block 4, PREDIC: predictor output = pole + zero section outputs. */
    s->band[band].s = saturate(s->band[band].sp + s->band[band].sz);
}
/*- End of function --------------------------------------------------------*/
/* Initialise (allocating if s is NULL) a G.722 decoder state.
 * rate selects the bitstream rate: 48000 -> 6 bits/code, 56000 -> 7,
 * anything else -> 8. options is a bitmask of G722_SAMPLE_RATE_8000 and
 * G722_PACKED (packing only applies to sub-8-bit codes).
 * Returns the ready state, or NULL on allocation failure. */
g722_decode_state_t *g722_decode_init(g722_decode_state_t *s, int rate, int options)
{
    if (s == NULL)
    {
        s = (g722_decode_state_t *) malloc(sizeof(*s));
        if (s == NULL)
            return NULL;
    }
    memset(s, 0, sizeof(*s));
    switch (rate)
    {
    case 48000:
        s->bits_per_sample = 6;
        break;
    case 56000:
        s->bits_per_sample = 7;
        break;
    default:
        s->bits_per_sample = 8;
        break;
    }
    s->eight_k = (options & G722_SAMPLE_RATE_8000) ? TRUE : FALSE;
    s->packed = ((options & G722_PACKED) && s->bits_per_sample != 8) ? TRUE : FALSE;
    /* Initial quantizer scale factors for the low and high bands. */
    s->band[0].det = 32;
    s->band[1].det = 8;
    return s;
}
/*- End of function --------------------------------------------------------*/
/* Free a decoder state allocated by g722_decode_init(). Safe to call with
 * NULL (free(NULL) is a no-op). Always returns 0. */
int g722_decode_release(g722_decode_state_t *s)
{
    free(s);
    return 0;
}
/*- End of function --------------------------------------------------------*/
/* Decode len bytes of G.722 data into linear PCM written to amp[].
 * Output is 16 kHz samples via the receive QMF, or 8 kHz samples when
 * s->eight_k is set. Returns the number of samples written.
 * The computation follows the ITU-T G.722 block structure; statement
 * order is significant for bit exactness. */
int g722_decode(g722_decode_state_t *s, WebRtc_Word16 amp[],
                const WebRtc_UWord8 g722_data[], int len)
{
    /* Quantizer and scale factor tables from ITU-T G.722. */
    static const int wl[8] = {-60, -30, 58, 172, 334, 538, 1198, 3042 };
    static const int rl42[16] = {0, 7, 6, 5, 4, 3, 2, 1, 7, 6, 5, 4, 3, 2, 1, 0 };
    static const int ilb[32] =
    {
        2048, 2093, 2139, 2186, 2233, 2282, 2332,
        2383, 2435, 2489, 2543, 2599, 2656, 2714,
        2774, 2834, 2896, 2960, 3025, 3091, 3158,
        3228, 3298, 3371, 3444, 3520, 3597, 3676,
        3756, 3838, 3922, 4008
    };
    static const int wh[3] = {0, -214, 798};
    static const int rh2[4] = {2, 1, 2, 1};
    static const int qm2[4] = {-7408, -1616, 7408, 1616};
    static const int qm4[16] =
    {
              0, -20456, -12896,  -8968,
          -6288,  -4240,  -2584,  -1200,
          20456,  12896,   8968,   6288,
           4240,   2584,   1200,      0
    };
    static const int qm5[32] =
    {
           -280,   -280, -23352, -17560,
         -14120, -11664,  -9752,  -8184,
          -6864,  -5712,  -4696,  -3784,
          -2960,  -2208,  -1520,   -880,
          23352,  17560,  14120,  11664,
           9752,   8184,   6864,   5712,
           4696,   3784,   2960,   2208,
           1520,    880,    280,   -280
    };
    static const int qm6[64] =
    {
           -136,   -136,   -136,   -136,
         -24808, -21904, -19008, -16704,
         -14984, -13512, -12280, -11192,
         -10232,  -9360,  -8576,  -7856,
          -7192,  -6576,  -6000,  -5456,
          -4944,  -4464,  -4008,  -3576,
          -3168,  -2776,  -2400,  -2032,
          -1688,  -1360,  -1040,   -728,
          24808,  21904,  19008,  16704,
          14984,  13512,  12280,  11192,
          10232,   9360,   8576,   7856,
           7192,   6576,   6000,   5456,
           4944,   4464,   4008,   3576,
           3168,   2776,   2400,   2032,
           1688,   1360,   1040,    728,
            432,    136,   -432,   -136
    };
    static const int qmf_coeffs[12] =
    {
           3,  -11,   12,   32, -210,  951, 3876, -805,  362, -156,   53,  -11,
    };

    int dlowt;
    int rlow;
    int ihigh;
    int dhigh;
    int rhigh;
    int xout1;
    int xout2;
    int wd1;
    int wd2;
    int wd3;
    int code;
    int outlen;
    int i;
    int j;

    outlen = 0;
    rhigh = 0;
    for (j = 0; j < len; )
    {
        if (s->packed)
        {
            /* Unpack the code bits from the byte-aligned input stream. */
            if (s->in_bits < s->bits_per_sample)
            {
                s->in_buffer |= (g722_data[j++] << s->in_bits);
                s->in_bits += 8;
            }
            code = s->in_buffer & ((1 << s->bits_per_sample) - 1);
            s->in_buffer >>= s->bits_per_sample;
            s->in_bits -= s->bits_per_sample;
        }
        else
        {
            code = g722_data[j++];
        }
        /* Split the codeword into the low band index (wd1) and the 2-bit
         * high band index (ihigh), per the configured bits per sample. */
        switch (s->bits_per_sample)
        {
        default:
        case 8:
            wd1 = code & 0x3F;
            ihigh = (code >> 6) & 0x03;
            wd2 = qm6[wd1];
            wd1 >>= 2;
            break;
        case 7:
            wd1 = code & 0x1F;
            ihigh = (code >> 5) & 0x03;
            wd2 = qm5[wd1];
            wd1 >>= 1;
            break;
        case 6:
            wd1 = code & 0x0F;
            ihigh = (code >> 4) & 0x03;
            wd2 = qm4[wd1];
            break;
        }
        /* Block 5L, LOW BAND INVQBL: scale by the low band scale factor. */
        wd2 = (s->band[0].det*wd2) >> 15;
        /* Block 5L, RECONS */
        rlow = s->band[0].s + wd2;
        /* Block 6L, LIMIT: clamp to 15 bit range. */
        if (rlow > 16383)
            rlow = 16383;
        else if (rlow < -16384)
            rlow = -16384;
        /* Block 2L, INVQAL */
        wd2 = qm4[wd1];
        dlowt = (s->band[0].det*wd2) >> 15;
        /* Block 3L, LOGSCL: adapt the logarithmic scale factor. */
        wd2 = rl42[wd1];
        wd1 = (s->band[0].nb*127) >> 7;
        wd1 += wl[wd2];
        if (wd1 < 0)
            wd1 = 0;
        else if (wd1 > 18432)
            wd1 = 18432;
        s->band[0].nb = wd1;
        /* Block 3L, SCALEL: convert log scale factor to linear. */
        wd1 = (s->band[0].nb >> 6) & 31;
        wd2 = 8 - (s->band[0].nb >> 11);
        wd3 = (wd2 < 0)  ?  (ilb[wd1] << -wd2)  :  (ilb[wd1] >> wd2);
        s->band[0].det = wd3 << 2;

        block4(s, 0, dlowt);

        if (!s->eight_k)
        {
            /* Block 2H, INVQAH */
            wd2 = qm2[ihigh];
            dhigh = (s->band[1].det*wd2) >> 15;
            /* Block 5H, RECONS */
            rhigh = dhigh + s->band[1].s;
            /* Block 6H, LIMIT */
            if (rhigh > 16383)
                rhigh = 16383;
            else if (rhigh < -16384)
                rhigh = -16384;
            /* Block 2H, INVQAH */
            wd2 = rh2[ihigh];
            wd1 = (s->band[1].nb*127) >> 7;
            wd1 += wh[wd2];
            if (wd1 < 0)
                wd1 = 0;
            else if (wd1 > 22528)
                wd1 = 22528;
            s->band[1].nb = wd1;
            /* Block 3H, SCALEH */
            wd1 = (s->band[1].nb >> 6) & 31;
            wd2 = 10 - (s->band[1].nb >> 11);
            wd3 = (wd2 < 0)  ?  (ilb[wd1] << -wd2)  :  (ilb[wd1] >> wd2);
            s->band[1].det = wd3 << 2;

            block4(s, 1, dhigh);
        }

        if (s->itu_test_mode)
        {
            /* ITU test mode: the QMF is bypassed and both sub-band signals
             * are emitted directly. */
            amp[outlen++] = (WebRtc_Word16) (rlow << 1);
            amp[outlen++] = (WebRtc_Word16) (rhigh << 1);
        }
        else
        {
            if (s->eight_k)
            {
                amp[outlen++] = (WebRtc_Word16) (rlow << 1);
            }
            else
            {
                /* Apply the receive QMF to merge the two sub-bands back into
                 * two 16 kHz output samples. */
                for (i = 0; i < 22; i++)
                    s->x[i] = s->x[i + 2];
                s->x[22] = rlow + rhigh;
                s->x[23] = rlow - rhigh;

                xout1 = 0;
                xout2 = 0;
                for (i = 0; i < 12; i++)
                {
                    xout2 += s->x[2*i]*qmf_coeffs[i];
                    xout1 += s->x[2*i + 1]*qmf_coeffs[11 - i];
                }
                /* We shift by 12 to allow for the QMF filters (DC gain = 4096), less 1
                   to allow for the 15 bit input to the G.722 algorithm. */
                /* WebRtc, tlegrand: added saturation */
                amp[outlen++] = saturate(xout1 >> 11);
                amp[outlen++] = saturate(xout2 >> 11);
            }
        }
    }
    return outlen;
}
/*- End of function --------------------------------------------------------*/
/*- End of file ------------------------------------------------------------*/

View File

@ -1,154 +0,0 @@
/*
* SpanDSP - a series of DSP components for telephony
*
* g722.h - The ITU G.722 codec.
*
* Written by Steve Underwood <steveu@coppice.org>
*
* Copyright (C) 2005 Steve Underwood
*
* Despite my general liking of the GPL, I place my own contributions
* to this code in the public domain for the benefit of all mankind -
* even the slimy ones who might try to proprietize my work and use it
* to my detriment.
*
* Based on a single channel G.722 codec which is:
*
***** Copyright (c) CMU 1993 *****
* Computer Science, Speech Group
* Chengxiang Lu and Alex Hauptmann
*
* $Id: g722.h,v 1.10 2006/06/16 12:45:53 steveu Exp $
*
* Modifications for WebRtc, 2011/04/28, by tlegrand:
* -Changed to use WebRtc types
* -Added new defines for minimum and maximum values of short int
*/
/*! \file */
#if !defined(_G722_ENC_DEC_H_)
#define _G722_ENC_DEC_H_
/*! \page g722_page G.722 encoding and decoding
\section g722_page_sec_1 What does it do?
The G.722 module is a bit exact implementation of the ITU G.722 specification for all three
specified bit rates - 64000bps, 56000bps and 48000bps. It passes the ITU tests.
To allow fast and flexible interworking with narrow band telephony, the encoder and decoder
support an option for the linear audio to be an 8k samples/second stream. In this mode the
codec is considerably faster, and still fully compatible with wideband terminals using G.722.
\section g722_page_sec_2 How does it work?
???.
*/
#define WEBRTC_INT16_MAX 32767
#define WEBRTC_INT16_MIN -32768
enum
{
G722_SAMPLE_RATE_8000 = 0x0001,
G722_PACKED = 0x0002
};
typedef struct
{
    /*! TRUE if operating in the special ITU test mode, with the band split
        filters disabled. */
    int itu_test_mode;
    /*! TRUE if the G.722 data is packed */
    int packed;
    /*! TRUE if encode from 8k samples/second */
    int eight_k;
    /*! 6 for 48 kbit/s, 7 for 56 kbit/s, or 8 for 64 kbit/s. */
    int bits_per_sample;

    /*! Signal history for the QMF (two new samples shift in per frame). */
    int x[24];

    /*! Adaptive predictor state, one entry per sub-band
        (0 = low band, 1 = high band). */
    struct
    {
        int s;      /*!< Predictor output (sp + sz). */
        int sp;     /*!< Two-pole section output. */
        int sz;     /*!< Six-zero section output. */
        int r[3];   /*!< Reconstructed signal history. */
        int a[3];   /*!< Pole coefficients. */
        int ap[3];  /*!< Updated pole coefficients. */
        int p[3];   /*!< Partially reconstructed signal history. */
        int d[7];   /*!< Quantized difference signal history. */
        int b[7];   /*!< Zero coefficients. */
        int bp[7];  /*!< Updated zero coefficients. */
        int sg[7];  /*!< Sign scratch values used during adaptation. */
        int nb;     /*!< Logarithmic quantizer scale factor. */
        int det;    /*!< Linear quantizer scale factor. */
    } band[2];

    /*! Bit packing accumulators, used when `packed` is TRUE. */
    unsigned int in_buffer;
    int in_bits;
    unsigned int out_buffer;
    int out_bits;
} g722_encode_state_t;
typedef struct
{
    /*! TRUE if operating in the special ITU test mode, with the band split
        filters disabled. */
    int itu_test_mode;
    /*! TRUE if the G.722 data is packed */
    int packed;
    /*! TRUE if decode to 8k samples/second */
    int eight_k;
    /*! 6 for 48 kbit/s, 7 for 56 kbit/s, or 8 for 64 kbit/s. */
    int bits_per_sample;

    /*! Signal history for the QMF (two new samples shift in per frame). */
    int x[24];

    /*! Adaptive predictor state, one entry per sub-band
        (0 = low band, 1 = high band). */
    struct
    {
        int s;      /*!< Predictor output (sp + sz). */
        int sp;     /*!< Two-pole section output. */
        int sz;     /*!< Six-zero section output. */
        int r[3];   /*!< Reconstructed signal history. */
        int a[3];   /*!< Pole coefficients. */
        int ap[3];  /*!< Updated pole coefficients. */
        int p[3];   /*!< Partially reconstructed signal history. */
        int d[7];   /*!< Quantized difference signal history. */
        int b[7];   /*!< Zero coefficients. */
        int bp[7];  /*!< Updated zero coefficients. */
        int sg[7];  /*!< Sign scratch values used during adaptation. */
        int nb;     /*!< Logarithmic quantizer scale factor. */
        int det;    /*!< Linear quantizer scale factor. */
    } band[2];

    /*! Bit unpacking accumulators, used when `packed` is TRUE. */
    unsigned int in_buffer;
    int in_bits;
    unsigned int out_buffer;
    int out_bits;
} g722_decode_state_t;
#ifdef __cplusplus
extern "C" {
#endif
g722_encode_state_t *g722_encode_init(g722_encode_state_t *s, int rate, int options);
int g722_encode_release(g722_encode_state_t *s);
int g722_encode(g722_encode_state_t *s,
WebRtc_UWord8 g722_data[],
const WebRtc_Word16 amp[],
int len);
g722_decode_state_t *g722_decode_init(g722_decode_state_t *s, int rate, int options);
int g722_decode_release(g722_decode_state_t *s);
int g722_decode(g722_decode_state_t *s,
WebRtc_Word16 amp[],
const WebRtc_UWord8 g722_data[],
int len);
#ifdef __cplusplus
}
#endif
#endif

View File

@ -1,433 +0,0 @@
/*
* SpanDSP - a series of DSP components for telephony
*
* g722_encode.c - The ITU G.722 codec, encode part.
*
* Written by Steve Underwood <steveu@coppice.org>
*
* Copyright (C) 2005 Steve Underwood
*
* All rights reserved.
*
* Despite my general liking of the GPL, I place my own contributions
* to this code in the public domain for the benefit of all mankind -
* even the slimy ones who might try to proprietize my work and use it
* to my detriment.
*
* Based on a single channel 64kbps only G.722 codec which is:
*
***** Copyright (c) CMU 1993 *****
* Computer Science, Speech Group
* Chengxiang Lu and Alex Hauptmann
*
* $Id: g722_encode.c,v 1.14 2006/07/07 16:37:49 steveu Exp $
*
* Modifications for WebRtc, 2011/04/28, by tlegrand:
* -Removed usage of inttypes.h and tgmath.h
* -Changed to use WebRtc types
* -Added option to run encoder bitexact with ITU-T reference implementation
*/
/*! \file */
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#include <stdio.h>
#include <memory.h>
#include <stdlib.h>
#include "typedefs.h"
#include "g722_enc_dec.h"
#if !defined(FALSE)
#define FALSE 0
#endif
#if !defined(TRUE)
#define TRUE (!FALSE)
#endif
/* Saturate a 32 bit amplitude to 16 bit range. Values above
 * WEBRTC_INT16_MAX clip high; values below WEBRTC_INT16_MIN clip low. */
static __inline WebRtc_Word16 saturate(WebRtc_Word32 amp)
{
    WebRtc_Word16 narrowed;

    narrowed = (WebRtc_Word16) amp;
    if (amp != narrowed)
    {
        /* Out of range - pick the nearest 16 bit extreme. */
        narrowed = (amp > WEBRTC_INT16_MAX) ? WEBRTC_INT16_MAX : WEBRTC_INT16_MIN;
    }
    return narrowed;
}
/*- End of function --------------------------------------------------------*/
/* ITU-T G.722 "Block 4": update the adaptive predictor state of one
 * sub-band (band 0 = low, band 1 = high) from the latest quantized
 * difference signal d. Mirrors the decoder's block4; statement order is
 * normative for bit exactness - do not reorder. */
static void block4(g722_encode_state_t *s, int band, int d)
{
    int wd1;
    int wd2;
    int wd3;
    int i;

    /* Block 4, RECONS: reconstructed signal = predictor output + difference. */
    s->band[band].d[0] = d;
    s->band[band].r[0] = saturate(s->band[band].s + d);
    /* Block 4, PARREC: partially reconstructed signal (zero section only). */
    s->band[band].p[0] = saturate(s->band[band].sz + d);
    /* Block 4, UPPOL2: update second pole coefficient. */
    for (i = 0; i < 3; i++)
        s->band[band].sg[i] = s->band[band].p[i] >> 15;
    wd1 = saturate(s->band[band].a[1] << 2);
    wd2 = (s->band[band].sg[0] == s->band[band].sg[1]) ? -wd1 : wd1;
    if (wd2 > 32767)
        wd2 = 32767;
    wd3 = (wd2 >> 7) + ((s->band[band].sg[0] == s->band[band].sg[2]) ? 128 : -128);
    /* Leak factor 32512/32768 keeps the adaptation stable. */
    wd3 += (s->band[band].a[2]*32512) >> 15;
    if (wd3 > 12288)
        wd3 = 12288;
    else if (wd3 < -12288)
        wd3 = -12288;
    s->band[band].ap[2] = wd3;
    /* Block 4, UPPOL1: update first pole coefficient, bounded by ap[2]. */
    s->band[band].sg[0] = s->band[band].p[0] >> 15;
    s->band[band].sg[1] = s->band[band].p[1] >> 15;
    wd1 = (s->band[band].sg[0] == s->band[band].sg[1]) ? 192 : -192;
    wd2 = (s->band[band].a[1]*32640) >> 15;
    s->band[band].ap[1] = saturate(wd1 + wd2);
    wd3 = saturate(15360 - s->band[band].ap[2]);
    if (s->band[band].ap[1] > wd3)
        s->band[band].ap[1] = wd3;
    else if (s->band[band].ap[1] < -wd3)
        s->band[band].ap[1] = -wd3;
    /* Block 4, UPZERO: update the six zero coefficients. */
    wd1 = (d == 0) ? 0 : 128;
    s->band[band].sg[0] = d >> 15;
    for (i = 1; i < 7; i++)
    {
        s->band[band].sg[i] = s->band[band].d[i] >> 15;
        wd2 = (s->band[band].sg[i] == s->band[band].sg[0]) ? wd1 : -wd1;
        wd3 = (s->band[band].b[i]*32640) >> 15;
        s->band[band].bp[i] = saturate(wd2 + wd3);
    }
    /* Block 4, DELAYA: shift histories and commit updated coefficients. */
    for (i = 6; i > 0; i--)
    {
        s->band[band].d[i] = s->band[band].d[i - 1];
        s->band[band].b[i] = s->band[band].bp[i];
    }
    for (i = 2; i > 0; i--)
    {
        s->band[band].r[i] = s->band[band].r[i - 1];
        s->band[band].p[i] = s->band[band].p[i - 1];
        s->band[band].a[i] = s->band[band].ap[i];
    }
    /* Block 4, FILTEP: two-pole section output. */
    wd1 = saturate(s->band[band].r[1] + s->band[band].r[1]);
    wd1 = (s->band[band].a[1]*wd1) >> 15;
    wd2 = saturate(s->band[band].r[2] + s->band[band].r[2]);
    wd2 = (s->band[band].a[2]*wd2) >> 15;
    s->band[band].sp = saturate(wd1 + wd2);
    /* Block 4, FILTEZ: six-zero section output. */
    s->band[band].sz = 0;
    for (i = 6; i > 0; i--)
    {
        wd1 = saturate(s->band[band].d[i] + s->band[band].d[i]);
        s->band[band].sz += (s->band[band].b[i]*wd1) >> 15;
    }
    s->band[band].sz = saturate(s->band[band].sz);
    /* Block 4, PREDIC: predictor output = pole + zero section outputs. */
    s->band[band].s = saturate(s->band[band].sp + s->band[band].sz);
}
/*- End of function --------------------------------------------------------*/
/* Initialise (allocating if s is NULL) a G.722 encoder state.
 * rate selects the bitstream rate: 48000 -> 6 bits/code, 56000 -> 7,
 * anything else -> 8. options is a bitmask of G722_SAMPLE_RATE_8000 and
 * G722_PACKED (packing only applies to sub-8-bit codes).
 * Returns the ready state, or NULL on allocation failure. */
g722_encode_state_t *g722_encode_init(g722_encode_state_t *s, int rate, int options)
{
    if (s == NULL)
    {
        s = (g722_encode_state_t *) malloc(sizeof(*s));
        if (s == NULL)
            return NULL;
    }
    memset(s, 0, sizeof(*s));
    switch (rate)
    {
    case 48000:
        s->bits_per_sample = 6;
        break;
    case 56000:
        s->bits_per_sample = 7;
        break;
    default:
        s->bits_per_sample = 8;
        break;
    }
    s->eight_k = (options & G722_SAMPLE_RATE_8000) ? TRUE : FALSE;
    s->packed = ((options & G722_PACKED) && s->bits_per_sample != 8) ? TRUE : FALSE;
    /* Initial quantizer scale factors for the low and high bands. */
    s->band[0].det = 32;
    s->band[1].det = 8;
    return s;
}
/*- End of function --------------------------------------------------------*/
/* Free an encoder state allocated by g722_encode_init(). Safe to call with
 * NULL (free(NULL) is a no-op). Always returns 0. */
int g722_encode_release(g722_encode_state_t *s)
{
    free(s);
    return 0;
}
/*- End of function --------------------------------------------------------*/
/* WebRtc, tlegrand:
* Only define the following if bit-exactness with reference implementation
* is needed. Will only have any effect if input signal is saturated.
*/
//#define RUN_LIKE_REFERENCE_G722
#ifdef RUN_LIKE_REFERENCE_G722
/* Clamp a sub-band sample to the 15 bit range [-16384, 16383], exactly as
 * the ITU-T reference implementation does. Only compiled when bit-exact
 * comparison with the reference is wanted (RUN_LIKE_REFERENCE_G722). */
WebRtc_Word16 limitValues (WebRtc_Word16 rl)
{

    WebRtc_Word16 yl;

    yl = (rl > 16383) ? 16383 : ((rl < -16384) ? -16384 : rl);

    return (yl);
}
#endif
/* Encode len linear PCM samples from amp[] into G.722 data in g722_data[].
 * Input is 16 kHz samples split by the transmit QMF, or 8 kHz samples when
 * s->eight_k is set. Returns the number of bytes written.
 * The computation follows the ITU-T G.722 block structure; statement
 * order is significant for bit exactness. */
int g722_encode(g722_encode_state_t *s, WebRtc_UWord8 g722_data[],
                const WebRtc_Word16 amp[], int len)
{
    /* Quantizer and scale factor tables from ITU-T G.722. */
    static const int q6[32] =
    {
           0,   35,   72,  110,  150,  190,  233,  276,
         323,  370,  422,  473,  530,  587,  650,  714,
         786,  858,  940, 1023, 1121, 1219, 1339, 1458,
        1612, 1765, 1980, 2195, 2557, 2919,    0,    0
    };
    static const int iln[32] =
    {
         0, 63, 62, 31, 30, 29, 28, 27,
        26, 25, 24, 23, 22, 21, 20, 19,
        18, 17, 16, 15, 14, 13, 12, 11,
        10,  9,  8,  7,  6,  5,  4,  0
    };
    static const int ilp[32] =
    {
         0, 61, 60, 59, 58, 57, 56, 55,
        54, 53, 52, 51, 50, 49, 48, 47,
        46, 45, 44, 43, 42, 41, 40, 39,
        38, 37, 36, 35, 34, 33, 32,  0
    };
    static const int wl[8] =
    {
        -60, -30, 58, 172, 334, 538, 1198, 3042
    };
    static const int rl42[16] =
    {
        0, 7, 6, 5, 4, 3, 2, 1, 7, 6, 5, 4, 3, 2, 1, 0
    };
    static const int ilb[32] =
    {
        2048, 2093, 2139, 2186, 2233, 2282, 2332,
        2383, 2435, 2489, 2543, 2599, 2656, 2714,
        2774, 2834, 2896, 2960, 3025, 3091, 3158,
        3228, 3298, 3371, 3444, 3520, 3597, 3676,
        3756, 3838, 3922, 4008
    };
    static const int qm4[16] =
    {
             0, -20456, -12896, -8968,
         -6288,  -4240,  -2584, -1200,
         20456,  12896,   8968,  6288,
          4240,   2584,   1200,     0
    };
    static const int qm2[4] =
    {
        -7408, -1616, 7408, 1616
    };
    static const int qmf_coeffs[12] =
    {
        3, -11, 12, 32, -210, 951, 3876, -805, 362, -156, 53, -11,
    };
    static const int ihn[3] = {0, 1, 0};
    static const int ihp[3] = {0, 3, 2};
    static const int wh[3] = {0, -214, 798};
    static const int rh2[4] = {2, 1, 2, 1};

    int dlow;
    int dhigh;
    int el;
    int wd;
    int wd1;
    int ril;
    int wd2;
    int il4;
    int ih2;
    int wd3;
    int eh;
    int mih;
    int i;
    int j;
    /* Low and high band PCM from the QMF */
    int xlow;
    int xhigh;
    int g722_bytes;
    /* Even and odd tap accumulators */
    int sumeven;
    int sumodd;
    int ihigh;
    int ilow;
    int code;

    g722_bytes = 0;
    xhigh = 0;
    for (j = 0; j < len; )
    {
        if (s->itu_test_mode)
        {
            /* ITU test mode: feed the same sample to both sub-bands,
             * bypassing the QMF. */
            xlow =
            xhigh = amp[j++] >> 1;
        }
        else
        {
            if (s->eight_k)
            {
                /* We shift by 1 to allow for the 15 bit input to the G.722 algorithm. */
                xlow = amp[j++] >> 1;
            }
            else
            {
                /* Apply the transmit QMF */
                /* Shuffle the buffer down */
                for (i = 0; i < 22; i++)
                    s->x[i] = s->x[i + 2];
                s->x[22] = amp[j++];
                s->x[23] = amp[j++];

                /* Discard every other QMF output */
                sumeven = 0;
                sumodd = 0;
                for (i = 0; i < 12; i++)
                {
                    sumodd += s->x[2*i]*qmf_coeffs[i];
                    sumeven += s->x[2*i + 1]*qmf_coeffs[11 - i];
                }
                /* We shift by 12 to allow for the QMF filters (DC gain = 4096), plus 1
                   to allow for us summing two filters, plus 1 to allow for the 15 bit
                   input to the G.722 algorithm. */
                xlow = (sumeven + sumodd) >> 14;
                xhigh = (sumeven - sumodd) >> 14;

#ifdef RUN_LIKE_REFERENCE_G722
                /* The following lines are only used to verify bit-exactness
                 * with reference implementation of G.722. Higher precision
                 * is achieved without limiting the values.
                 */
                xlow = limitValues(xlow);
                xhigh = limitValues(xhigh);
#endif
            }
        }
        /* Block 1L, SUBTRA: low band prediction error. */
        el = saturate(xlow - s->band[0].s);
        /* Block 1L, QUANTL: find the quantizer interval for |el|. */
        wd = (el >= 0)  ?  el  :  -(el + 1);

        for (i = 1; i < 30; i++)
        {
            wd1 = (q6[i]*s->band[0].det) >> 12;
            if (wd < wd1)
                break;
        }
        ilow = (el < 0)  ?  iln[i]  :  ilp[i];

        /* Block 2L, INVQAL */
        ril = ilow >> 2;
        wd2 = qm4[ril];
        dlow = (s->band[0].det*wd2) >> 15;

        /* Block 3L, LOGSCL: adapt the logarithmic scale factor. */
        il4 = rl42[ril];
        wd = (s->band[0].nb*127) >> 7;
        s->band[0].nb = wd + wl[il4];
        if (s->band[0].nb < 0)
            s->band[0].nb = 0;
        else if (s->band[0].nb > 18432)
            s->band[0].nb = 18432;

        /* Block 3L, SCALEL: convert log scale factor to linear. */
        wd1 = (s->band[0].nb >> 6) & 31;
        wd2 = 8 - (s->band[0].nb >> 11);
        wd3 = (wd2 < 0)  ?  (ilb[wd1] << -wd2)  :  (ilb[wd1] >> wd2);
        s->band[0].det = wd3 << 2;

        block4(s, 0, dlow);

        if (s->eight_k)
        {
            /* Just leave the high bits as zero */
            code = (0xC0 | ilow) >> (8 - s->bits_per_sample);
        }
        else
        {
            /* Block 1H, SUBTRA: high band prediction error. */
            eh = saturate(xhigh - s->band[1].s);
            /* Block 1H, QUANTH: 2 level quantization of the high band. */
            wd = (eh >= 0)  ?  eh  :  -(eh + 1);
            wd1 = (564*s->band[1].det) >> 12;
            mih = (wd >= wd1)  ?  2  :  1;
            ihigh = (eh < 0)  ?  ihn[mih]  :  ihp[mih];

            /* Block 2H, INVQAH */
            wd2 = qm2[ihigh];
            dhigh = (s->band[1].det*wd2) >> 15;

            /* Block 3H, LOGSCH */
            ih2 = rh2[ihigh];
            wd = (s->band[1].nb*127) >> 7;
            s->band[1].nb = wd + wh[ih2];
            if (s->band[1].nb < 0)
                s->band[1].nb = 0;
            else if (s->band[1].nb > 22528)
                s->band[1].nb = 22528;

            /* Block 3H, SCALEH */
            wd1 = (s->band[1].nb >> 6) & 31;
            wd2 = 10 - (s->band[1].nb >> 11);
            wd3 = (wd2 < 0)  ?  (ilb[wd1] << -wd2)  :  (ilb[wd1] >> wd2);
            s->band[1].det = wd3 << 2;

            block4(s, 1, dhigh);
            /* Merge the 2 high band bits above the low band bits. */
            code = ((ihigh << 6) | ilow) >> (8 - s->bits_per_sample);
        }

        if (s->packed)
        {
            /* Pack the code bits */
            s->out_buffer |= (code << s->out_bits);
            s->out_bits += s->bits_per_sample;
            if (s->out_bits >= 8)
            {
                g722_data[g722_bytes++] = (WebRtc_UWord8) (s->out_buffer & 0xFF);
                s->out_bits -= 8;
                s->out_buffer >>= 8;
            }
        }
        else
        {
            g722_data[g722_bytes++] = (WebRtc_UWord8) code;
        }
    }
    return g722_bytes;
}
/*- End of function --------------------------------------------------------*/
/*- End of file ------------------------------------------------------------*/

View File

@ -1,115 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <stdlib.h>
#include <string.h>
#include "g722_interface.h"
#include "g722_enc_dec.h"
#include "typedefs.h"
/* Allocate a G.722 encoder instance and store it in *G722enc_inst.
 * Returns 0 on success, -1 on allocation failure. */
WebRtc_Word16 WebRtcG722_CreateEncoder(G722EncInst **G722enc_inst)
{
    *G722enc_inst = (G722EncInst *) malloc(sizeof(g722_encode_state_t));
    return (*G722enc_inst == NULL) ? -1 : 0;
}
/* Reset an existing G.722 encoder instance: 64 kbit/s, option flags = 2
 * (per the original comment, "wideband mode").
 * Returns 0 on success, -1 if the instance could not be initialised. */
WebRtc_Word16 WebRtcG722_EncoderInit(G722EncInst *G722enc_inst)
{
    g722_encode_state_t *state;

    state = g722_encode_init((g722_encode_state_t *) G722enc_inst, 64000, 2);
    return (state == NULL) ? -1 : 0;
}
WebRtc_Word16 WebRtcG722_FreeEncoder(G722EncInst *G722enc_inst)
{
    // Free encoder memory; always returns 0
    return g722_encode_release((g722_encode_state_t*) G722enc_inst);
}
/* Encode len input samples from speechIn into the byte stream `encoded`.
 * Returns the number of bytes produced by the G.722 encoder. */
WebRtc_Word16 WebRtcG722_Encode(G722EncInst *G722enc_inst,
                                WebRtc_Word16 *speechIn,
                                WebRtc_Word16 len,
                                WebRtc_Word16 *encoded)
{
    return g722_encode((g722_encode_state_t *) G722enc_inst,
                       (unsigned char *) encoded, speechIn, len);
}
/* Allocate a G.722 decoder instance and store it in *G722dec_inst.
 * Returns 0 on success, -1 on allocation failure. */
WebRtc_Word16 WebRtcG722_CreateDecoder(G722DecInst **G722dec_inst)
{
    *G722dec_inst = (G722DecInst *) malloc(sizeof(g722_decode_state_t));
    return (*G722dec_inst == NULL) ? -1 : 0;
}
/* Reset an existing G.722 decoder instance: 64 kbit/s, option flags = 2
 * (per the original comment, "wideband mode").
 * Returns 0 on success, -1 if the instance could not be initialised. */
WebRtc_Word16 WebRtcG722_DecoderInit(G722DecInst *G722dec_inst)
{
    g722_decode_state_t *state;

    state = g722_decode_init((g722_decode_state_t *) G722dec_inst, 64000, 2);
    return (state == NULL) ? -1 : 0;
}
WebRtc_Word16 WebRtcG722_FreeDecoder(G722DecInst *G722dec_inst)
{
    // Free decoder memory; always returns 0
    return g722_decode_release((g722_decode_state_t*) G722dec_inst);
}
/* Decode len bytes of G.722 bitstream from `encoded` into PCM samples in
 * `decoded`. speechType is always set to G722_WEBRTC_SPEECH (the decoder
 * does not classify frames). Returns the number of samples produced. */
WebRtc_Word16 WebRtcG722_Decode(G722DecInst *G722dec_inst,
                                WebRtc_Word16 *encoded,
                                WebRtc_Word16 len,
                                WebRtc_Word16 *decoded,
                                WebRtc_Word16 *speechType)
{
    // Decode the G.722 encoder stream
    *speechType=G722_WEBRTC_SPEECH;
    return g722_decode((g722_decode_state_t*) G722dec_inst,
                       decoded, (WebRtc_UWord8*) encoded, len);
}
/* Copy the codec version string (including trailing newline and NUL) into
 * versionStr. Returns 0 on success, -1 if the buffer is too small. */
WebRtc_Word16 WebRtcG722_Version(char *versionStr, short len)
{
    static const char version[] = "2.0.0\n";

    if (strlen(version) >= (unsigned int) len)
    {
        return -1;
    }
    strcpy(versionStr, version);
    return 0;
}

View File

@ -1,157 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* testG722.cpp : Defines the entry point for the console application.
*/
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "typedefs.h"
/* include API */
#include "g722_interface.h"
/* Runtime statistics */
#include <time.h>
#define CLOCKS_PER_SEC_G722 100000
// Forward declaration
typedef struct WebRtcG722EncInst G722EncInst;
typedef struct WebRtcG722DecInst G722DecInst;
/* function for reading audio data from PCM file */
/* Read up to `length` 16 bit samples from `inp` into `data`, zero padding
 * the tail on a short read. Returns 1 when the file ran out of samples
 * (short read), otherwise 0. */
int readframe(WebRtc_Word16 *data, FILE *inp, int length)
{
    short read_count;
    short k;

    read_count = (short) fread(data, sizeof(WebRtc_Word16), length, inp);
    if (read_count >= length)
    {
        return 0;
    }
    /* Short read: pad the remainder with silence and flag end of file. */
    for (k = read_count; k < length; k++)
    {
        data[k] = 0;
    }
    return 1;
}
/* Test driver: reads PCM frames from a file, runs G.722 encode + decode on
 * each frame, writes the bitstream and the decoded speech to files, and
 * reports runtime statistics.
 * Usage: testG722 framelength infile outbitfile outspeechfile */
int main(int argc, char* argv[])
{
    char inname[60], outbit[40], outname[40];
    FILE *inp, *outbitp, *outp;
    int framecnt, endfile;

    WebRtc_Word16 framelength = 160;

    G722EncInst *G722enc_inst;
    G722DecInst *G722dec_inst;

    int err;

    /* Runtime statistics */
    double starttime;
    /* BUG FIX: runtime was uninitialized but accumulated with "+=" below,
     * which is undefined behavior and produced garbage statistics. */
    double runtime = 0.0;
    double length_file;

    WebRtc_Word16 stream_len = 0;
    WebRtc_Word16 shortdata[960];
    WebRtc_Word16 decoded[960];
    WebRtc_Word16 streamdata[80*3];
    WebRtc_Word16 speechType[1];

    /* handling wrong input arguments in the command line */
    if (argc!=5) {
        printf("\n\nWrong number of arguments or flag values.\n\n");

        printf("\n");
        printf("Usage:\n\n");
        printf("./testG722.exe framelength infile outbitfile outspeechfile \n\n");
        printf("with:\n");
        printf("framelength : Framelength in samples.\n\n");
        printf("infile : Normal speech input file\n\n");
        printf("outbitfile : Bitstream output file\n\n");
        printf("outspeechfile: Speech output file\n\n");
        exit(0);
    }

    /* Get frame length and validate it against the fixed buffer sizes. */
    framelength = atoi(argv[1]);
    if (framelength < 1 || framelength > 960) {
        printf("Invalid framelength %d (must be 1..960 samples).\n", framelength);
        exit(1);
    }

    /* Get Input and Output files. Field widths bound the copies so an
     * overlong command line path cannot overflow the fixed buffers. */
    sscanf(argv[2], "%59s", inname);
    sscanf(argv[3], "%39s", outbit);
    sscanf(argv[4], "%39s", outname);

    if ((inp = fopen(inname,"rb")) == NULL) {
        printf(" G.722: Cannot read file %s.\n", inname);
        exit(1);
    }
    if ((outbitp = fopen(outbit,"wb")) == NULL) {
        printf(" G.722: Cannot write file %s.\n", outbit);
        exit(1);
    }
    if ((outp = fopen(outname,"wb")) == NULL) {
        printf(" G.722: Cannot write file %s.\n", outname);
        exit(1);
    }
    printf("\nInput:%s\nOutput bitstream:%s\nOutput:%s\n", inname, outbit, outname);

    /* Create and init */
    WebRtcG722_CreateEncoder((G722EncInst **)&G722enc_inst);
    WebRtcG722_CreateDecoder((G722DecInst **)&G722dec_inst);
    WebRtcG722_EncoderInit((G722EncInst *)G722enc_inst);
    WebRtcG722_DecoderInit((G722DecInst *)G722dec_inst);

    /* Initialize encoder and decoder */
    framecnt = 0;
    endfile = 0;
    while (endfile == 0) {
        framecnt++;

        /* Read speech block */
        endfile = readframe(shortdata, inp, framelength);

        /* Start clock before call to encoder and decoder */
        starttime = clock()/(double)CLOCKS_PER_SEC_G722;

        /* G.722 encoding + decoding */
        stream_len = WebRtcG722_Encode((G722EncInst *)G722enc_inst, shortdata, framelength, streamdata);
        err = WebRtcG722_Decode((G722DecInst *)G722dec_inst, streamdata, stream_len, decoded, speechType);

        /* Stop clock after call to encoder and decoder */
        runtime += (double)((clock()/(double)CLOCKS_PER_SEC_G722)-starttime);

        if (stream_len < 0 || err < 0) {
            /* exit if returned with error */
            printf("Error in encoder/decoder\n");
        } else {
            /* Write coded bits to file */
            fwrite(streamdata,sizeof(short),stream_len/2,outbitp);
            /* Write coded speech to file */
            fwrite(decoded,sizeof(short),framelength,outp);
        }
    }

    WebRtcG722_FreeEncoder((G722EncInst *)G722enc_inst);
    WebRtcG722_FreeDecoder((G722DecInst *)G722dec_inst);

    length_file = ((double)framecnt*(double)framelength/16000);
    printf("\n\nLength of speech file: %.1f s\n", length_file);
    printf("Time to run G.722: %.2f s (%.2f %% of realtime)\n\n", runtime, (100*runtime/length_file));
    printf("---------------------END----------------------\n");

    fclose(inp);
    fclose(outbitp);
    fclose(outp);

    return 0;
}

View File

@ -1,3 +0,0 @@
tlegrand@google.com
turajs@google.com
jks@google.com

View File

@ -1,106 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_PCM16B_MAIN_INTERFACE_PCM16B_H_
#define WEBRTC_MODULES_AUDIO_CODING_CODECS_PCM16B_MAIN_INTERFACE_PCM16B_H_
/*
* Define the fixpoint numeric formats
*/
#include "typedefs.h"
#ifdef __cplusplus
extern "C" {
#endif
/****************************************************************************
* WebRtcPcm16b_EncodeW16(...)
*
* "Encode" a sample vector to 16 bit linear (Encoded standard is big endian)
*
* Input:
* - speechIn16b : Input speech vector
* - len : Number of samples in speech vector
*
* Output:
* - speechOut16b : Encoded data vector (big endian 16 bit)
*
* Returned value : Size in bytes of speechOut16b
*/
WebRtc_Word16 WebRtcPcm16b_EncodeW16(WebRtc_Word16 *speechIn16b,
WebRtc_Word16 len,
WebRtc_Word16 *speechOut16b);
/****************************************************************************
* WebRtcPcm16b_Encode(...)
*
* "Encode" a sample vector to 16 bit linear (Encoded standard is big endian)
*
* Input:
* - speech16b : Input speech vector
* - len : Number of samples in speech vector
*
* Output:
* - speech8b : Encoded data vector (big endian 16 bit)
*
* Returned value : Size in bytes of speech8b
*/
WebRtc_Word16 WebRtcPcm16b_Encode(WebRtc_Word16 *speech16b,
WebRtc_Word16 len,
unsigned char *speech8b);
/****************************************************************************
 * WebRtcPcm16b_DecodeW16(...)
 *
 * "Decode" a vector to 16 bit linear (Encoded standard is big endian)
 *
 * Input:
 *      - inst          : Decoder instance pointer (NOTE(review): appears
 *                        unused by the stateless PCM16B codec - confirm
 *                        against pcm16b.c)
 *      - speechIn16b   : Encoded data vector (big endian 16 bit)
 *      - len           : Number of bytes in speechIn16b
 *
 * Output:
 *      - speechOut16b  : Decoded speech vector
 *      - speechType    : Speech type flag written by the decoder
 *                        (semantics defined in pcm16b.c - verify there)
 *
 * Returned value      : Samples in speechOut16b
 */

WebRtc_Word16 WebRtcPcm16b_DecodeW16(void *inst,
                                     WebRtc_Word16 *speechIn16b,
                                     WebRtc_Word16 len,
                                     WebRtc_Word16 *speechOut16b,
                                     WebRtc_Word16* speechType);
/****************************************************************************
* WebRtcPcm16b_Decode(...)
*
* "Decode" a vector to 16 bit linear (Encoded standard is big endian)
*
* Input:
* - speech8b : Encoded data vector (big endian 16 bit)
* - len : Number of bytes in speech8b
*
* Output:
* - speech16b : Decoded speech vector
*
* Returned value : Samples in speech16b
*/
WebRtc_Word16 WebRtcPcm16b_Decode(unsigned char *speech8b,
WebRtc_Word16 len,
WebRtc_Word16 *speech16b);
#ifdef __cplusplus
}
#endif
#endif /* PCM16B */

View File

@ -1,51 +0,0 @@
# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_ARM_MODE := arm
LOCAL_MODULE_CLASS := STATIC_LIBRARIES
LOCAL_MODULE := libwebrtc_pcm16b
LOCAL_MODULE_TAGS := optional
LOCAL_GENERATED_SOURCES :=
LOCAL_SRC_FILES := pcm16b.c
# Flags passed to both C and C++ files.
MY_CFLAGS :=
MY_CFLAGS_C :=
MY_DEFS := '-DNO_TCMALLOC' \
'-DNO_HEAPCHECKER' \
'-DWEBRTC_TARGET_PC' \
'-DWEBRTC_LINUX' \
'-DWEBRTC_THREAD_RR' \
'-DWEBRTC_ANDROID' \
'-DANDROID'
LOCAL_CFLAGS := $(MY_CFLAGS_C) $(MY_CFLAGS) $(MY_DEFS)
# Include paths placed before CFLAGS/CPPFLAGS
LOCAL_C_INCLUDES := $(LOCAL_PATH)/../../../../../.. \
$(LOCAL_PATH)/../interface
# Flags passed to only C++ (and not C) files.
LOCAL_CPPFLAGS :=
LOCAL_LDFLAGS :=
LOCAL_STATIC_LIBRARIES :=
LOCAL_SHARED_LIBRARIES := libcutils \
libdl \
libstlport
LOCAL_ADDITIONAL_DEPENDENCIES :=
ifneq ($(MY_WEBRTC_NDK_BUILD),true)
include external/stlport/libstlport.mk
include $(BUILD_STATIC_LIBRARY)
endif

Some files were not shown because too many files have changed in this diff Show More