WebRtc_Word32 -> int32_t in audio_processing/

BUG=314

Review URL: https://webrtc-codereview.appspot.com/1307004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@3809 4adac7df-926f-26a2-2b94-8c16560cd09d
Author: pbos@webrtc.org
Date:   2013-04-10 07:50:54 +00:00
Parent: 557e92515d
Commit: b7192b8247
37 changed files with 1189 additions and 1191 deletions
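
The rename is mechanical: the legacy WebRtc_Word*/WebRtc_UWord* aliases map one-to-one onto the fixed-width <stdint.h> types, so the substitution changes spelling only. A minimal sketch of that mapping, roughly as the old typedefs header expressed it (the exact definitions are an assumption here, not part of this diff):

    #include <stdint.h>

    /* Illustrative legacy aliases and their stdint equivalents. */
    typedef int16_t  WebRtc_Word16;
    typedef int32_t  WebRtc_Word32;
    typedef uint8_t  WebRtc_UWord8;
    typedef uint16_t WebRtc_UWord16;
    typedef uint32_t WebRtc_UWord32;

Because the aliases were plain typedefs, every hunk below is a pure textual substitution of the type name; sizes, signedness, and ABI are unchanged.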


@@ -1361,7 +1361,7 @@ static void ComfortNoise(AecCore* aec, float efw[2][PART_LEN1],
 int i, num;
 float rand[PART_LEN];
 float noise, noiseAvg, tmp, tmpAvg;
-WebRtc_Word16 randW16[PART_LEN];
+int16_t randW16[PART_LEN];
 complex_t u[PART_LEN1];
 const float pi2 = 6.28318530717959f;


@@ -80,7 +80,7 @@ struct AecCore {
 int mult; // sampling frequency multiple
 int sampFreq;
-WebRtc_UWord32 seed;
+uint32_t seed;
 float mu; // stepsize
 float errThresh; // error threshold


@@ -45,7 +45,7 @@ int webrtc_aec_instance_count = 0;
 // (controlled by knownDelay)
 static int EstBufDelay(aecpc_t *aecInst);
-WebRtc_Word32 WebRtcAec_Create(void **aecInst)
+int32_t WebRtcAec_Create(void **aecInst)
 {
 aecpc_t *aecpc;
 if (aecInst == NULL) {
@@ -106,7 +106,7 @@ WebRtc_Word32 WebRtcAec_Create(void **aecInst)
 return 0;
 }
-WebRtc_Word32 WebRtcAec_Free(void *aecInst)
+int32_t WebRtcAec_Free(void *aecInst)
 {
 aecpc_t *aecpc = aecInst;
@@ -130,7 +130,7 @@ WebRtc_Word32 WebRtcAec_Free(void *aecInst)
 return 0;
 }
-WebRtc_Word32 WebRtcAec_Init(void *aecInst, WebRtc_Word32 sampFreq, WebRtc_Word32 scSampFreq)
+int32_t WebRtcAec_Init(void *aecInst, int32_t sampFreq, int32_t scSampFreq)
 {
 aecpc_t *aecpc = aecInst;
 AecConfig aecConfig;
@@ -226,11 +226,11 @@ WebRtc_Word32 WebRtcAec_Init(void *aecInst, WebRtc_Word32 sampFreq, WebRtc_Word3
 }
 // only buffer L band for farend
-WebRtc_Word32 WebRtcAec_BufferFarend(void *aecInst, const WebRtc_Word16 *farend,
-WebRtc_Word16 nrOfSamples)
+int32_t WebRtcAec_BufferFarend(void *aecInst, const int16_t *farend,
+int16_t nrOfSamples)
 {
 aecpc_t *aecpc = aecInst;
-WebRtc_Word32 retVal = 0;
+int32_t retVal = 0;
 int newNrOfSamples = (int) nrOfSamples;
 short newFarend[MAX_RESAMP_LEN];
 const int16_t* farend_ptr = farend;
@@ -304,12 +304,13 @@ WebRtc_Word32 WebRtcAec_BufferFarend(void *aecInst, const WebRtc_Word16 *farend,
 return retVal;
 }
-WebRtc_Word32 WebRtcAec_Process(void *aecInst, const WebRtc_Word16 *nearend,
-const WebRtc_Word16 *nearendH, WebRtc_Word16 *out, WebRtc_Word16 *outH,
-WebRtc_Word16 nrOfSamples, WebRtc_Word16 msInSndCardBuf, WebRtc_Word32 skew)
+int32_t WebRtcAec_Process(void *aecInst, const int16_t *nearend,
+const int16_t *nearendH, int16_t *out, int16_t *outH,
+int16_t nrOfSamples, int16_t msInSndCardBuf,
+int32_t skew)
 {
 aecpc_t *aecpc = aecInst;
-WebRtc_Word32 retVal = 0;
+int32_t retVal = 0;
 short i;
 short nBlocks10ms;
 short nFrames;
@@ -689,7 +690,7 @@ int WebRtcAec_GetDelayMetrics(void* handle, int* median, int* std) {
 return 0;
 }
-WebRtc_Word32 WebRtcAec_get_error_code(void *aecInst)
+int32_t WebRtcAec_get_error_code(void *aecInst)
 {
 aecpc_t *aecpc = aecInst;


@@ -35,10 +35,10 @@ enum {
 };
 typedef struct {
-WebRtc_Word16 nlpMode; // default kAecNlpModerate
+int16_t nlpMode; // default kAecNlpModerate
-WebRtc_Word16 skewMode; // default kAecFalse
+int16_t skewMode; // default kAecFalse
-WebRtc_Word16 metricsMode; // default kAecFalse
+int16_t metricsMode; // default kAecFalse
 int delay_logging; // default kAecFalse
 //float realSkew;
 } AecConfig;
@@ -73,10 +73,10 @@ extern "C" {
 *
 * Outputs Description
 * -------------------------------------------------------------------
-* WebRtc_Word32 return 0: OK
+* int32_t return 0: OK
 * -1: error
 */
-WebRtc_Word32 WebRtcAec_Create(void **aecInst);
+int32_t WebRtcAec_Create(void **aecInst);
 /*
 * This function releases the memory allocated by WebRtcAec_Create().
@@ -87,10 +87,10 @@ WebRtc_Word32 WebRtcAec_Create(void **aecInst);
 *
 * Outputs Description
 * -------------------------------------------------------------------
-* WebRtc_Word32 return 0: OK
+* int32_t return 0: OK
 * -1: error
 */
-WebRtc_Word32 WebRtcAec_Free(void *aecInst);
+int32_t WebRtcAec_Free(void *aecInst);
 /*
 * Initializes an AEC instance.
@@ -98,17 +98,15 @@ WebRtc_Word32 WebRtcAec_Free(void *aecInst);
 * Inputs Description
 * -------------------------------------------------------------------
 * void *aecInst Pointer to the AEC instance
-* WebRtc_Word32 sampFreq Sampling frequency of data
+* int32_t sampFreq Sampling frequency of data
-* WebRtc_Word32 scSampFreq Soundcard sampling frequency
+* int32_t scSampFreq Soundcard sampling frequency
 *
 * Outputs Description
 * -------------------------------------------------------------------
-* WebRtc_Word32 return 0: OK
+* int32_t return 0: OK
 * -1: error
 */
-WebRtc_Word32 WebRtcAec_Init(void *aecInst,
-WebRtc_Word32 sampFreq,
-WebRtc_Word32 scSampFreq);
+int32_t WebRtcAec_Init(void *aecInst, int32_t sampFreq, int32_t scSampFreq);
 /*
 * Inserts an 80 or 160 sample block of data into the farend buffer.
@@ -116,18 +114,18 @@ WebRtc_Word32 WebRtcAec_Init(void *aecInst,
 * Inputs Description
 * -------------------------------------------------------------------
 * void *aecInst Pointer to the AEC instance
-* WebRtc_Word16 *farend In buffer containing one frame of
+* int16_t *farend In buffer containing one frame of
 * farend signal for L band
-* WebRtc_Word16 nrOfSamples Number of samples in farend buffer
+* int16_t nrOfSamples Number of samples in farend buffer
 *
 * Outputs Description
 * -------------------------------------------------------------------
-* WebRtc_Word32 return 0: OK
+* int32_t return 0: OK
 * -1: error
 */
-WebRtc_Word32 WebRtcAec_BufferFarend(void *aecInst,
+int32_t WebRtcAec_BufferFarend(void *aecInst,
-const WebRtc_Word16 *farend,
+const int16_t *farend,
-WebRtc_Word16 nrOfSamples);
+int16_t nrOfSamples);
 /*
 * Runs the echo canceller on an 80 or 160 sample blocks of data.
@@ -135,34 +133,34 @@ WebRtc_Word32 WebRtcAec_BufferFarend(void *aecInst,
 * Inputs Description
 * -------------------------------------------------------------------
 * void *aecInst Pointer to the AEC instance
-* WebRtc_Word16 *nearend In buffer containing one frame of
+* int16_t *nearend In buffer containing one frame of
 * nearend+echo signal for L band
-* WebRtc_Word16 *nearendH In buffer containing one frame of
+* int16_t *nearendH In buffer containing one frame of
 * nearend+echo signal for H band
-* WebRtc_Word16 nrOfSamples Number of samples in nearend buffer
+* int16_t nrOfSamples Number of samples in nearend buffer
-* WebRtc_Word16 msInSndCardBuf Delay estimate for sound card and
+* int16_t msInSndCardBuf Delay estimate for sound card and
 * system buffers
-* WebRtc_Word16 skew Difference between number of samples played
+* int16_t skew Difference between number of samples played
 * and recorded at the soundcard (for clock skew
 * compensation)
 *
 * Outputs Description
 * -------------------------------------------------------------------
-* WebRtc_Word16 *out Out buffer, one frame of processed nearend
+* int16_t *out Out buffer, one frame of processed nearend
 * for L band
-* WebRtc_Word16 *outH Out buffer, one frame of processed nearend
+* int16_t *outH Out buffer, one frame of processed nearend
 * for H band
-* WebRtc_Word32 return 0: OK
+* int32_t return 0: OK
 * -1: error
 */
-WebRtc_Word32 WebRtcAec_Process(void *aecInst,
+int32_t WebRtcAec_Process(void *aecInst,
-const WebRtc_Word16 *nearend,
+const int16_t *nearend,
-const WebRtc_Word16 *nearendH,
+const int16_t *nearendH,
-WebRtc_Word16 *out,
+int16_t *out,
-WebRtc_Word16 *outH,
+int16_t *outH,
-WebRtc_Word16 nrOfSamples,
+int16_t nrOfSamples,
-WebRtc_Word16 msInSndCardBuf,
+int16_t msInSndCardBuf,
-WebRtc_Word32 skew);
+int32_t skew);
 /*
 * This function enables the user to set certain parameters on-the-fly.
@@ -238,9 +236,9 @@ int WebRtcAec_GetDelayMetrics(void* handle, int* median, int* std);
 *
 * Outputs Description
 * -------------------------------------------------------------------
-* WebRtc_Word32 return 11000-11100: error code
+* int32_t return 11000-11100: error code
 */
-WebRtc_Word32 WebRtcAec_get_error_code(void *aecInst);
+int32_t WebRtcAec_get_error_code(void *aecInst);
 // Returns a pointer to the low level AEC handle.
 //
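
Taken together, the retyped declarations above define the whole public AEC call sequence. A minimal usage sketch against the new signatures; the include path, the 16 kHz/160-sample framing, the 40 ms delay estimate, and passing NULL for the high-band buffers below 32 kHz are illustrative assumptions, not something this change introduces:

    #include <stdint.h>
    #include "echo_cancellation.h"  /* assumed include path */

    int run_aec_frame(const int16_t far[160], const int16_t near[160],
                      int16_t out[160]) {
      void* aec = NULL;
      if (WebRtcAec_Create(&aec) != 0) return -1;
      /* 16 kHz capture and 16 kHz soundcard: one 10 ms frame is 160 samples. */
      if (WebRtcAec_Init(aec, 16000, 16000) != 0) {
        WebRtcAec_Free(aec);
        return -1;
      }
      WebRtcAec_BufferFarend(aec, far, 160);  /* render (playout) signal */
      WebRtcAec_Process(aec, near, NULL,      /* no H band at 16 kHz */
                        out, NULL, 160,
                        40,                   /* assumed device delay in ms */
                        0);                   /* no clock-skew compensation */
      WebRtcAec_Free(aec);
      return 0;
    }

Return values other than the create/init checks are ignored here for brevity; real callers should test every call against the 0/-1 convention documented above.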

File diff suppressed because it is too large.


@@ -27,8 +27,8 @@
 #endif
 typedef struct {
-WebRtc_Word16 real;
+int16_t real;
-WebRtc_Word16 imag;
+int16_t imag;
 } complex16_t;
 typedef struct {
@@ -43,86 +43,86 @@ typedef struct {
 RingBuffer* nearCleanFrameBuf;
 RingBuffer* outFrameBuf;
-WebRtc_Word16 farBuf[FAR_BUF_LEN];
+int16_t farBuf[FAR_BUF_LEN];
-WebRtc_Word16 mult;
+int16_t mult;
-WebRtc_UWord32 seed;
+uint32_t seed;
 // Delay estimation variables
 void* delay_estimator_farend;
 void* delay_estimator;
-WebRtc_UWord16 currentDelay;
+uint16_t currentDelay;
 // Far end history variables
 // TODO(bjornv): Replace |far_history| with ring_buffer.
 uint16_t far_history[PART_LEN1 * MAX_DELAY];
 int far_history_pos;
 int far_q_domains[MAX_DELAY];
-WebRtc_Word16 nlpFlag;
+int16_t nlpFlag;
-WebRtc_Word16 fixedDelay;
+int16_t fixedDelay;
-WebRtc_UWord32 totCount;
+uint32_t totCount;
-WebRtc_Word16 dfaCleanQDomain;
+int16_t dfaCleanQDomain;
-WebRtc_Word16 dfaCleanQDomainOld;
+int16_t dfaCleanQDomainOld;
-WebRtc_Word16 dfaNoisyQDomain;
+int16_t dfaNoisyQDomain;
-WebRtc_Word16 dfaNoisyQDomainOld;
+int16_t dfaNoisyQDomainOld;
-WebRtc_Word16 nearLogEnergy[MAX_BUF_LEN];
+int16_t nearLogEnergy[MAX_BUF_LEN];
-WebRtc_Word16 farLogEnergy;
+int16_t farLogEnergy;
-WebRtc_Word16 echoAdaptLogEnergy[MAX_BUF_LEN];
+int16_t echoAdaptLogEnergy[MAX_BUF_LEN];
-WebRtc_Word16 echoStoredLogEnergy[MAX_BUF_LEN];
+int16_t echoStoredLogEnergy[MAX_BUF_LEN];
 // The extra 16 or 32 bytes in the following buffers are for alignment based
 // Neon code.
 // It's designed this way since the current GCC compiler can't align a
 // buffer in 16 or 32 byte boundaries properly.
-WebRtc_Word16 channelStored_buf[PART_LEN1 + 8];
+int16_t channelStored_buf[PART_LEN1 + 8];
-WebRtc_Word16 channelAdapt16_buf[PART_LEN1 + 8];
+int16_t channelAdapt16_buf[PART_LEN1 + 8];
-WebRtc_Word32 channelAdapt32_buf[PART_LEN1 + 8];
+int32_t channelAdapt32_buf[PART_LEN1 + 8];
-WebRtc_Word16 xBuf_buf[PART_LEN2 + 16]; // farend
+int16_t xBuf_buf[PART_LEN2 + 16]; // farend
-WebRtc_Word16 dBufClean_buf[PART_LEN2 + 16]; // nearend
+int16_t dBufClean_buf[PART_LEN2 + 16]; // nearend
-WebRtc_Word16 dBufNoisy_buf[PART_LEN2 + 16]; // nearend
+int16_t dBufNoisy_buf[PART_LEN2 + 16]; // nearend
-WebRtc_Word16 outBuf_buf[PART_LEN + 8];
+int16_t outBuf_buf[PART_LEN + 8];
 // Pointers to the above buffers
-WebRtc_Word16 *channelStored;
+int16_t *channelStored;
-WebRtc_Word16 *channelAdapt16;
+int16_t *channelAdapt16;
-WebRtc_Word32 *channelAdapt32;
+int32_t *channelAdapt32;
-WebRtc_Word16 *xBuf;
+int16_t *xBuf;
-WebRtc_Word16 *dBufClean;
+int16_t *dBufClean;
-WebRtc_Word16 *dBufNoisy;
+int16_t *dBufNoisy;
-WebRtc_Word16 *outBuf;
+int16_t *outBuf;
-WebRtc_Word32 echoFilt[PART_LEN1];
+int32_t echoFilt[PART_LEN1];
-WebRtc_Word16 nearFilt[PART_LEN1];
+int16_t nearFilt[PART_LEN1];
-WebRtc_Word32 noiseEst[PART_LEN1];
+int32_t noiseEst[PART_LEN1];
 int noiseEstTooLowCtr[PART_LEN1];
 int noiseEstTooHighCtr[PART_LEN1];
-WebRtc_Word16 noiseEstCtr;
+int16_t noiseEstCtr;
-WebRtc_Word16 cngMode;
+int16_t cngMode;
-WebRtc_Word32 mseAdaptOld;
+int32_t mseAdaptOld;
-WebRtc_Word32 mseStoredOld;
+int32_t mseStoredOld;
-WebRtc_Word32 mseThreshold;
+int32_t mseThreshold;
-WebRtc_Word16 farEnergyMin;
+int16_t farEnergyMin;
-WebRtc_Word16 farEnergyMax;
+int16_t farEnergyMax;
-WebRtc_Word16 farEnergyMaxMin;
+int16_t farEnergyMaxMin;
-WebRtc_Word16 farEnergyVAD;
+int16_t farEnergyVAD;
-WebRtc_Word16 farEnergyMSE;
+int16_t farEnergyMSE;
 int currentVADValue;
-WebRtc_Word16 vadUpdateCount;
+int16_t vadUpdateCount;
-WebRtc_Word16 startupState;
+int16_t startupState;
-WebRtc_Word16 mseChannelCount;
+int16_t mseChannelCount;
-WebRtc_Word16 supGain;
+int16_t supGain;
-WebRtc_Word16 supGainOld;
+int16_t supGainOld;
-WebRtc_Word16 supGainErrParamA;
+int16_t supGainErrParamA;
-WebRtc_Word16 supGainErrParamD;
+int16_t supGainErrParamD;
-WebRtc_Word16 supGainErrParamDiffAB;
+int16_t supGainErrParamDiffAB;
-WebRtc_Word16 supGainErrParamDiffBD;
+int16_t supGainErrParamDiffBD;
 struct RealFFT* real_fft;
@@ -195,7 +195,7 @@ int WebRtcAecm_Control(AecmCore_t *aecm, int delay, int nlpFlag);
 // - aecm : Initialized instance
 //
 void WebRtcAecm_InitEchoPathCore(AecmCore_t* aecm,
-const WebRtc_Word16* echo_path);
+const int16_t* echo_path);
 ////////////////////////////////////////////////////////////////////////////////
 // WebRtcAecm_ProcessFrame(...)
@@ -215,10 +215,10 @@ void WebRtcAecm_InitEchoPathCore(AecmCore_t* aecm,
 // - out : Out buffer, one frame of nearend signal :
 //
 //
-int WebRtcAecm_ProcessFrame(AecmCore_t * aecm, const WebRtc_Word16 * farend,
+int WebRtcAecm_ProcessFrame(AecmCore_t * aecm, const int16_t * farend,
-const WebRtc_Word16 * nearendNoisy,
+const int16_t * nearendNoisy,
-const WebRtc_Word16 * nearendClean,
+const int16_t * nearendClean,
-WebRtc_Word16 * out);
+int16_t * out);
 ////////////////////////////////////////////////////////////////////////////////
 // WebRtcAecm_ProcessBlock(...)
@@ -238,10 +238,10 @@ int WebRtcAecm_ProcessFrame(AecmCore_t * aecm, const WebRtc_Word16 * farend,
 // - out : Out buffer, one block of nearend signal :
 //
 //
-int WebRtcAecm_ProcessBlock(AecmCore_t * aecm, const WebRtc_Word16 * farend,
+int WebRtcAecm_ProcessBlock(AecmCore_t * aecm, const int16_t * farend,
-const WebRtc_Word16 * nearendNoisy,
+const int16_t * nearendNoisy,
-const WebRtc_Word16 * noisyClean,
+const int16_t * noisyClean,
-WebRtc_Word16 * out);
+int16_t * out);
 ////////////////////////////////////////////////////////////////////////////////
 // WebRtcAecm_BufferFarFrame()
@@ -254,7 +254,7 @@ int WebRtcAecm_ProcessBlock(AecmCore_t * aecm, const WebRtc_Word16 * farend,
 // - farLen : Length of frame
 //
 void WebRtcAecm_BufferFarFrame(AecmCore_t * const aecm,
-const WebRtc_Word16 * const farend,
+const int16_t * const farend,
 const int farLen);
 ////////////////////////////////////////////////////////////////////////////////
@@ -269,7 +269,7 @@ void WebRtcAecm_BufferFarFrame(AecmCore_t * const aecm,
 // - knownDelay : known delay
 //
 void WebRtcAecm_FetchFarFrame(AecmCore_t * const aecm,
-WebRtc_Word16 * const farend,
+int16_t * const farend,
 const int farLen, const int knownDelay);
 ///////////////////////////////////////////////////////////////////////////////
@@ -278,17 +278,17 @@ void WebRtcAecm_FetchFarFrame(AecmCore_t * const aecm,
 //
 typedef void (*CalcLinearEnergies)(
 AecmCore_t* aecm,
-const WebRtc_UWord16* far_spectrum,
+const uint16_t* far_spectrum,
-WebRtc_Word32* echoEst,
+int32_t* echoEst,
-WebRtc_UWord32* far_energy,
+uint32_t* far_energy,
-WebRtc_UWord32* echo_energy_adapt,
+uint32_t* echo_energy_adapt,
-WebRtc_UWord32* echo_energy_stored);
+uint32_t* echo_energy_stored);
 extern CalcLinearEnergies WebRtcAecm_CalcLinearEnergies;
 typedef void (*StoreAdaptiveChannel)(
 AecmCore_t* aecm,
-const WebRtc_UWord16* far_spectrum,
+const uint16_t* far_spectrum,
-WebRtc_Word32* echo_est);
+int32_t* echo_est);
 extern StoreAdaptiveChannel WebRtcAecm_StoreAdaptiveChannel;
 typedef void (*ResetAdaptiveChannel)(AecmCore_t* aecm);
@@ -296,17 +296,17 @@ extern ResetAdaptiveChannel WebRtcAecm_ResetAdaptiveChannel;
 typedef void (*WindowAndFFT)(
 AecmCore_t* aecm,
-WebRtc_Word16* fft,
+int16_t* fft,
-const WebRtc_Word16* time_signal,
+const int16_t* time_signal,
 complex16_t* freq_signal,
 int time_signal_scaling);
 extern WindowAndFFT WebRtcAecm_WindowAndFFT;
 typedef void (*InverseFFTAndWindow)(
 AecmCore_t* aecm,
-WebRtc_Word16* fft, complex16_t* efw,
+int16_t* fft, complex16_t* efw,
-WebRtc_Word16* output,
+int16_t* output,
-const WebRtc_Word16* nearendClean);
+const int16_t* nearendClean);
 extern InverseFFTAndWindow WebRtcAecm_InverseFFTAndWindow;
 // For the above function pointers, functions for generic platforms are declared
@@ -314,27 +314,27 @@ extern InverseFFTAndWindow WebRtcAecm_InverseFFTAndWindow;
 // are declared below and defined in file aecm_core_neon.s.
 #if (defined WEBRTC_DETECT_ARM_NEON) || defined (WEBRTC_ARCH_ARM_NEON)
 void WebRtcAecm_WindowAndFFTNeon(AecmCore_t* aecm,
-WebRtc_Word16* fft,
+int16_t* fft,
-const WebRtc_Word16* time_signal,
+const int16_t* time_signal,
 complex16_t* freq_signal,
 int time_signal_scaling);
 void WebRtcAecm_InverseFFTAndWindowNeon(AecmCore_t* aecm,
-WebRtc_Word16* fft,
+int16_t* fft,
 complex16_t* efw,
-WebRtc_Word16* output,
+int16_t* output,
-const WebRtc_Word16* nearendClean);
+const int16_t* nearendClean);
 void WebRtcAecm_CalcLinearEnergiesNeon(AecmCore_t* aecm,
-const WebRtc_UWord16* far_spectrum,
+const uint16_t* far_spectrum,
-WebRtc_Word32* echo_est,
+int32_t* echo_est,
-WebRtc_UWord32* far_energy,
+uint32_t* far_energy,
-WebRtc_UWord32* echo_energy_adapt,
+uint32_t* echo_energy_adapt,
-WebRtc_UWord32* echo_energy_stored);
+uint32_t* echo_energy_stored);
 void WebRtcAecm_StoreAdaptiveChannelNeon(AecmCore_t* aecm,
-const WebRtc_UWord16* far_spectrum,
+const uint16_t* far_spectrum,
-WebRtc_Word32* echo_est);
+int32_t* echo_est);
 void WebRtcAecm_ResetAdaptiveChannelNeon(AecmCore_t* aecm);
 #endif


@@ -24,8 +24,8 @@ GLOBAL_FUNCTION WebRtcAecm_StoreAdaptiveChannelNeon
 GLOBAL_FUNCTION WebRtcAecm_ResetAdaptiveChannelNeon
 @ void WebRtcAecm_WindowAndFFTNeon(AecmCore_t* aecm,
-@ WebRtc_Word16* fft,
+@ int16_t* fft,
-@ const WebRtc_Word16* time_signal,
+@ const int16_t* time_signal,
 @ complex16_t* freq_signal,
 @ int time_signal_scaling);
 .align 2
@@ -81,10 +81,10 @@ LOOP_PART_LEN2:
 pop {r4, r5, r6, pc}
 @ void WebRtcAecm_InverseFFTAndWindowNeon(AecmCore_t* aecm,
-@ WebRtc_Word16* fft,
+@ int16_t* fft,
 @ complex16_t* efw,
-@ WebRtc_Word16* output,
+@ int16_t* output,
-@ const WebRtc_Word16* nearendClean);
+@ const int16_t* nearendClean);
 .align 2
 DEFINE_FUNCTION WebRtcAecm_InverseFFTAndWindowNeon
 push {r4-r8, lr}
@@ -197,11 +197,11 @@ END:
 pop {r4-r8, pc}
 @ void WebRtcAecm_CalcLinearEnergiesNeon(AecmCore_t* aecm,
-@ const WebRtc_UWord16* far_spectrum,
+@ const uint16_t* far_spectrum,
-@ WebRtc_Word32* echo_est,
+@ int32_t* echo_est,
-@ WebRtc_UWord32* far_energy,
+@ uint32_t* far_energy,
-@ WebRtc_UWord32* echo_energy_adapt,
+@ uint32_t* echo_energy_adapt,
-@ WebRtc_UWord32* echo_energy_stored);
+@ uint32_t* echo_energy_stored);
 .align 2
 DEFINE_FUNCTION WebRtcAecm_CalcLinearEnergiesNeon
 push {r4-r7}


@@ -19,7 +19,7 @@
 // generating script and makefile, to replace these C functions.
 // Square root of Hanning window in Q14.
-const ALIGN8_BEG WebRtc_Word16 WebRtcAecm_kSqrtHanning[] ALIGN8_END = {
+const ALIGN8_BEG int16_t WebRtcAecm_kSqrtHanning[] ALIGN8_END = {
 0,
 399, 798, 1196, 1594, 1990, 2386, 2780, 3172,
 3562, 3951, 4337, 4720, 5101, 5478, 5853, 6224,
@@ -32,7 +32,7 @@ const ALIGN8_BEG WebRtc_Word16 WebRtcAecm_kSqrtHanning[] ALIGN8_END = {
 };
 // Square root of Hanning window in Q14, in reversed order.
-static const ALIGN8_BEG WebRtc_Word16 kSqrtHanningReversed[] ALIGN8_END = {
+static const ALIGN8_BEG int16_t kSqrtHanningReversed[] ALIGN8_END = {
 16384, 16373, 16354, 16325, 16286, 16237, 16179, 16111,
 16034, 15947, 15851, 15746, 15631, 15506, 15373, 15231,
 15079, 14918, 14749, 14571, 14384, 14189, 13985, 13773,
@@ -44,8 +44,8 @@ static const ALIGN8_BEG WebRtc_Word16 kSqrtHanningReversed[] ALIGN8_END = {
 };
 void WebRtcAecm_WindowAndFFTNeon(AecmCore_t* aecm,
-WebRtc_Word16* fft,
+int16_t* fft,
-const WebRtc_Word16* time_signal,
+const int16_t* time_signal,
 complex16_t* freq_signal,
 int time_signal_scaling) {
 int i = 0;
@@ -115,10 +115,10 @@ void WebRtcAecm_WindowAndFFTNeon(AecmCore_t* aecm,
 }
 void WebRtcAecm_InverseFFTAndWindowNeon(AecmCore_t* aecm,
-WebRtc_Word16* fft,
+int16_t* fft,
 complex16_t* efw,
-WebRtc_Word16* output,
+int16_t* output,
-const WebRtc_Word16* nearendClean) {
+const int16_t* nearendClean) {
 int i, j, outCFFT;
 assert((uintptr_t)efw % 32 == 0);
@@ -161,7 +161,7 @@ void WebRtcAecm_InverseFFTAndWindowNeon(AecmCore_t* aecm,
 outCFFT = WebRtcSpl_RealInverseFFT(aecm->real_fft, fft, (int16_t*)efw);
 int32x4_t tmp32x4_2;
-__asm __volatile("vdup.32 %q0, %1" : "=w"(tmp32x4_2) : "r"((WebRtc_Word32)
+__asm __volatile("vdup.32 %q0, %1" : "=w"(tmp32x4_2) : "r"((int32_t)
 (outCFFT - aecm->dfaCleanQDomain)));
 for (i = 0; i < PART_LEN; i += 4) {
 int16x4_t tmp16x4_0;
@@ -169,18 +169,18 @@ void WebRtcAecm_InverseFFTAndWindowNeon(AecmCore_t* aecm,
 int32x4_t tmp32x4_0;
 int32x4_t tmp32x4_1;
-//efw[i].real = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(
+//efw[i].real = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(
 // efw[i].real, WebRtcAecm_kSqrtHanning[i], 14);
 __asm __volatile("vld1.16 %P0, [%1, :64]" : "=w"(tmp16x4_0) : "r"(&efw[i].real));
 __asm __volatile("vld1.16 %P0, [%1, :64]" : "=w"(tmp16x4_1) : "r"(&WebRtcAecm_kSqrtHanning[i]));
 __asm __volatile("vmull.s16 %q0, %P1, %P2" : "=w"(tmp32x4_0) : "w"(tmp16x4_0), "w"(tmp16x4_1));
 __asm __volatile("vrshr.s32 %q0, %q1, #14" : "=w"(tmp32x4_0) : "0"(tmp32x4_0));
-//tmp32no1 = WEBRTC_SPL_SHIFT_W32((WebRtc_Word32)efw[i].real,
+//tmp32no1 = WEBRTC_SPL_SHIFT_W32((int32_t)efw[i].real,
 // outCFFT - aecm->dfaCleanQDomain);
 __asm __volatile("vshl.s32 %q0, %q1, %q2" : "=w"(tmp32x4_0) : "0"(tmp32x4_0), "w"(tmp32x4_2));
-//efw[i].real = (WebRtc_Word16)WEBRTC_SPL_SAT(WEBRTC_SPL_WORD16_MAX,
+//efw[i].real = (int16_t)WEBRTC_SPL_SAT(WEBRTC_SPL_WORD16_MAX,
 // tmp32no1 + aecm->outBuf[i], WEBRTC_SPL_WORD16_MIN);
 // output[i] = efw[i].real;
 __asm __volatile("vld1.16 %P0, [%1, :64]" : "=w"(tmp16x4_0) : "r"(&aecm->outBuf[i]));
@@ -199,7 +199,7 @@ void WebRtcAecm_InverseFFTAndWindowNeon(AecmCore_t* aecm,
 // tmp32no1 = WEBRTC_SPL_SHIFT_W32(tmp32no1, outCFFT - aecm->dfaCleanQDomain);
 __asm __volatile("vshl.s32 %q0, %q1, %q2" : "=w"(tmp32x4_0) : "0"(tmp32x4_0), "w"(tmp32x4_2));
-// aecm->outBuf[i] = (WebRtc_Word16)WEBRTC_SPL_SAT(
+// aecm->outBuf[i] = (int16_t)WEBRTC_SPL_SAT(
 // WEBRTC_SPL_WORD16_MAX, tmp32no1, WEBRTC_SPL_WORD16_MIN);
 __asm __volatile("vqmovn.s32 %P0, %q1" : "=w"(tmp16x4_0) : "w"(tmp32x4_0));
 __asm __volatile("vst1.16 %P0, [%1, :64]" : : "w"(tmp16x4_0), "r"(&aecm->outBuf[i]));
@@ -228,16 +228,16 @@ void WebRtcAecm_InverseFFTAndWindowNeon(AecmCore_t* aecm,
 }
 void WebRtcAecm_CalcLinearEnergiesNeon(AecmCore_t* aecm,
-const WebRtc_UWord16* far_spectrum,
+const uint16_t* far_spectrum,
-WebRtc_Word32* echo_est,
+int32_t* echo_est,
-WebRtc_UWord32* far_energy,
+uint32_t* far_energy,
-WebRtc_UWord32* echo_energy_adapt,
+uint32_t* echo_energy_adapt,
-WebRtc_UWord32* echo_energy_stored) {
+uint32_t* echo_energy_stored) {
 int i;
-register WebRtc_UWord32 far_energy_r;
+register uint32_t far_energy_r;
-register WebRtc_UWord32 echo_energy_stored_r;
+register uint32_t echo_energy_stored_r;
-register WebRtc_UWord32 echo_energy_adapt_r;
+register uint32_t echo_energy_adapt_r;
 assert((uintptr_t)echo_est % 32 == 0);
 assert((uintptr_t)(aecm->channelStored) % 16 == 0);
@@ -250,7 +250,7 @@ void WebRtcAecm_CalcLinearEnergiesNeon(AecmCore_t* aecm,
 __asm __volatile("vmov.i32 q9, #0" : : : "q9"); // echo_energy_adapt
 for (i = 0; i < PART_LEN - 7; i += 8) {
-// far_energy += (WebRtc_UWord32)(far_spectrum[i]);
+// far_energy += (uint32_t)(far_spectrum[i]);
 __asm __volatile("vld1.16 {d26, d27}, [%0]" : : "r"(&far_spectrum[i]) : "q13");
 __asm __volatile("vaddw.u16 q14, q14, d26" : : : "q14", "q13");
 __asm __volatile("vaddw.u16 q14, q14, d27" : : : "q14", "q13");
@@ -263,7 +263,7 @@ void WebRtcAecm_CalcLinearEnergiesNeon(AecmCore_t* aecm,
 __asm __volatile("vst1.32 {d20, d21, d22, d23}, [%0, :256]" : : "r"(&echo_est[i]):
 "q10", "q11");
-// echo_energy_stored += (WebRtc_UWord32)echoEst[i];
+// echo_energy_stored += (uint32_t)echoEst[i];
 __asm __volatile("vadd.u32 q8, q10" : : : "q10", "q8");
 __asm __volatile("vadd.u32 q8, q11" : : : "q11", "q8");
@@ -290,15 +290,15 @@ void WebRtcAecm_CalcLinearEnergiesNeon(AecmCore_t* aecm,
 // Get estimated echo energies for adaptive channel and stored channel.
 echo_est[i] = WEBRTC_SPL_MUL_16_U16(aecm->channelStored[i], far_spectrum[i]);
-*echo_energy_stored = echo_energy_stored_r + (WebRtc_UWord32)echo_est[i];
+*echo_energy_stored = echo_energy_stored_r + (uint32_t)echo_est[i];
-*far_energy = far_energy_r + (WebRtc_UWord32)(far_spectrum[i]);
+*far_energy = far_energy_r + (uint32_t)(far_spectrum[i]);
 *echo_energy_adapt = echo_energy_adapt_r + WEBRTC_SPL_UMUL_16_16(
 aecm->channelAdapt16[i], far_spectrum[i]);
 }
 void WebRtcAecm_StoreAdaptiveChannelNeon(AecmCore_t* aecm,
-const WebRtc_UWord16* far_spectrum,
+const uint16_t* far_spectrum,
-WebRtc_Word32* echo_est) {
+int32_t* echo_est) {
 int i;
 assert((uintptr_t)echo_est % 32 == 0);
@@ -331,7 +331,7 @@ void WebRtcAecm_ResetAdaptiveChannelNeon(AecmCore_t* aecm) {
 for (i = 0; i < PART_LEN - 7; i += 8) {
 // aecm->channelAdapt16[i] = aecm->channelStored[i];
-// aecm->channelAdapt32[i] = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)
+// aecm->channelAdapt32[i] = WEBRTC_SPL_LSHIFT_W32((int32_t)
 // aecm->channelStored[i], 16);
 __asm __volatile("vld1.16 {d24, d25}, [%0, :128]" : :
 "r"(&aecm->channelStored[i]) : "q12");
@@ -344,5 +344,5 @@ void WebRtcAecm_ResetAdaptiveChannelNeon(AecmCore_t* aecm) {
 }
 aecm->channelAdapt16[i] = aecm->channelStored[i];
 aecm->channelAdapt32[i] = WEBRTC_SPL_LSHIFT_W32(
-(WebRtc_Word32)aecm->channelStored[i], 16);
+(int32_t)aecm->channelStored[i], 16);
 }


@@ -57,7 +57,7 @@ typedef struct
 int delayChange;
 short lastDelayDiff;
-WebRtc_Word16 echoMode;
+int16_t echoMode;
 #ifdef AEC_DEBUG
 FILE *bufFile;
@@ -80,7 +80,7 @@ static int WebRtcAecm_EstBufDelay(aecmob_t *aecmInst, short msInSndCardBuf);
 // Stuffs the farend buffer if the estimated delay is too large
 static int WebRtcAecm_DelayComp(aecmob_t *aecmInst);
-WebRtc_Word32 WebRtcAecm_Create(void **aecmInst)
+int32_t WebRtcAecm_Create(void **aecmInst)
 {
 aecmob_t *aecm;
 if (aecmInst == NULL)
@@ -130,7 +130,7 @@ WebRtc_Word32 WebRtcAecm_Create(void **aecmInst)
 return 0;
 }
-WebRtc_Word32 WebRtcAecm_Free(void *aecmInst)
+int32_t WebRtcAecm_Free(void *aecmInst)
 {
 aecmob_t *aecm = aecmInst;
@@ -157,7 +157,7 @@ WebRtc_Word32 WebRtcAecm_Free(void *aecmInst)
 return 0;
 }
-WebRtc_Word32 WebRtcAecm_Init(void *aecmInst, WebRtc_Word32 sampFreq)
+int32_t WebRtcAecm_Init(void *aecmInst, int32_t sampFreq)
 {
 aecmob_t *aecm = aecmInst;
 AecmConfig aecConfig;
@@ -220,11 +220,11 @@ WebRtc_Word32 WebRtcAecm_Init(void *aecmInst, WebRtc_Word32 sampFreq)
 return 0;
 }
-WebRtc_Word32 WebRtcAecm_BufferFarend(void *aecmInst, const WebRtc_Word16 *farend,
-WebRtc_Word16 nrOfSamples)
+int32_t WebRtcAecm_BufferFarend(void *aecmInst, const int16_t *farend,
+int16_t nrOfSamples)
 {
 aecmob_t *aecm = aecmInst;
-WebRtc_Word32 retVal = 0;
+int32_t retVal = 0;
 if (aecm == NULL)
 {
@@ -260,12 +260,12 @@ WebRtc_Word32 WebRtcAecm_BufferFarend(void *aecmInst, const WebRtc_Word16 *faren
 return retVal;
 }
-WebRtc_Word32 WebRtcAecm_Process(void *aecmInst, const WebRtc_Word16 *nearendNoisy,
-const WebRtc_Word16 *nearendClean, WebRtc_Word16 *out,
-WebRtc_Word16 nrOfSamples, WebRtc_Word16 msInSndCardBuf)
+int32_t WebRtcAecm_Process(void *aecmInst, const int16_t *nearendNoisy,
+const int16_t *nearendClean, int16_t *out,
+int16_t nrOfSamples, int16_t msInSndCardBuf)
 {
 aecmob_t *aecm = aecmInst;
-WebRtc_Word32 retVal = 0;
+int32_t retVal = 0;
 short i;
 short nmbrOfFilledBuffers;
 short nBlocks10ms;
@@ -477,7 +477,7 @@ WebRtc_Word32 WebRtcAecm_Process(void *aecmInst, const WebRtc_Word16 *nearendNoi
 return retVal;
 }
-WebRtc_Word32 WebRtcAecm_set_config(void *aecmInst, AecmConfig config)
+int32_t WebRtcAecm_set_config(void *aecmInst, AecmConfig config)
 {
 aecmob_t *aecm = aecmInst;
@@ -559,7 +559,7 @@ WebRtc_Word32 WebRtcAecm_set_config(void *aecmInst, AecmConfig config)
 return 0;
 }
-WebRtc_Word32 WebRtcAecm_get_config(void *aecmInst, AecmConfig *config)
+int32_t WebRtcAecm_get_config(void *aecmInst, AecmConfig *config)
 {
 aecmob_t *aecm = aecmInst;
@@ -586,12 +586,12 @@ WebRtc_Word32 WebRtcAecm_get_config(void *aecmInst, AecmConfig *config)
 return 0;
 }
-WebRtc_Word32 WebRtcAecm_InitEchoPath(void* aecmInst,
+int32_t WebRtcAecm_InitEchoPath(void* aecmInst,
 const void* echo_path,
 size_t size_bytes)
 {
 aecmob_t *aecm = aecmInst;
-const WebRtc_Word16* echo_path_ptr = echo_path;
+const int16_t* echo_path_ptr = echo_path;
 if (aecmInst == NULL) {
 return -1;
@@ -617,12 +617,12 @@ WebRtc_Word32 WebRtcAecm_InitEchoPath(void* aecmInst,
 return 0;
 }
-WebRtc_Word32 WebRtcAecm_GetEchoPath(void* aecmInst,
+int32_t WebRtcAecm_GetEchoPath(void* aecmInst,
 void* echo_path,
 size_t size_bytes)
 {
 aecmob_t *aecm = aecmInst;
-WebRtc_Word16* echo_path_ptr = echo_path;
+int16_t* echo_path_ptr = echo_path;
 if (aecmInst == NULL) {
 return -1;
@@ -649,10 +649,10 @@ WebRtc_Word32 WebRtcAecm_GetEchoPath(void* aecmInst,
 size_t WebRtcAecm_echo_path_size_bytes()
 {
-return (PART_LEN1 * sizeof(WebRtc_Word16));
+return (PART_LEN1 * sizeof(int16_t));
 }
-WebRtc_Word32 WebRtcAecm_get_error_code(void *aecmInst)
+int32_t WebRtcAecm_get_error_code(void *aecmInst)
 {
 aecmob_t *aecm = aecmInst;


@@ -31,8 +31,8 @@ enum {
 #define AECM_BAD_PARAMETER_WARNING 12100
 typedef struct {
-WebRtc_Word16 cngMode; // AECM_FALSE, AECM_TRUE (default)
+int16_t cngMode; // AECM_FALSE, AECM_TRUE (default)
-WebRtc_Word16 echoMode; // 0, 1, 2, 3 (default), 4
+int16_t echoMode; // 0, 1, 2, 3 (default), 4
 } AecmConfig;
 #ifdef __cplusplus
@@ -50,10 +50,10 @@ extern "C" {
 *
 * Outputs Description
 * -------------------------------------------------------------------
-* WebRtc_Word32 return 0: OK
+* int32_t return 0: OK
 * -1: error
 */
-WebRtc_Word32 WebRtcAecm_Create(void **aecmInst);
+int32_t WebRtcAecm_Create(void **aecmInst);
 /*
 * This function releases the memory allocated by WebRtcAecm_Create()
@@ -64,10 +64,10 @@ WebRtc_Word32 WebRtcAecm_Create(void **aecmInst);
 *
 * Outputs Description
 * -------------------------------------------------------------------
-* WebRtc_Word32 return 0: OK
+* int32_t return 0: OK
 * -1: error
 */
-WebRtc_Word32 WebRtcAecm_Free(void *aecmInst);
+int32_t WebRtcAecm_Free(void *aecmInst);
 /*
 * Initializes an AECM instance.
@@ -75,15 +75,14 @@ WebRtc_Word32 WebRtcAecm_Free(void *aecmInst);
 * Inputs Description
 * -------------------------------------------------------------------
 * void *aecmInst Pointer to the AECM instance
-* WebRtc_Word32 sampFreq Sampling frequency of data
+* int32_t sampFreq Sampling frequency of data
 *
 * Outputs Description
 * -------------------------------------------------------------------
-* WebRtc_Word32 return 0: OK
+* int32_t return 0: OK
 * -1: error
 */
-WebRtc_Word32 WebRtcAecm_Init(void* aecmInst,
-WebRtc_Word32 sampFreq);
+int32_t WebRtcAecm_Init(void* aecmInst, int32_t sampFreq);
 /*
 * Inserts an 80 or 160 sample block of data into the farend buffer.
@@ -91,18 +90,18 @@ WebRtc_Word32 WebRtcAecm_Init(void* aecmInst,
 * Inputs Description
 * -------------------------------------------------------------------
 * void *aecmInst Pointer to the AECM instance
-* WebRtc_Word16 *farend In buffer containing one frame of
+* int16_t *farend In buffer containing one frame of
 * farend signal
-* WebRtc_Word16 nrOfSamples Number of samples in farend buffer
+* int16_t nrOfSamples Number of samples in farend buffer
 *
 * Outputs Description
 * -------------------------------------------------------------------
-* WebRtc_Word32 return 0: OK
+* int32_t return 0: OK
 * -1: error
 */
-WebRtc_Word32 WebRtcAecm_BufferFarend(void* aecmInst,
+int32_t WebRtcAecm_BufferFarend(void* aecmInst,
-const WebRtc_Word16* farend,
+const int16_t* farend,
-WebRtc_Word16 nrOfSamples);
+int16_t nrOfSamples);
 /*
 * Runs the AECM on an 80 or 160 sample blocks of data.
@@ -110,31 +109,31 @@ WebRtc_Word32 WebRtcAecm_BufferFarend(void* aecmInst,
 * Inputs Description
 * -------------------------------------------------------------------
 * void *aecmInst Pointer to the AECM instance
-* WebRtc_Word16 *nearendNoisy In buffer containing one frame of
+* int16_t *nearendNoisy In buffer containing one frame of
 * reference nearend+echo signal. If
 * noise reduction is active, provide
 * the noisy signal here.
-* WebRtc_Word16 *nearendClean In buffer containing one frame of
+* int16_t *nearendClean In buffer containing one frame of
 * nearend+echo signal. If noise
 * reduction is active, provide the
 * clean signal here. Otherwise pass a
 * NULL pointer.
-* WebRtc_Word16 nrOfSamples Number of samples in nearend buffer
+* int16_t nrOfSamples Number of samples in nearend buffer
-* WebRtc_Word16 msInSndCardBuf Delay estimate for sound card and
+* int16_t msInSndCardBuf Delay estimate for sound card and
 * system buffers
 *
 * Outputs Description
 * -------------------------------------------------------------------
-* WebRtc_Word16 *out Out buffer, one frame of processed nearend
+* int16_t *out Out buffer, one frame of processed nearend
-* WebRtc_Word32 return 0: OK
+* int32_t return 0: OK
 * -1: error
 */
-WebRtc_Word32 WebRtcAecm_Process(void* aecmInst,
+int32_t WebRtcAecm_Process(void* aecmInst,
-const WebRtc_Word16* nearendNoisy,
+const int16_t* nearendNoisy,
-const WebRtc_Word16* nearendClean,
+const int16_t* nearendClean,
-WebRtc_Word16* out,
+int16_t* out,
-WebRtc_Word16 nrOfSamples,
+int16_t nrOfSamples,
-WebRtc_Word16 msInSndCardBuf);
+int16_t msInSndCardBuf);
 /*
 * This function enables the user to set certain parameters on-the-fly
@@ -147,11 +146,10 @@ WebRtc_Word32 WebRtcAecm_Process(void* aecmInst,
 *
 * Outputs Description
 * -------------------------------------------------------------------
-* WebRtc_Word32 return 0: OK
+* int32_t return 0: OK
 * -1: error
 */
-WebRtc_Word32 WebRtcAecm_set_config(void* aecmInst,
-AecmConfig config);
+int32_t WebRtcAecm_set_config(void* aecmInst, AecmConfig config);
 /*
 * This function enables the user to set certain parameters on-the-fly
@@ -164,11 +162,10 @@ WebRtc_Word32 WebRtcAecm_set_config(void* aecmInst,
 * -------------------------------------------------------------------
 * AecmConfig *config Pointer to the config instance that
 * all properties will be written to
-* WebRtc_Word32 return 0: OK
+* int32_t return 0: OK
 * -1: error
 */
-WebRtc_Word32 WebRtcAecm_get_config(void *aecmInst,
-AecmConfig *config);
+int32_t WebRtcAecm_get_config(void *aecmInst, AecmConfig *config);
 /*
 * This function enables the user to set the echo path on-the-fly.
@@ -181,12 +178,12 @@ WebRtc_Word32 WebRtcAecm_get_config(void *aecmInst,
 *
 * Outputs Description
 * -------------------------------------------------------------------
-* WebRtc_Word32 return 0: OK
+* int32_t return 0: OK
 * -1: error
 */
-WebRtc_Word32 WebRtcAecm_InitEchoPath(void* aecmInst,
+int32_t WebRtcAecm_InitEchoPath(void* aecmInst,
 const void* echo_path,
 size_t size_bytes);
 /*
 * This function enables the user to get the currently used echo path
@@ -200,12 +197,12 @@ WebRtc_Word32 WebRtcAecm_InitEchoPath(void* aecmInst,
 *
 * Outputs Description
 * -------------------------------------------------------------------
-* WebRtc_Word32 return 0: OK
+* int32_t return 0: OK
 * -1: error
 */
-WebRtc_Word32 WebRtcAecm_GetEchoPath(void* aecmInst,
+int32_t WebRtcAecm_GetEchoPath(void* aecmInst,
 void* echo_path,
 size_t size_bytes);
 /*
 * This function enables the user to get the echo path size in bytes
@@ -225,9 +222,9 @@ size_t WebRtcAecm_echo_path_size_bytes();
 *
 * Outputs Description
 * -------------------------------------------------------------------
-* WebRtc_Word32 return 11000-11100: error code
+* int32_t return 11000-11100: error code
 */
-WebRtc_Word32 WebRtcAecm_get_error_code(void *aecmInst);
+int32_t WebRtcAecm_get_error_code(void *aecmInst);
 #ifdef __cplusplus
 }
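
The echo-path getters and setters above are the one part of this API that trades in raw byte blobs rather than typed samples, which is why the size helper matters. A hedged sketch of copying an echo path from one created-and-initialized AECM instance to another (allocation strategy, error handling, and the include path are illustrative assumptions):

    #include <stdlib.h>
    #include "echo_control_mobile.h"  /* assumed include path */

    /* Copy the echo path estimated by |src| into |dst|, e.g. to warm-start
     * a re-created AECM instance. Both instances must already be initialized. */
    int clone_echo_path(void* src, void* dst) {
      size_t bytes = WebRtcAecm_echo_path_size_bytes();
      void* path = malloc(bytes);
      if (path == NULL) return -1;

      int ret = -1;
      if (WebRtcAecm_GetEchoPath(src, path, bytes) == 0 &&
          WebRtcAecm_InitEchoPath(dst, path, bytes) == 0) {
        ret = 0;
      }
      free(path);
      return ret;
    }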


@ -25,21 +25,21 @@
#include "analog_agc.h" #include "analog_agc.h"
/* The slope of in Q13*/ /* The slope of in Q13*/
static const WebRtc_Word16 kSlope1[8] = {21793, 12517, 7189, 4129, 2372, 1362, 472, 78}; static const int16_t kSlope1[8] = {21793, 12517, 7189, 4129, 2372, 1362, 472, 78};
/* The offset in Q14 */ /* The offset in Q14 */
static const WebRtc_Word16 kOffset1[8] = {25395, 23911, 22206, 20737, 19612, 18805, 17951, static const int16_t kOffset1[8] = {25395, 23911, 22206, 20737, 19612, 18805, 17951,
17367}; 17367};
/* The slope of in Q13*/ /* The slope of in Q13*/
static const WebRtc_Word16 kSlope2[8] = {2063, 1731, 1452, 1218, 1021, 857, 597, 337}; static const int16_t kSlope2[8] = {2063, 1731, 1452, 1218, 1021, 857, 597, 337};
/* The offset in Q14 */ /* The offset in Q14 */
static const WebRtc_Word16 kOffset2[8] = {18432, 18379, 18290, 18177, 18052, 17920, 17670, static const int16_t kOffset2[8] = {18432, 18379, 18290, 18177, 18052, 17920, 17670,
17286}; 17286};
static const WebRtc_Word16 kMuteGuardTimeMs = 8000; static const int16_t kMuteGuardTimeMs = 8000;
static const WebRtc_Word16 kInitCheck = 42; static const int16_t kInitCheck = 42;
/* Default settings if config is not used */ /* Default settings if config is not used */
#define AGC_DEFAULT_TARGET_LEVEL 3 #define AGC_DEFAULT_TARGET_LEVEL 3
@ -72,12 +72,12 @@ static const WebRtc_Word16 kInitCheck = 42;
* fprintf(1, '\t%i, %i, %i, %i,\n', round(10.^(linspace(0,10,32)/20) * 2^12)); * fprintf(1, '\t%i, %i, %i, %i,\n', round(10.^(linspace(0,10,32)/20) * 2^12));
*/ */
/* Q12 */ /* Q12 */
static const WebRtc_UWord16 kGainTableAnalog[GAIN_TBL_LEN] = {4096, 4251, 4412, 4579, 4752, static const uint16_t kGainTableAnalog[GAIN_TBL_LEN] = {4096, 4251, 4412, 4579, 4752,
4932, 5118, 5312, 5513, 5722, 5938, 6163, 6396, 6638, 6889, 7150, 7420, 7701, 7992, 4932, 5118, 5312, 5513, 5722, 5938, 6163, 6396, 6638, 6889, 7150, 7420, 7701, 7992,
8295, 8609, 8934, 9273, 9623, 9987, 10365, 10758, 11165, 11587, 12025, 12480, 12953}; 8295, 8609, 8934, 9273, 9623, 9987, 10365, 10758, 11165, 11587, 12025, 12480, 12953};
/* Gain/Suppression tables for virtual Mic (in Q10) */ /* Gain/Suppression tables for virtual Mic (in Q10) */
static const WebRtc_UWord16 kGainTableVirtualMic[128] = {1052, 1081, 1110, 1141, 1172, 1204, static const uint16_t kGainTableVirtualMic[128] = {1052, 1081, 1110, 1141, 1172, 1204,
1237, 1271, 1305, 1341, 1378, 1416, 1454, 1494, 1535, 1577, 1620, 1664, 1710, 1757, 1237, 1271, 1305, 1341, 1378, 1416, 1454, 1494, 1535, 1577, 1620, 1664, 1710, 1757,
1805, 1854, 1905, 1957, 2010, 2065, 2122, 2180, 2239, 2301, 2364, 2428, 2495, 2563, 1805, 1854, 1905, 1957, 2010, 2065, 2122, 2180, 2239, 2301, 2364, 2428, 2495, 2563,
2633, 2705, 2779, 2855, 2933, 3013, 3096, 3180, 3267, 3357, 3449, 3543, 3640, 3739, 2633, 2705, 2779, 2855, 2933, 3013, 3096, 3180, 3267, 3357, 3449, 3543, 3640, 3739,
@ -88,7 +88,7 @@ static const WebRtc_UWord16 kGainTableVirtualMic[128] = {1052, 1081, 1110, 1141,
16055, 16494, 16945, 17409, 17885, 18374, 18877, 19393, 19923, 20468, 21028, 21603, 16055, 16494, 16945, 17409, 17885, 18374, 18877, 19393, 19923, 20468, 21028, 21603,
22194, 22801, 23425, 24065, 24724, 25400, 26095, 26808, 27541, 28295, 29069, 29864, 22194, 22801, 23425, 24065, 24724, 25400, 26095, 26808, 27541, 28295, 29069, 29864,
30681, 31520, 32382}; 30681, 31520, 32382};
static const WebRtc_UWord16 kSuppressionTableVirtualMic[128] = {1024, 1006, 988, 970, 952, static const uint16_t kSuppressionTableVirtualMic[128] = {1024, 1006, 988, 970, 952,
935, 918, 902, 886, 870, 854, 839, 824, 809, 794, 780, 766, 752, 739, 726, 713, 700, 935, 918, 902, 886, 870, 854, 839, 824, 809, 794, 780, 766, 752, 739, 726, 713, 700,
687, 675, 663, 651, 639, 628, 616, 605, 594, 584, 573, 563, 553, 543, 533, 524, 514, 687, 675, 663, 651, 639, 628, 616, 605, 594, 584, 573, 563, 553, 543, 533, 524, 514,
505, 496, 487, 478, 470, 461, 453, 445, 437, 429, 421, 414, 406, 399, 392, 385, 378, 505, 496, 487, 478, 470, 461, 453, 445, 437, 429, 421, 414, 406, 399, 392, 385, 378,
@ -102,7 +102,7 @@ static const WebRtc_UWord16 kSuppressionTableVirtualMic[128] = {1024, 1006, 988,
* Matlab code * Matlab code
* targetLevelTable = fprintf('%d,\t%d,\t%d,\t%d,\n', round((32767*10.^(-(0:63)'/20)).^2*16/2^7) */ * targetLevelTable = fprintf('%d,\t%d,\t%d,\t%d,\n', round((32767*10.^(-(0:63)'/20)).^2*16/2^7) */
static const WebRtc_Word32 kTargetLevelTable[64] = {134209536, 106606424, 84680493, 67264106, static const int32_t kTargetLevelTable[64] = {134209536, 106606424, 84680493, 67264106,
53429779, 42440782, 33711911, 26778323, 21270778, 16895980, 13420954, 10660642, 53429779, 42440782, 33711911, 26778323, 21270778, 16895980, 13420954, 10660642,
8468049, 6726411, 5342978, 4244078, 3371191, 2677832, 2127078, 1689598, 1342095, 8468049, 6726411, 5342978, 4244078, 3371191, 2677832, 2127078, 1689598, 1342095,
1066064, 846805, 672641, 534298, 424408, 337119, 267783, 212708, 168960, 134210, 1066064, 846805, 672641, 534298, 424408, 337119, 267783, 212708, 168960, 134210,
@ -110,13 +110,13 @@ static const WebRtc_Word32 kTargetLevelTable[64] = {134209536, 106606424, 846804
6726, 5343, 4244, 3371, 2678, 2127, 1690, 1342, 1066, 847, 673, 534, 424, 337, 268, 6726, 5343, 4244, 3371, 2678, 2127, 1690, 1342, 1066, 847, 673, 534, 424, 337, 268,
213, 169, 134, 107, 85, 67}; 213, 169, 134, 107, 85, 67};
int WebRtcAgc_AddMic(void *state, WebRtc_Word16 *in_mic, WebRtc_Word16 *in_mic_H, int WebRtcAgc_AddMic(void *state, int16_t *in_mic, int16_t *in_mic_H,
WebRtc_Word16 samples) int16_t samples)
{ {
WebRtc_Word32 nrg, max_nrg, sample, tmp32; int32_t nrg, max_nrg, sample, tmp32;
WebRtc_Word32 *ptr; int32_t *ptr;
WebRtc_UWord16 targetGainIdx, gain; uint16_t targetGainIdx, gain;
WebRtc_Word16 i, n, L, M, subFrames, tmp16, tmp_speech[16]; int16_t i, n, L, M, subFrames, tmp16, tmp_speech[16];
Agc_t *stt; Agc_t *stt;
stt = (Agc_t *)state; stt = (Agc_t *)state;
@ -205,10 +205,10 @@ int WebRtcAgc_AddMic(void *state, WebRtc_Word16 *in_mic, WebRtc_Word16 *in_mic_H
assert(stt->maxLevel > stt->maxAnalog); assert(stt->maxLevel > stt->maxAnalog);
/* Q1 */ /* Q1 */
tmp16 = (WebRtc_Word16)(stt->micVol - stt->maxAnalog); tmp16 = (int16_t)(stt->micVol - stt->maxAnalog);
tmp32 = WEBRTC_SPL_MUL_16_16(GAIN_TBL_LEN - 1, tmp16); tmp32 = WEBRTC_SPL_MUL_16_16(GAIN_TBL_LEN - 1, tmp16);
tmp16 = (WebRtc_Word16)(stt->maxLevel - stt->maxAnalog); tmp16 = (int16_t)(stt->maxLevel - stt->maxAnalog);
targetGainIdx = (WebRtc_UWord16)WEBRTC_SPL_DIV(tmp32, tmp16); targetGainIdx = (uint16_t)WEBRTC_SPL_DIV(tmp32, tmp16);
assert(targetGainIdx < GAIN_TBL_LEN); assert(targetGainIdx < GAIN_TBL_LEN);
/* Increment through the table towards the target gain. /* Increment through the table towards the target gain.
@ -238,7 +238,7 @@ int WebRtcAgc_AddMic(void *state, WebRtc_Word16 *in_mic, WebRtc_Word16 *in_mic_H
in_mic[i] = -32768; in_mic[i] = -32768;
} else } else
{ {
in_mic[i] = (WebRtc_Word16)sample; in_mic[i] = (int16_t)sample;
} }
// For higher band // For higher band
@ -254,7 +254,7 @@ int WebRtcAgc_AddMic(void *state, WebRtc_Word16 *in_mic, WebRtc_Word16 *in_mic_H
in_mic_H[i] = -32768; in_mic_H[i] = -32768;
} else } else
{ {
in_mic_H[i] = (WebRtc_Word16)sample; in_mic_H[i] = (int16_t)sample;
} }
} }
} }
@ -327,10 +327,10 @@ int WebRtcAgc_AddMic(void *state, WebRtc_Word16 *in_mic, WebRtc_Word16 *in_mic_H
return 0; return 0;
} }
int WebRtcAgc_AddFarend(void *state, const WebRtc_Word16 *in_far, WebRtc_Word16 samples) int WebRtcAgc_AddFarend(void *state, const int16_t *in_far, int16_t samples)
{ {
WebRtc_Word32 errHandle = 0; int32_t errHandle = 0;
WebRtc_Word16 i, subFrames; int16_t i, subFrames;
Agc_t *stt; Agc_t *stt;
stt = (Agc_t *)state; stt = (Agc_t *)state;
@ -393,22 +393,22 @@ int WebRtcAgc_AddFarend(void *state, const WebRtc_Word16 *in_far, WebRtc_Word16
return errHandle; return errHandle;
} }
int WebRtcAgc_VirtualMic(void *agcInst, WebRtc_Word16 *in_near, WebRtc_Word16 *in_near_H, int WebRtcAgc_VirtualMic(void *agcInst, int16_t *in_near, int16_t *in_near_H,
WebRtc_Word16 samples, WebRtc_Word32 micLevelIn, int16_t samples, int32_t micLevelIn,
WebRtc_Word32 *micLevelOut) int32_t *micLevelOut)
{ {
WebRtc_Word32 tmpFlt, micLevelTmp, gainIdx; int32_t tmpFlt, micLevelTmp, gainIdx;
WebRtc_UWord16 gain; uint16_t gain;
WebRtc_Word16 ii; int16_t ii;
Agc_t *stt; Agc_t *stt;
WebRtc_UWord32 nrg; uint32_t nrg;
WebRtc_Word16 sampleCntr; int16_t sampleCntr;
WebRtc_UWord32 frameNrg = 0; uint32_t frameNrg = 0;
WebRtc_UWord32 frameNrgLimit = 5500; uint32_t frameNrgLimit = 5500;
WebRtc_Word16 numZeroCrossing = 0; int16_t numZeroCrossing = 0;
const WebRtc_Word16 kZeroCrossingLowLim = 15; const int16_t kZeroCrossingLowLim = 15;
const WebRtc_Word16 kZeroCrossingHighLim = 20; const int16_t kZeroCrossingHighLim = 20;
stt = (Agc_t *)agcInst; stt = (Agc_t *)agcInst;
@ -507,7 +507,7 @@ int WebRtcAgc_VirtualMic(void *agcInst, WebRtc_Word16 *in_near, WebRtc_Word16 *i
gain = kSuppressionTableVirtualMic[127 - gainIdx]; gain = kSuppressionTableVirtualMic[127 - gainIdx];
} }
} }
in_near[ii] = (WebRtc_Word16)tmpFlt; in_near[ii] = (int16_t)tmpFlt;
if (stt->fs == 32000) if (stt->fs == 32000)
{ {
tmpFlt = WEBRTC_SPL_MUL_16_U16(in_near_H[ii], gain); tmpFlt = WEBRTC_SPL_MUL_16_U16(in_near_H[ii], gain);
@ -520,7 +520,7 @@ int WebRtcAgc_VirtualMic(void *agcInst, WebRtc_Word16 *in_near, WebRtc_Word16 *i
{ {
tmpFlt = -32768; tmpFlt = -32768;
} }
in_near_H[ii] = (WebRtc_Word16)tmpFlt; in_near_H[ii] = (int16_t)tmpFlt;
} }
} }
/* Set the level we (finally) used */ /* Set the level we (finally) used */
@ -538,7 +538,7 @@ int WebRtcAgc_VirtualMic(void *agcInst, WebRtc_Word16 *in_near, WebRtc_Word16 *i
void WebRtcAgc_UpdateAgcThresholds(Agc_t *stt) void WebRtcAgc_UpdateAgcThresholds(Agc_t *stt)
{ {
WebRtc_Word16 tmp16; int16_t tmp16;
#ifdef MIC_LEVEL_FEEDBACK #ifdef MIC_LEVEL_FEEDBACK
int zeros; int zeros;
@ -552,7 +552,7 @@ void WebRtcAgc_UpdateAgcThresholds(Agc_t *stt)
/* Set analog target level in envelope dBOv scale */ /* Set analog target level in envelope dBOv scale */
tmp16 = (DIFF_REF_TO_ANALOG * stt->compressionGaindB) + ANALOG_TARGET_LEVEL_2; tmp16 = (DIFF_REF_TO_ANALOG * stt->compressionGaindB) + ANALOG_TARGET_LEVEL_2;
tmp16 = WebRtcSpl_DivW32W16ResW16((WebRtc_Word32)tmp16, ANALOG_TARGET_LEVEL); tmp16 = WebRtcSpl_DivW32W16ResW16((int32_t)tmp16, ANALOG_TARGET_LEVEL);
stt->analogTarget = DIGITAL_REF_AT_0_COMP_GAIN + tmp16; stt->analogTarget = DIGITAL_REF_AT_0_COMP_GAIN + tmp16;
if (stt->analogTarget < DIGITAL_REF_AT_0_COMP_GAIN) if (stt->analogTarget < DIGITAL_REF_AT_0_COMP_GAIN)
{ {
@ -587,14 +587,14 @@ void WebRtcAgc_UpdateAgcThresholds(Agc_t *stt)
stt->lowerLimit = stt->startLowerLimit; stt->lowerLimit = stt->startLowerLimit;
} }
void WebRtcAgc_SaturationCtrl(Agc_t *stt, WebRtc_UWord8 *saturated, WebRtc_Word32 *env) void WebRtcAgc_SaturationCtrl(Agc_t *stt, uint8_t *saturated, int32_t *env)
{ {
WebRtc_Word16 i, tmpW16; int16_t i, tmpW16;
/* Check if the signal is saturated */ /* Check if the signal is saturated */
for (i = 0; i < 10; i++) for (i = 0; i < 10; i++)
{ {
tmpW16 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(env[i], 20); tmpW16 = (int16_t)WEBRTC_SPL_RSHIFT_W32(env[i], 20);
if (tmpW16 > 875) if (tmpW16 > 875)
{ {
stt->envSum += tmpW16; stt->envSum += tmpW16;
@ -608,15 +608,15 @@ void WebRtcAgc_SaturationCtrl(Agc_t *stt, WebRtc_UWord8 *saturated, WebRtc_Word3
} }
/* stt->envSum *= 0.99; */ /* stt->envSum *= 0.99; */
stt->envSum = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(stt->envSum, stt->envSum = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(stt->envSum,
(WebRtc_Word16)32440, 15); (int16_t)32440, 15);
} }
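Illustrative note: 32440 is approximately 0.99 in Q15 (32440 / 32768 = 0.9900), so the update above implements the commented-out envSum *= 0.99 in integer arithmetic. A plain-C sketch:

#include <stdint.h>

static int16_t DecayBy0_99(int16_t x) {
  return (int16_t)(((int32_t)x * 32440) >> 15);  /* x * (32440 / 32768) */
}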
void WebRtcAgc_ZeroCtrl(Agc_t *stt, WebRtc_Word32 *inMicLevel, WebRtc_Word32 *env) void WebRtcAgc_ZeroCtrl(Agc_t *stt, int32_t *inMicLevel, int32_t *env)
{ {
WebRtc_Word16 i; int16_t i;
WebRtc_Word32 tmp32 = 0; int32_t tmp32 = 0;
WebRtc_Word32 midVal; int32_t midVal;
/* Is the input signal zero? */ /* Is the input signal zero? */
for (i = 0; i < 10; i++) for (i = 0; i < 10; i++)
@ -682,8 +682,8 @@ void WebRtcAgc_SpeakerInactiveCtrl(Agc_t *stt)
* silence. * silence.
*/ */
WebRtc_Word32 tmp32; int32_t tmp32;
WebRtc_Word16 vadThresh; int16_t vadThresh;
if (stt->vadMic.stdLongTerm < 2500) if (stt->vadMic.stdLongTerm < 2500)
{ {
@ -698,13 +698,13 @@ void WebRtcAgc_SpeakerInactiveCtrl(Agc_t *stt)
} }
/* stt->vadThreshold = (31 * stt->vadThreshold + vadThresh) / 32; */ /* stt->vadThreshold = (31 * stt->vadThreshold + vadThresh) / 32; */
tmp32 = (WebRtc_Word32)vadThresh; tmp32 = (int32_t)vadThresh;
tmp32 += WEBRTC_SPL_MUL_16_16((WebRtc_Word16)31, stt->vadThreshold); tmp32 += WEBRTC_SPL_MUL_16_16((int16_t)31, stt->vadThreshold);
stt->vadThreshold = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32, 5); stt->vadThreshold = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp32, 5);
} }
} }
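Illustrative note: the commented-out line above, (31 * vadThreshold + vadThresh) / 32, is what the fixed-point code implements: a 1/32 exponential smoother toward the new threshold. A sketch under that reading:

#include <stdint.h>

static int16_t SmoothVadThreshold(int16_t prev, int16_t target) {
  int32_t acc = (int32_t)target + 31 * (int32_t)prev;
  return (int16_t)(acc >> 5);  /* divide by 32 */
}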
void WebRtcAgc_ExpCurve(WebRtc_Word16 volume, WebRtc_Word16 *index) void WebRtcAgc_ExpCurve(int16_t volume, int16_t *index)
{ {
// volume in Q14 // volume in Q14
// index in [0-7] // index in [0-7]
@ -754,16 +754,16 @@ void WebRtcAgc_ExpCurve(WebRtc_Word16 volume, WebRtc_Word16 *index)
} }
} }
WebRtc_Word32 WebRtcAgc_ProcessAnalog(void *state, WebRtc_Word32 inMicLevel, int32_t WebRtcAgc_ProcessAnalog(void *state, int32_t inMicLevel,
WebRtc_Word32 *outMicLevel, int32_t *outMicLevel,
WebRtc_Word16 vadLogRatio, int16_t vadLogRatio,
WebRtc_Word16 echo, WebRtc_UWord8 *saturationWarning) int16_t echo, uint8_t *saturationWarning)
{ {
WebRtc_UWord32 tmpU32; uint32_t tmpU32;
WebRtc_Word32 Rxx16w32, tmp32; int32_t Rxx16w32, tmp32;
WebRtc_Word32 inMicLevelTmp, lastMicVol; int32_t inMicLevelTmp, lastMicVol;
WebRtc_Word16 i; int16_t i;
WebRtc_UWord8 saturated = 0; uint8_t saturated = 0;
Agc_t *stt; Agc_t *stt;
stt = (Agc_t *)state; stt = (Agc_t *)state;
@ -785,9 +785,9 @@ WebRtc_Word32 WebRtcAgc_ProcessAnalog(void *state, WebRtc_Word32 inMicLevel,
if (stt->firstCall == 0) if (stt->firstCall == 0)
{ {
WebRtc_Word32 tmpVol; int32_t tmpVol;
stt->firstCall = 1; stt->firstCall = 1;
tmp32 = WEBRTC_SPL_RSHIFT_W32((stt->maxLevel - stt->minLevel) * (WebRtc_Word32)51, 9); tmp32 = WEBRTC_SPL_RSHIFT_W32((stt->maxLevel - stt->minLevel) * (int32_t)51, 9);
tmpVol = (stt->minLevel + tmp32); tmpVol = (stt->minLevel + tmp32);
/* If the mic level is very low at start, increase it! */ /* If the mic level is very low at start, increase it! */
@ -807,7 +807,7 @@ WebRtc_Word32 WebRtcAgc_ProcessAnalog(void *state, WebRtc_Word32 inMicLevel,
/* If the mic level was manually changed to a very low value raise it! */ /* If the mic level was manually changed to a very low value raise it! */
if ((inMicLevelTmp != stt->micVol) && (inMicLevelTmp < stt->minOutput)) if ((inMicLevelTmp != stt->micVol) && (inMicLevelTmp < stt->minOutput))
{ {
tmp32 = WEBRTC_SPL_RSHIFT_W32((stt->maxLevel - stt->minLevel) * (WebRtc_Word32)51, 9); tmp32 = WEBRTC_SPL_RSHIFT_W32((stt->maxLevel - stt->minLevel) * (int32_t)51, 9);
inMicLevelTmp = (stt->minLevel + tmp32); inMicLevelTmp = (stt->minLevel + tmp32);
stt->micVol = inMicLevelTmp; stt->micVol = inMicLevelTmp;
#ifdef MIC_LEVEL_FEEDBACK #ifdef MIC_LEVEL_FEEDBACK
@ -856,8 +856,8 @@ WebRtc_Word32 WebRtcAgc_ProcessAnalog(void *state, WebRtc_Word32 inMicLevel,
/* stt->micVol *= 0.903; */ /* stt->micVol *= 0.903; */
tmp32 = inMicLevelTmp - stt->minLevel; tmp32 = inMicLevelTmp - stt->minLevel;
tmpU32 = WEBRTC_SPL_UMUL(29591, (WebRtc_UWord32)(tmp32)); tmpU32 = WEBRTC_SPL_UMUL(29591, (uint32_t)(tmp32));
stt->micVol = (WebRtc_Word32)WEBRTC_SPL_RSHIFT_U32(tmpU32, 15) + stt->minLevel; stt->micVol = (int32_t)WEBRTC_SPL_RSHIFT_U32(tmpU32, 15) + stt->minLevel;
if (stt->micVol > lastMicVol - 2) if (stt->micVol > lastMicVol - 2)
{ {
stt->micVol = lastMicVol - 2; stt->micVol = lastMicVol - 2;
@ -988,8 +988,8 @@ WebRtc_Word32 WebRtcAgc_ProcessAnalog(void *state, WebRtc_Word32 inMicLevel,
/* 0.95 in Q15 */ /* 0.95 in Q15 */
tmp32 = inMicLevelTmp - stt->minLevel; tmp32 = inMicLevelTmp - stt->minLevel;
tmpU32 = WEBRTC_SPL_UMUL(31130, (WebRtc_UWord32)(tmp32)); tmpU32 = WEBRTC_SPL_UMUL(31130, (uint32_t)(tmp32));
stt->micVol = (WebRtc_Word32)WEBRTC_SPL_RSHIFT_U32(tmpU32, 15) + stt->minLevel; stt->micVol = (int32_t)WEBRTC_SPL_RSHIFT_U32(tmpU32, 15) + stt->minLevel;
if (stt->micVol > lastMicVol - 1) if (stt->micVol > lastMicVol - 1)
{ {
stt->micVol = lastMicVol - 1; stt->micVol = lastMicVol - 1;
@ -1036,8 +1036,8 @@ WebRtc_Word32 WebRtcAgc_ProcessAnalog(void *state, WebRtc_Word32 inMicLevel,
/* 0.965 in Q15 */ /* 0.965 in Q15 */
tmp32 = inMicLevelTmp - stt->minLevel; tmp32 = inMicLevelTmp - stt->minLevel;
tmpU32 = WEBRTC_SPL_UMUL(31621, (WebRtc_UWord32)(inMicLevelTmp - stt->minLevel)); tmpU32 = WEBRTC_SPL_UMUL(31621, (uint32_t)(inMicLevelTmp - stt->minLevel));
stt->micVol = (WebRtc_Word32)WEBRTC_SPL_RSHIFT_U32(tmpU32, 15) + stt->minLevel; stt->micVol = (int32_t)WEBRTC_SPL_RSHIFT_U32(tmpU32, 15) + stt->minLevel;
if (stt->micVol > lastMicVol - 1) if (stt->micVol > lastMicVol - 1)
{ {
stt->micVol = lastMicVol - 1; stt->micVol = lastMicVol - 1;
@ -1062,8 +1062,8 @@ WebRtc_Word32 WebRtcAgc_ProcessAnalog(void *state, WebRtc_Word32 inMicLevel,
if (stt->msTooLow > stt->msecSpeechOuterChange) if (stt->msTooLow > stt->msecSpeechOuterChange)
{ {
/* Raise the recording level */ /* Raise the recording level */
WebRtc_Word16 index, weightFIX; int16_t index, weightFIX;
WebRtc_Word16 volNormFIX = 16384; // =1 in Q14. int16_t volNormFIX = 16384; // =1 in Q14.
stt->msTooLow = 0; stt->msTooLow = 0;
@ -1071,7 +1071,7 @@ WebRtc_Word32 WebRtcAgc_ProcessAnalog(void *state, WebRtc_Word32 inMicLevel,
tmp32 = WEBRTC_SPL_LSHIFT_W32(inMicLevelTmp - stt->minLevel, 14); tmp32 = WEBRTC_SPL_LSHIFT_W32(inMicLevelTmp - stt->minLevel, 14);
if (stt->maxInit != stt->minLevel) if (stt->maxInit != stt->minLevel)
{ {
volNormFIX = (WebRtc_Word16)WEBRTC_SPL_DIV(tmp32, volNormFIX = (int16_t)WEBRTC_SPL_DIV(tmp32,
(stt->maxInit - stt->minLevel)); (stt->maxInit - stt->minLevel));
} }
@ -1080,7 +1080,7 @@ WebRtc_Word32 WebRtcAgc_ProcessAnalog(void *state, WebRtc_Word32 inMicLevel,
/* Compute weighting factor for the volume increase, 32^(-2*X)/2+1.05 */ /* Compute weighting factor for the volume increase, 32^(-2*X)/2+1.05 */
weightFIX = kOffset1[index] weightFIX = kOffset1[index]
- (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(kSlope1[index], - (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(kSlope1[index],
volNormFIX, 13); volNormFIX, 13);
/* stt->Rxx160_LPw32 *= 1.047 [~0.2 dB]; */ /* stt->Rxx160_LPw32 *= 1.047 [~0.2 dB]; */
@ -1088,8 +1088,8 @@ WebRtc_Word32 WebRtcAgc_ProcessAnalog(void *state, WebRtc_Word32 inMicLevel,
stt->Rxx160_LPw32 = WEBRTC_SPL_MUL(tmp32, 67); stt->Rxx160_LPw32 = WEBRTC_SPL_MUL(tmp32, 67);
tmp32 = inMicLevelTmp - stt->minLevel; tmp32 = inMicLevelTmp - stt->minLevel;
tmpU32 = ((WebRtc_UWord32)weightFIX * (WebRtc_UWord32)(inMicLevelTmp - stt->minLevel)); tmpU32 = ((uint32_t)weightFIX * (uint32_t)(inMicLevelTmp - stt->minLevel));
stt->micVol = (WebRtc_Word32)WEBRTC_SPL_RSHIFT_U32(tmpU32, 14) + stt->minLevel; stt->micVol = (int32_t)WEBRTC_SPL_RSHIFT_U32(tmpU32, 14) + stt->minLevel;
if (stt->micVol < lastMicVol + 2) if (stt->micVol < lastMicVol + 2)
{ {
stt->micVol = lastMicVol + 2; stt->micVol = lastMicVol + 2;
@ -1122,8 +1122,8 @@ WebRtc_Word32 WebRtcAgc_ProcessAnalog(void *state, WebRtc_Word32 inMicLevel,
if (stt->msTooLow > stt->msecSpeechInnerChange) if (stt->msTooLow > stt->msecSpeechInnerChange)
{ {
/* Raise the recording level */ /* Raise the recording level */
WebRtc_Word16 index, weightFIX; int16_t index, weightFIX;
WebRtc_Word16 volNormFIX = 16384; // =1 in Q14. int16_t volNormFIX = 16384; // =1 in Q14.
stt->msTooLow = 0; stt->msTooLow = 0;
@ -1131,7 +1131,7 @@ WebRtc_Word32 WebRtcAgc_ProcessAnalog(void *state, WebRtc_Word32 inMicLevel,
tmp32 = WEBRTC_SPL_LSHIFT_W32(inMicLevelTmp - stt->minLevel, 14); tmp32 = WEBRTC_SPL_LSHIFT_W32(inMicLevelTmp - stt->minLevel, 14);
if (stt->maxInit != stt->minLevel) if (stt->maxInit != stt->minLevel)
{ {
volNormFIX = (WebRtc_Word16)WEBRTC_SPL_DIV(tmp32, volNormFIX = (int16_t)WEBRTC_SPL_DIV(tmp32,
(stt->maxInit - stt->minLevel)); (stt->maxInit - stt->minLevel));
} }
@ -1140,7 +1140,7 @@ WebRtc_Word32 WebRtcAgc_ProcessAnalog(void *state, WebRtc_Word32 inMicLevel,
/* Compute weighting factor for the volume increase, (3.^(-2.*X))/8+1 */ /* Compute weighting factor for the volume increase, (3.^(-2.*X))/8+1 */
weightFIX = kOffset2[index] weightFIX = kOffset2[index]
- (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(kSlope2[index], - (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(kSlope2[index],
volNormFIX, 13); volNormFIX, 13);
/* stt->Rxx160_LPw32 *= 1.047 [~0.2 dB]; */ /* stt->Rxx160_LPw32 *= 1.047 [~0.2 dB]; */
@ -1148,8 +1148,8 @@ WebRtc_Word32 WebRtcAgc_ProcessAnalog(void *state, WebRtc_Word32 inMicLevel,
stt->Rxx160_LPw32 = WEBRTC_SPL_MUL(tmp32, 67); stt->Rxx160_LPw32 = WEBRTC_SPL_MUL(tmp32, 67);
tmp32 = inMicLevelTmp - stt->minLevel; tmp32 = inMicLevelTmp - stt->minLevel;
tmpU32 = ((WebRtc_UWord32)weightFIX * (WebRtc_UWord32)(inMicLevelTmp - stt->minLevel)); tmpU32 = ((uint32_t)weightFIX * (uint32_t)(inMicLevelTmp - stt->minLevel));
stt->micVol = (WebRtc_Word32)WEBRTC_SPL_RSHIFT_U32(tmpU32, 14) + stt->minLevel; stt->micVol = (int32_t)WEBRTC_SPL_RSHIFT_U32(tmpU32, 14) + stt->minLevel;
if (stt->micVol < lastMicVol + 1) if (stt->micVol < lastMicVol + 1)
{ {
stt->micVol = lastMicVol + 1; stt->micVol = lastMicVol + 1;
@ -1242,16 +1242,16 @@ WebRtc_Word32 WebRtcAgc_ProcessAnalog(void *state, WebRtc_Word32 inMicLevel,
return 0; return 0;
} }
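Illustrative note: the volume-lowering branches of WebRtcAgc_ProcessAnalog above share one pattern: the distance from the current level to minLevel is scaled by a Q15 constant (29591 ~ 0.903, 31130 ~ 0.95, 31621 ~ 0.965) and added back onto minLevel. A plain-C sketch of that pattern:

#include <stdint.h>

static int32_t ScaleTowardMin(int32_t level, int32_t minLevel,
                              uint16_t coeffQ15) {
  uint32_t scaled = (uint32_t)coeffQ15 * (uint32_t)(level - minLevel);
  return (int32_t)(scaled >> 15) + minLevel;
}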
int WebRtcAgc_Process(void *agcInst, const WebRtc_Word16 *in_near, int WebRtcAgc_Process(void *agcInst, const int16_t *in_near,
const WebRtc_Word16 *in_near_H, WebRtc_Word16 samples, const int16_t *in_near_H, int16_t samples,
WebRtc_Word16 *out, WebRtc_Word16 *out_H, WebRtc_Word32 inMicLevel, int16_t *out, int16_t *out_H, int32_t inMicLevel,
WebRtc_Word32 *outMicLevel, WebRtc_Word16 echo, int32_t *outMicLevel, int16_t echo,
WebRtc_UWord8 *saturationWarning) uint8_t *saturationWarning)
{ {
Agc_t *stt; Agc_t *stt;
WebRtc_Word32 inMicLevelTmp; int32_t inMicLevelTmp;
WebRtc_Word16 subFrames, i; int16_t subFrames, i;
WebRtc_UWord8 satWarningTmp = 0; uint8_t satWarningTmp = 0;
stt = (Agc_t *)agcInst; stt = (Agc_t *)agcInst;
@ -1326,13 +1326,13 @@ int WebRtcAgc_Process(void *agcInst, const WebRtc_Word16 *in_near,
if (in_near != out) if (in_near != out)
{ {
// Only needed if they don't already point to the same place. // Only needed if they don't already point to the same place.
memcpy(out, in_near, samples * sizeof(WebRtc_Word16)); memcpy(out, in_near, samples * sizeof(int16_t));
} }
if (stt->fs == 32000) if (stt->fs == 32000)
{ {
if (in_near_H != out_H) if (in_near_H != out_H)
{ {
memcpy(out_H, in_near_H, samples * sizeof(WebRtc_Word16)); memcpy(out_H, in_near_H, samples * sizeof(int16_t));
} }
} }
@ -1366,8 +1366,8 @@ int WebRtcAgc_Process(void *agcInst, const WebRtc_Word16 *in_near,
/* update queue */ /* update queue */
if (stt->inQueue > 1) if (stt->inQueue > 1)
{ {
memcpy(stt->env[0], stt->env[1], 10 * sizeof(WebRtc_Word32)); memcpy(stt->env[0], stt->env[1], 10 * sizeof(int32_t));
memcpy(stt->Rxx16w32_array[0], stt->Rxx16w32_array[1], 5 * sizeof(WebRtc_Word32)); memcpy(stt->Rxx16w32_array[0], stt->Rxx16w32_array[1], 5 * sizeof(int32_t));
} }
if (stt->inQueue > 0) if (stt->inQueue > 0)
@ -1523,11 +1523,11 @@ int WebRtcAgc_Free(void *state)
/* minLevel - Minimum volume level /* minLevel - Minimum volume level
* maxLevel - Maximum volume level * maxLevel - Maximum volume level
*/ */
int WebRtcAgc_Init(void *agcInst, WebRtc_Word32 minLevel, WebRtc_Word32 maxLevel, int WebRtcAgc_Init(void *agcInst, int32_t minLevel, int32_t maxLevel,
WebRtc_Word16 agcMode, WebRtc_UWord32 fs) int16_t agcMode, uint32_t fs)
{ {
WebRtc_Word32 max_add, tmp32; int32_t max_add, tmp32;
WebRtc_Word16 i; int16_t i;
int tmpNorm; int tmpNorm;
Agc_t *stt; Agc_t *stt;
@ -1567,7 +1567,7 @@ int WebRtcAgc_Init(void *agcInst, WebRtc_Word32 minLevel, WebRtc_Word32 maxLevel
/* If the volume range is smaller than 0-256 then /* If the volume range is smaller than 0-256 then
* the levels are shifted up to Q8-domain */ * the levels are shifted up to Q8-domain */
tmpNorm = WebRtcSpl_NormU32((WebRtc_UWord32)maxLevel); tmpNorm = WebRtcSpl_NormU32((uint32_t)maxLevel);
stt->scale = tmpNorm - 23; stt->scale = tmpNorm - 23;
if (stt->scale < 0) if (stt->scale < 0)
{ {
@ -1617,7 +1617,7 @@ int WebRtcAgc_Init(void *agcInst, WebRtc_Word32 minLevel, WebRtc_Word32 maxLevel
#endif #endif
/* Minimum output volume is 4% higher than the available lowest volume level */ /* Minimum output volume is 4% higher than the available lowest volume level */
tmp32 = WEBRTC_SPL_RSHIFT_W32((stt->maxLevel - stt->minLevel) * (WebRtc_Word32)10, 8); tmp32 = WEBRTC_SPL_RSHIFT_W32((stt->maxLevel - stt->minLevel) * (int32_t)10, 8);
stt->minOutput = (stt->minLevel + tmp32); stt->minOutput = (stt->minLevel + tmp32);
stt->msTooLow = 0; stt->msTooLow = 0;
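Illustrative note: 10/256 is roughly 3.9%, which is the "4% higher than the available lowest volume" offset described above. Equivalent plain C:

#include <stdint.h>

static int32_t MinOutputLevel(int32_t minLevel, int32_t maxLevel) {
  return minLevel + (((maxLevel - minLevel) * 10) >> 8);  /* ~4% of the range */
}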
@ -1639,12 +1639,12 @@ int WebRtcAgc_Init(void *agcInst, WebRtc_Word32 minLevel, WebRtc_Word32 maxLevel
for (i = 0; i < RXX_BUFFER_LEN; i++) for (i = 0; i < RXX_BUFFER_LEN; i++)
{ {
stt->Rxx16_vectorw32[i] = (WebRtc_Word32)1000; /* -54dBm0 */ stt->Rxx16_vectorw32[i] = (int32_t)1000; /* -54dBm0 */
} }
stt->Rxx160w32 = 125 * RXX_BUFFER_LEN; /* (stt->Rxx16_vectorw32[0]>>3) = 125 */ stt->Rxx160w32 = 125 * RXX_BUFFER_LEN; /* (stt->Rxx16_vectorw32[0]>>3) = 125 */
stt->Rxx16pos = 0; stt->Rxx16pos = 0;
stt->Rxx16_LPw32 = (WebRtc_Word32)16284; /* Q(-4) */ stt->Rxx16_LPw32 = (int32_t)16284; /* Q(-4) */
for (i = 0; i < 5; i++) for (i = 0; i < 5; i++)
{ {


@ -35,87 +35,87 @@
*/ */
#define RXX_BUFFER_LEN 10 #define RXX_BUFFER_LEN 10
static const WebRtc_Word16 kMsecSpeechInner = 520; static const int16_t kMsecSpeechInner = 520;
static const WebRtc_Word16 kMsecSpeechOuter = 340; static const int16_t kMsecSpeechOuter = 340;
static const WebRtc_Word16 kNormalVadThreshold = 400; static const int16_t kNormalVadThreshold = 400;
static const WebRtc_Word16 kAlphaShortTerm = 6; // 1 >> 6 = 0.0156 static const int16_t kAlphaShortTerm = 6; // 1 >> 6 = 0.0156
static const WebRtc_Word16 kAlphaLongTerm = 10; // 1 >> 10 = 0.000977 static const int16_t kAlphaLongTerm = 10; // 1 >> 10 = 0.000977
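Illustrative note: the comments above are meant as powers of two, 2^-6 = 0.015625 and 2^-10 = 0.000977; a literal integer 1 >> 6 would be 0. The constants are shift amounts rather than gains.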
typedef struct typedef struct
{ {
// Configurable parameters/variables // Configurable parameters/variables
WebRtc_UWord32 fs; // Sampling frequency uint32_t fs; // Sampling frequency
WebRtc_Word16 compressionGaindB; // Fixed gain level in dB int16_t compressionGaindB; // Fixed gain level in dB
WebRtc_Word16 targetLevelDbfs; // Target level in -dBfs of envelope (default -3) int16_t targetLevelDbfs; // Target level in -dBfs of envelope (default -3)
WebRtc_Word16 agcMode; // Hard coded mode (adaptAna/adaptDig/fixedDig) int16_t agcMode; // Hard coded mode (adaptAna/adaptDig/fixedDig)
WebRtc_UWord8 limiterEnable; // Enabling limiter (on/off (default off)) uint8_t limiterEnable; // Enabling limiter (on/off (default off))
WebRtcAgc_config_t defaultConfig; WebRtcAgc_config_t defaultConfig;
WebRtcAgc_config_t usedConfig; WebRtcAgc_config_t usedConfig;
// General variables // General variables
WebRtc_Word16 initFlag; int16_t initFlag;
WebRtc_Word16 lastError; int16_t lastError;
// Target level parameters // Target level parameters
// Based on the above: analogTargetLevel = round((32767*10^(-22/20))^2*16/2^7) // Based on the above: analogTargetLevel = round((32767*10^(-22/20))^2*16/2^7)
WebRtc_Word32 analogTargetLevel; // = RXX_BUFFER_LEN * 846805; -22 dBfs int32_t analogTargetLevel; // = RXX_BUFFER_LEN * 846805; -22 dBfs
WebRtc_Word32 startUpperLimit; // = RXX_BUFFER_LEN * 1066064; -21 dBfs int32_t startUpperLimit; // = RXX_BUFFER_LEN * 1066064; -21 dBfs
WebRtc_Word32 startLowerLimit; // = RXX_BUFFER_LEN * 672641; -23 dBfs int32_t startLowerLimit; // = RXX_BUFFER_LEN * 672641; -23 dBfs
WebRtc_Word32 upperPrimaryLimit; // = RXX_BUFFER_LEN * 1342095; -20 dBfs int32_t upperPrimaryLimit; // = RXX_BUFFER_LEN * 1342095; -20 dBfs
WebRtc_Word32 lowerPrimaryLimit; // = RXX_BUFFER_LEN * 534298; -24 dBfs int32_t lowerPrimaryLimit; // = RXX_BUFFER_LEN * 534298; -24 dBfs
WebRtc_Word32 upperSecondaryLimit;// = RXX_BUFFER_LEN * 2677832; -17 dBfs int32_t upperSecondaryLimit;// = RXX_BUFFER_LEN * 2677832; -17 dBfs
WebRtc_Word32 lowerSecondaryLimit;// = RXX_BUFFER_LEN * 267783; -27 dBfs int32_t lowerSecondaryLimit;// = RXX_BUFFER_LEN * 267783; -27 dBfs
WebRtc_UWord16 targetIdx; // Table index for corresponding target level uint16_t targetIdx; // Table index for corresponding target level
#ifdef MIC_LEVEL_FEEDBACK #ifdef MIC_LEVEL_FEEDBACK
WebRtc_UWord16 targetIdxOffset; // Table index offset for level compensation uint16_t targetIdxOffset; // Table index offset for level compensation
#endif #endif
WebRtc_Word16 analogTarget; // Digital reference level in ENV scale int16_t analogTarget; // Digital reference level in ENV scale
// Analog AGC specific variables // Analog AGC specific variables
WebRtc_Word32 filterState[8]; // For downsampling wb to nb int32_t filterState[8]; // For downsampling wb to nb
WebRtc_Word32 upperLimit; // Upper limit for mic energy int32_t upperLimit; // Upper limit for mic energy
WebRtc_Word32 lowerLimit; // Lower limit for mic energy int32_t lowerLimit; // Lower limit for mic energy
WebRtc_Word32 Rxx160w32; // Average energy for one frame int32_t Rxx160w32; // Average energy for one frame
WebRtc_Word32 Rxx16_LPw32; // Low pass filtered subframe energies int32_t Rxx16_LPw32; // Low pass filtered subframe energies
WebRtc_Word32 Rxx160_LPw32; // Low pass filtered frame energies int32_t Rxx160_LPw32; // Low pass filtered frame energies
WebRtc_Word32 Rxx16_LPw32Max; // Keeps track of largest energy subframe int32_t Rxx16_LPw32Max; // Keeps track of largest energy subframe
WebRtc_Word32 Rxx16_vectorw32[RXX_BUFFER_LEN];// Array with subframe energies int32_t Rxx16_vectorw32[RXX_BUFFER_LEN];// Array with subframe energies
WebRtc_Word32 Rxx16w32_array[2][5];// Energy values of microphone signal int32_t Rxx16w32_array[2][5];// Energy values of microphone signal
WebRtc_Word32 env[2][10]; // Envelope values of subframes int32_t env[2][10]; // Envelope values of subframes
WebRtc_Word16 Rxx16pos; // Current position in the Rxx16_vectorw32 int16_t Rxx16pos; // Current position in the Rxx16_vectorw32
WebRtc_Word16 envSum; // Filtered scaled envelope in subframes int16_t envSum; // Filtered scaled envelope in subframes
WebRtc_Word16 vadThreshold; // Threshold for VAD decision int16_t vadThreshold; // Threshold for VAD decision
WebRtc_Word16 inActive; // Inactive time in milliseconds int16_t inActive; // Inactive time in milliseconds
WebRtc_Word16 msTooLow; // Milliseconds of speech at a too low level int16_t msTooLow; // Milliseconds of speech at a too low level
WebRtc_Word16 msTooHigh; // Milliseconds of speech at a too high level int16_t msTooHigh; // Milliseconds of speech at a too high level
WebRtc_Word16 changeToSlowMode; // Change to slow mode after some time at target int16_t changeToSlowMode; // Change to slow mode after some time at target
WebRtc_Word16 firstCall; // First call to the process-function int16_t firstCall; // First call to the process-function
WebRtc_Word16 msZero; // Milliseconds of zero input int16_t msZero; // Milliseconds of zero input
WebRtc_Word16 msecSpeechOuterChange;// Min ms of speech between volume changes int16_t msecSpeechOuterChange;// Min ms of speech between volume changes
WebRtc_Word16 msecSpeechInnerChange;// Min ms of speech between volume changes int16_t msecSpeechInnerChange;// Min ms of speech between volume changes
WebRtc_Word16 activeSpeech; // Milliseconds of active speech int16_t activeSpeech; // Milliseconds of active speech
WebRtc_Word16 muteGuardMs; // Counter to prevent mute action int16_t muteGuardMs; // Counter to prevent mute action
WebRtc_Word16 inQueue; // 10 ms batch indicator int16_t inQueue; // 10 ms batch indicator
// Microphone level variables // Microphone level variables
WebRtc_Word32 micRef; // Remember ref. mic level for virtual mic int32_t micRef; // Remember ref. mic level for virtual mic
WebRtc_UWord16 gainTableIdx; // Current position in virtual gain table uint16_t gainTableIdx; // Current position in virtual gain table
WebRtc_Word32 micGainIdx; // Gain index of mic level to increase slowly int32_t micGainIdx; // Gain index of mic level to increase slowly
WebRtc_Word32 micVol; // Remember volume between frames int32_t micVol; // Remember volume between frames
WebRtc_Word32 maxLevel; // Max possible vol level, incl dig gain int32_t maxLevel; // Max possible vol level, incl dig gain
WebRtc_Word32 maxAnalog; // Maximum possible analog volume level int32_t maxAnalog; // Maximum possible analog volume level
WebRtc_Word32 maxInit; // Initial value of "max" int32_t maxInit; // Initial value of "max"
WebRtc_Word32 minLevel; // Minimum possible volume level int32_t minLevel; // Minimum possible volume level
WebRtc_Word32 minOutput; // Minimum output volume level int32_t minOutput; // Minimum output volume level
WebRtc_Word32 zeroCtrlMax; // Remember max gain => don't amp low input int32_t zeroCtrlMax; // Remember max gain => don't amp low input
WebRtc_Word16 scale; // Scale factor for internal volume levels int16_t scale; // Scale factor for internal volume levels
#ifdef MIC_LEVEL_FEEDBACK #ifdef MIC_LEVEL_FEEDBACK
WebRtc_Word16 numBlocksMicLvlSat; int16_t numBlocksMicLvlSat;
WebRtc_UWord8 micLvlSat; uint8_t micLvlSat;
#endif #endif
// Structs for VAD and digital_agc // Structs for VAD and digital_agc
AgcVad_t vadMic; AgcVad_t vadMic;
@ -124,10 +124,10 @@ typedef struct
#ifdef AGC_DEBUG #ifdef AGC_DEBUG
FILE* fpt; FILE* fpt;
FILE* agcLog; FILE* agcLog;
WebRtc_Word32 fcount; int32_t fcount;
#endif #endif
WebRtc_Word16 lowLevelSignal; int16_t lowLevelSignal;
} Agc_t; } Agc_t;
#endif // WEBRTC_MODULES_AUDIO_PROCESSING_AGC_MAIN_SOURCE_ANALOG_AGC_H_ #endif // WEBRTC_MODULES_AUDIO_PROCESSING_AGC_MAIN_SOURCE_ANALOG_AGC_H_


@ -37,7 +37,7 @@
// Generator table for y=log2(1+e^x) in Q8. // Generator table for y=log2(1+e^x) in Q8.
enum { kGenFuncTableSize = 128 }; enum { kGenFuncTableSize = 128 };
static const WebRtc_UWord16 kGenFuncTable[kGenFuncTableSize] = { static const uint16_t kGenFuncTable[kGenFuncTableSize] = {
256, 485, 786, 1126, 1484, 1849, 2217, 2586, 256, 485, 786, 1126, 1484, 1849, 2217, 2586,
2955, 3324, 3693, 4063, 4432, 4801, 5171, 5540, 2955, 3324, 3693, 4063, 4432, 4801, 5171, 5540,
5909, 6279, 6648, 7017, 7387, 7756, 8125, 8495, 5909, 6279, 6648, 7017, 7387, 7756, 8125, 8495,
@ -56,29 +56,29 @@ static const WebRtc_UWord16 kGenFuncTable[kGenFuncTableSize] = {
44320, 44689, 45058, 45428, 45797, 46166, 46536, 46905 44320, 44689, 45058, 45428, 45797, 46166, 46536, 46905
}; };
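Illustrative note: the entries above match y[i] = round(2^8 * log2(1 + e^i)) for i = 0..127 (256 * log2(1 + e^0) = 256, 256 * log2(1 + e^1) ~ 485, and so on). A sketch that regenerates the table under that assumption:

#include <math.h>
#include <stdint.h>

static void GenLogTableQ8(uint16_t table[128]) {
  for (int i = 0; i < 128; ++i) {
    double y = 256.0 * log2(1.0 + exp((double)i));
    table[i] = (uint16_t)(y + 0.5);  /* round to nearest */
  }
}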
static const WebRtc_Word16 kAvgDecayTime = 250; // frames; < 3000 static const int16_t kAvgDecayTime = 250; // frames; < 3000
WebRtc_Word32 WebRtcAgc_CalculateGainTable(WebRtc_Word32 *gainTable, // Q16 int32_t WebRtcAgc_CalculateGainTable(int32_t *gainTable, // Q16
WebRtc_Word16 digCompGaindB, // Q0 int16_t digCompGaindB, // Q0
WebRtc_Word16 targetLevelDbfs,// Q0 int16_t targetLevelDbfs,// Q0
WebRtc_UWord8 limiterEnable, uint8_t limiterEnable,
WebRtc_Word16 analogTarget) // Q0 int16_t analogTarget) // Q0
{ {
// This function generates the compressor gain table used in the fixed digital part. // This function generates the compressor gain table used in the fixed digital part.
WebRtc_UWord32 tmpU32no1, tmpU32no2, absInLevel, logApprox; uint32_t tmpU32no1, tmpU32no2, absInLevel, logApprox;
WebRtc_Word32 inLevel, limiterLvl; int32_t inLevel, limiterLvl;
WebRtc_Word32 tmp32, tmp32no1, tmp32no2, numFIX, den, y32; int32_t tmp32, tmp32no1, tmp32no2, numFIX, den, y32;
const WebRtc_UWord16 kLog10 = 54426; // log2(10) in Q14 const uint16_t kLog10 = 54426; // log2(10) in Q14
const WebRtc_UWord16 kLog10_2 = 49321; // 10*log10(2) in Q14 const uint16_t kLog10_2 = 49321; // 10*log10(2) in Q14
const WebRtc_UWord16 kLogE_1 = 23637; // log2(e) in Q14 const uint16_t kLogE_1 = 23637; // log2(e) in Q14
WebRtc_UWord16 constMaxGain; uint16_t constMaxGain;
WebRtc_UWord16 tmpU16, intPart, fracPart; uint16_t tmpU16, intPart, fracPart;
const WebRtc_Word16 kCompRatio = 3; const int16_t kCompRatio = 3;
const WebRtc_Word16 kSoftLimiterLeft = 1; const int16_t kSoftLimiterLeft = 1;
WebRtc_Word16 limiterOffset = 0; // Limiter offset int16_t limiterOffset = 0; // Limiter offset
WebRtc_Word16 limiterIdx, limiterLvlX; int16_t limiterIdx, limiterLvlX;
WebRtc_Word16 constLinApprox, zeroGainLvl, maxGain, diffGain; int16_t constLinApprox, zeroGainLvl, maxGain, diffGain;
WebRtc_Word16 i, tmp16, tmp16no1; int16_t i, tmp16, tmp16no1;
int zeros, zerosScale; int zeros, zerosScale;
// Constants // Constants
@ -117,7 +117,7 @@ WebRtc_Word32 WebRtcAgc_CalculateGainTable(WebRtc_Word32 *gainTable, // Q16
// limiterLvl = targetLevelDbfs + limiterOffset/compRatio // limiterLvl = targetLevelDbfs + limiterOffset/compRatio
limiterLvlX = analogTarget - limiterOffset; limiterLvlX = analogTarget - limiterOffset;
limiterIdx = 2 limiterIdx = 2
+ WebRtcSpl_DivW32W16ResW16(WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)limiterLvlX, 13), + WebRtcSpl_DivW32W16ResW16(WEBRTC_SPL_LSHIFT_W32((int32_t)limiterLvlX, 13),
WEBRTC_SPL_RSHIFT_U16(kLog10_2, 1)); WEBRTC_SPL_RSHIFT_U16(kLog10_2, 1));
tmp16no1 = WebRtcSpl_DivW32W16ResW16(limiterOffset + (kCompRatio >> 1), kCompRatio); tmp16no1 = WebRtcSpl_DivW32W16ResW16(limiterOffset + (kCompRatio >> 1), kCompRatio);
limiterLvl = targetLevelDbfs + tmp16no1; limiterLvl = targetLevelDbfs + tmp16no1;
@ -139,22 +139,22 @@ WebRtc_Word32 WebRtcAgc_CalculateGainTable(WebRtc_Word32 *gainTable, // Q16
{ {
// Calculate scaled input level (compressor): // Calculate scaled input level (compressor):
// inLevel = fix((-constLog10_2*(compRatio-1)*(1-i)+fix(compRatio/2))/compRatio) // inLevel = fix((-constLog10_2*(compRatio-1)*(1-i)+fix(compRatio/2))/compRatio)
tmp16 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16(kCompRatio - 1, i - 1); // Q0 tmp16 = (int16_t)WEBRTC_SPL_MUL_16_16(kCompRatio - 1, i - 1); // Q0
tmp32 = WEBRTC_SPL_MUL_16_U16(tmp16, kLog10_2) + 1; // Q14 tmp32 = WEBRTC_SPL_MUL_16_U16(tmp16, kLog10_2) + 1; // Q14
inLevel = WebRtcSpl_DivW32W16(tmp32, kCompRatio); // Q14 inLevel = WebRtcSpl_DivW32W16(tmp32, kCompRatio); // Q14
// Calculate diffGain-inLevel, to map using the genFuncTable // Calculate diffGain-inLevel, to map using the genFuncTable
inLevel = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)diffGain, 14) - inLevel; // Q14 inLevel = WEBRTC_SPL_LSHIFT_W32((int32_t)diffGain, 14) - inLevel; // Q14
// Make calculations on abs(inLevel) and compensate for the sign afterwards. // Make calculations on abs(inLevel) and compensate for the sign afterwards.
absInLevel = (WebRtc_UWord32)WEBRTC_SPL_ABS_W32(inLevel); // Q14 absInLevel = (uint32_t)WEBRTC_SPL_ABS_W32(inLevel); // Q14
// LUT with interpolation // LUT with interpolation
intPart = (WebRtc_UWord16)WEBRTC_SPL_RSHIFT_U32(absInLevel, 14); intPart = (uint16_t)WEBRTC_SPL_RSHIFT_U32(absInLevel, 14);
fracPart = (WebRtc_UWord16)(absInLevel & 0x00003FFF); // extract the fractional part fracPart = (uint16_t)(absInLevel & 0x00003FFF); // extract the fractional part
tmpU16 = kGenFuncTable[intPart + 1] - kGenFuncTable[intPart]; // Q8 tmpU16 = kGenFuncTable[intPart + 1] - kGenFuncTable[intPart]; // Q8
tmpU32no1 = WEBRTC_SPL_UMUL_16_16(tmpU16, fracPart); // Q22 tmpU32no1 = WEBRTC_SPL_UMUL_16_16(tmpU16, fracPart); // Q22
tmpU32no1 += WEBRTC_SPL_LSHIFT_U32((WebRtc_UWord32)kGenFuncTable[intPart], 14); // Q22 tmpU32no1 += WEBRTC_SPL_LSHIFT_U32((uint32_t)kGenFuncTable[intPart], 14); // Q22
logApprox = WEBRTC_SPL_RSHIFT_U32(tmpU32no1, 8); // Q14 logApprox = WEBRTC_SPL_RSHIFT_U32(tmpU32no1, 8); // Q14
// Compensate for negative exponent using the relation: // Compensate for negative exponent using the relation:
// log2(1 + 2^-x) = log2(1 + 2^x) - x // log2(1 + 2^-x) = log2(1 + 2^x) - x
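Illustrative note: the lookup above is linear interpolation between adjacent kGenFuncTable entries, with absInLevel in Q14 split into an integer index (upper bits) and a 14-bit fraction. A plain-C sketch returning the Q22 intermediate used above (the caller must keep intPart + 1 inside the table):

#include <stdint.h>

static uint32_t InterpolateLogQ22(const uint16_t* table, uint32_t absInLevelQ14) {
  uint16_t intPart = (uint16_t)(absInLevelQ14 >> 14);
  uint16_t fracPart = (uint16_t)(absInLevelQ14 & 0x3FFF);
  uint32_t diff = (uint32_t)(table[intPart + 1] - table[intPart]);  /* Q8 */
  return diff * fracPart + ((uint32_t)table[intPart] << 14);        /* Q22 */
}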
@ -187,7 +187,7 @@ WebRtc_Word32 WebRtcAgc_CalculateGainTable(WebRtc_Word32 *gainTable, // Q16
} }
} }
numFIX = WEBRTC_SPL_LSHIFT_W32(WEBRTC_SPL_MUL_16_U16(maxGain, constMaxGain), 6); // Q14 numFIX = WEBRTC_SPL_LSHIFT_W32(WEBRTC_SPL_MUL_16_U16(maxGain, constMaxGain), 6); // Q14
numFIX -= WEBRTC_SPL_MUL_32_16((WebRtc_Word32)logApprox, diffGain); // Q14 numFIX -= WEBRTC_SPL_MUL_32_16((int32_t)logApprox, diffGain); // Q14
// Calculate ratio // Calculate ratio
// Shift |numFIX| as much as possible. // Shift |numFIX| as much as possible.
@ -231,8 +231,8 @@ WebRtc_Word32 WebRtcAgc_CalculateGainTable(WebRtc_Word32 *gainTable, // Q16
// Calculate power // Calculate power
if (tmp32 > 0) if (tmp32 > 0)
{ {
intPart = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32, 14); intPart = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp32, 14);
fracPart = (WebRtc_UWord16)(tmp32 & 0x00003FFF); // in Q14 fracPart = (uint16_t)(tmp32 & 0x00003FFF); // in Q14
if (WEBRTC_SPL_RSHIFT_W32(fracPart, 13)) if (WEBRTC_SPL_RSHIFT_W32(fracPart, 13))
{ {
tmp16 = WEBRTC_SPL_LSHIFT_W16(2, 14) - constLinApprox; tmp16 = WEBRTC_SPL_LSHIFT_W16(2, 14) - constLinApprox;
@ -246,7 +246,7 @@ WebRtc_Word32 WebRtcAgc_CalculateGainTable(WebRtc_Word32 *gainTable, // Q16
tmp32no2 = WEBRTC_SPL_MUL_32_16(fracPart, tmp16); tmp32no2 = WEBRTC_SPL_MUL_32_16(fracPart, tmp16);
tmp32no2 = WEBRTC_SPL_RSHIFT_W32(tmp32no2, 13); tmp32no2 = WEBRTC_SPL_RSHIFT_W32(tmp32no2, 13);
} }
fracPart = (WebRtc_UWord16)tmp32no2; fracPart = (uint16_t)tmp32no2;
gainTable[i] = WEBRTC_SPL_LSHIFT_W32(1, intPart) gainTable[i] = WEBRTC_SPL_LSHIFT_W32(1, intPart)
+ WEBRTC_SPL_SHIFT_W32(fracPart, intPart - 14); + WEBRTC_SPL_SHIFT_W32(fracPart, intPart - 14);
} else } else
@ -258,7 +258,7 @@ WebRtc_Word32 WebRtcAgc_CalculateGainTable(WebRtc_Word32 *gainTable, // Q16
return 0; return 0;
} }
WebRtc_Word32 WebRtcAgc_InitDigital(DigitalAgc_t *stt, WebRtc_Word16 agcMode) int32_t WebRtcAgc_InitDigital(DigitalAgc_t *stt, int16_t agcMode)
{ {
if (agcMode == kAgcModeFixedDigital) if (agcMode == kAgcModeFixedDigital)
@ -268,7 +268,7 @@ WebRtc_Word32 WebRtcAgc_InitDigital(DigitalAgc_t *stt, WebRtc_Word16 agcMode)
} else } else
{ {
// start out with 0 dB gain // start out with 0 dB gain
stt->capacitorSlow = 134217728; // (WebRtc_Word32)(0.125f * 32768.0f * 32768.0f); stt->capacitorSlow = 134217728; // (int32_t)(0.125f * 32768.0f * 32768.0f);
} }
stt->capacitorFast = 0; stt->capacitorFast = 0;
stt->gain = 65536; stt->gain = 65536;
@ -285,8 +285,8 @@ WebRtc_Word32 WebRtcAgc_InitDigital(DigitalAgc_t *stt, WebRtc_Word16 agcMode)
return 0; return 0;
} }
WebRtc_Word32 WebRtcAgc_AddFarendToDigital(DigitalAgc_t *stt, const WebRtc_Word16 *in_far, int32_t WebRtcAgc_AddFarendToDigital(DigitalAgc_t *stt, const int16_t *in_far,
WebRtc_Word16 nrSamples) int16_t nrSamples)
{ {
// Check for valid pointer // Check for valid pointer
if (&stt->vadFarend == NULL) if (&stt->vadFarend == NULL)
@ -300,26 +300,26 @@ WebRtc_Word32 WebRtcAgc_AddFarendToDigital(DigitalAgc_t *stt, const WebRtc_Word1
return 0; return 0;
} }
WebRtc_Word32 WebRtcAgc_ProcessDigital(DigitalAgc_t *stt, const WebRtc_Word16 *in_near, int32_t WebRtcAgc_ProcessDigital(DigitalAgc_t *stt, const int16_t *in_near,
const WebRtc_Word16 *in_near_H, WebRtc_Word16 *out, const int16_t *in_near_H, int16_t *out,
WebRtc_Word16 *out_H, WebRtc_UWord32 FS, int16_t *out_H, uint32_t FS,
WebRtc_Word16 lowlevelSignal) int16_t lowlevelSignal)
{ {
// array for gains (one value per ms, incl start & end) // array for gains (one value per ms, incl start & end)
WebRtc_Word32 gains[11]; int32_t gains[11];
WebRtc_Word32 out_tmp, tmp32; int32_t out_tmp, tmp32;
WebRtc_Word32 env[10]; int32_t env[10];
WebRtc_Word32 nrg, max_nrg; int32_t nrg, max_nrg;
WebRtc_Word32 cur_level; int32_t cur_level;
WebRtc_Word32 gain32, delta; int32_t gain32, delta;
WebRtc_Word16 logratio; int16_t logratio;
WebRtc_Word16 lower_thr, upper_thr; int16_t lower_thr, upper_thr;
WebRtc_Word16 zeros, zeros_fast, frac; int16_t zeros, zeros_fast, frac;
WebRtc_Word16 decay; int16_t decay;
WebRtc_Word16 gate, gain_adj; int16_t gate, gain_adj;
WebRtc_Word16 k, n; int16_t k, n;
WebRtc_Word16 L, L2; // samples/subframe int16_t L, L2; // samples/subframe
// determine number of samples per ms // determine number of samples per ms
if (FS == 8000) if (FS == 8000)
@ -343,13 +343,13 @@ WebRtc_Word32 WebRtcAgc_ProcessDigital(DigitalAgc_t *stt, const WebRtc_Word16 *i
if (in_near != out) if (in_near != out)
{ {
// Only needed if they don't already point to the same place. // Only needed if they don't already point to the same place.
memcpy(out, in_near, 10 * L * sizeof(WebRtc_Word16)); memcpy(out, in_near, 10 * L * sizeof(int16_t));
} }
if (FS == 32000) if (FS == 32000)
{ {
if (in_near_H != out_H) if (in_near_H != out_H)
{ {
memcpy(out_H, in_near_H, 10 * L * sizeof(WebRtc_Word16)); memcpy(out_H, in_near_H, 10 * L * sizeof(int16_t));
} }
} }
// VAD for near end // VAD for near end
@ -359,7 +359,7 @@ WebRtc_Word32 WebRtcAgc_ProcessDigital(DigitalAgc_t *stt, const WebRtc_Word16 *i
if (stt->vadFarend.counter > 10) if (stt->vadFarend.counter > 10)
{ {
tmp32 = WEBRTC_SPL_MUL_16_16(3, logratio); tmp32 = WEBRTC_SPL_MUL_16_16(3, logratio);
logratio = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32 - stt->vadFarend.logRatio, 2); logratio = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp32 - stt->vadFarend.logRatio, 2);
} }
// Determine decay factor depending on VAD // Determine decay factor depending on VAD
@ -376,11 +376,11 @@ WebRtc_Word32 WebRtcAgc_ProcessDigital(DigitalAgc_t *stt, const WebRtc_Word16 *i
decay = 0; decay = 0;
} else } else
{ {
// decay = (WebRtc_Word16)(((lower_thr - logratio) // decay = (int16_t)(((lower_thr - logratio)
// * (2^27/(DecayTime*(upper_thr-lower_thr)))) >> 10); // * (2^27/(DecayTime*(upper_thr-lower_thr)))) >> 10);
// SUBSTITUTED: 2^27/(DecayTime*(upper_thr-lower_thr)) -> 65 // SUBSTITUTED: 2^27/(DecayTime*(upper_thr-lower_thr)) -> 65
tmp32 = WEBRTC_SPL_MUL_16_16((lower_thr - logratio), 65); tmp32 = WEBRTC_SPL_MUL_16_16((lower_thr - logratio), 65);
decay = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32, 10); decay = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp32, 10);
} }
// adjust decay factor for long silence (detected as low standard deviation) // adjust decay factor for long silence (detected as low standard deviation)
@ -392,9 +392,9 @@ WebRtc_Word32 WebRtcAgc_ProcessDigital(DigitalAgc_t *stt, const WebRtc_Word16 *i
decay = 0; decay = 0;
} else if (stt->vadNearend.stdLongTerm < 8096) } else if (stt->vadNearend.stdLongTerm < 8096)
{ {
// decay = (WebRtc_Word16)(((stt->vadNearend.stdLongTerm - 4000) * decay) >> 12); // decay = (int16_t)(((stt->vadNearend.stdLongTerm - 4000) * decay) >> 12);
tmp32 = WEBRTC_SPL_MUL_16_16((stt->vadNearend.stdLongTerm - 4000), decay); tmp32 = WEBRTC_SPL_MUL_16_16((stt->vadNearend.stdLongTerm - 4000), decay);
decay = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32, 12); decay = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp32, 12);
} }
if (lowlevelSignal != 0) if (lowlevelSignal != 0)
@ -457,13 +457,13 @@ WebRtc_Word32 WebRtcAgc_ProcessDigital(DigitalAgc_t *stt, const WebRtc_Word16 *i
} }
// Translate signal level into gain, using a piecewise linear approximation // Translate signal level into gain, using a piecewise linear approximation
// find number of leading zeros // find number of leading zeros
zeros = WebRtcSpl_NormU32((WebRtc_UWord32)cur_level); zeros = WebRtcSpl_NormU32((uint32_t)cur_level);
if (cur_level == 0) if (cur_level == 0)
{ {
zeros = 31; zeros = 31;
} }
tmp32 = (WEBRTC_SPL_LSHIFT_W32(cur_level, zeros) & 0x7FFFFFFF); tmp32 = (WEBRTC_SPL_LSHIFT_W32(cur_level, zeros) & 0x7FFFFFFF);
frac = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32, 19); // Q12 frac = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp32, 19); // Q12
tmp32 = WEBRTC_SPL_MUL((stt->gainTable[zeros-1] - stt->gainTable[zeros]), frac); tmp32 = WEBRTC_SPL_MUL((stt->gainTable[zeros-1] - stt->gainTable[zeros]), frac);
gains[k + 1] = stt->gainTable[zeros] + WEBRTC_SPL_RSHIFT_W32(tmp32, 12); gains[k + 1] = stt->gainTable[zeros] + WEBRTC_SPL_RSHIFT_W32(tmp32, 12);
#ifdef AGC_DEBUG #ifdef AGC_DEBUG
@ -477,14 +477,14 @@ WebRtc_Word32 WebRtcAgc_ProcessDigital(DigitalAgc_t *stt, const WebRtc_Word16 *i
// Gate processing (lower gain during absence of speech) // Gate processing (lower gain during absence of speech)
zeros = WEBRTC_SPL_LSHIFT_W16(zeros, 9) - WEBRTC_SPL_RSHIFT_W16(frac, 3); zeros = WEBRTC_SPL_LSHIFT_W16(zeros, 9) - WEBRTC_SPL_RSHIFT_W16(frac, 3);
// find number of leading zeros // find number of leading zeros
zeros_fast = WebRtcSpl_NormU32((WebRtc_UWord32)stt->capacitorFast); zeros_fast = WebRtcSpl_NormU32((uint32_t)stt->capacitorFast);
if (stt->capacitorFast == 0) if (stt->capacitorFast == 0)
{ {
zeros_fast = 31; zeros_fast = 31;
} }
tmp32 = (WEBRTC_SPL_LSHIFT_W32(stt->capacitorFast, zeros_fast) & 0x7FFFFFFF); tmp32 = (WEBRTC_SPL_LSHIFT_W32(stt->capacitorFast, zeros_fast) & 0x7FFFFFFF);
zeros_fast = WEBRTC_SPL_LSHIFT_W16(zeros_fast, 9); zeros_fast = WEBRTC_SPL_LSHIFT_W16(zeros_fast, 9);
zeros_fast -= (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32, 22); zeros_fast -= (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp32, 22);
gate = 1000 + zeros_fast - zeros - stt->vadNearend.stdShortTerm; gate = 1000 + zeros_fast - zeros - stt->vadNearend.stdShortTerm;
@ -494,7 +494,7 @@ WebRtc_Word32 WebRtcAgc_ProcessDigital(DigitalAgc_t *stt, const WebRtc_Word16 *i
} else } else
{ {
tmp32 = WEBRTC_SPL_MUL_16_16(stt->gatePrevious, 7); tmp32 = WEBRTC_SPL_MUL_16_16(stt->gatePrevious, 7);
gate = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((WebRtc_Word32)gate + tmp32, 3); gate = (int16_t)WEBRTC_SPL_RSHIFT_W32((int32_t)gate + tmp32, 3);
stt->gatePrevious = gate; stt->gatePrevious = gate;
} }
// gate < 0 -> no gate // gate < 0 -> no gate
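Illustrative note: the gate update above is a 7/8 smoother toward the previous value, gate = (gate + 7 * gatePrevious) / 8. A plain-C sketch:

#include <stdint.h>

static int16_t SmoothGate(int16_t gate, int16_t gatePrevious) {
  return (int16_t)(((int32_t)gate + 7 * (int32_t)gatePrevious) >> 3);
}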
@ -537,7 +537,7 @@ WebRtc_Word32 WebRtcAgc_ProcessDigital(DigitalAgc_t *stt, const WebRtc_Word16 *i
gain32 = WEBRTC_SPL_MUL(gain32, gain32); gain32 = WEBRTC_SPL_MUL(gain32, gain32);
// check for overflow // check for overflow
while (AGC_MUL32(WEBRTC_SPL_RSHIFT_W32(env[k], 12) + 1, gain32) while (AGC_MUL32(WEBRTC_SPL_RSHIFT_W32(env[k], 12) + 1, gain32)
> WEBRTC_SPL_SHIFT_W32((WebRtc_Word32)32767, 2 * (1 - zeros + 10))) > WEBRTC_SPL_SHIFT_W32((int32_t)32767, 2 * (1 - zeros + 10)))
{ {
// multiply by 253/256 ==> -0.1 dB // multiply by 253/256 ==> -0.1 dB
if (gains[k + 1] > 8388607) if (gains[k + 1] > 8388607)
@ -571,36 +571,36 @@ WebRtc_Word32 WebRtcAgc_ProcessDigital(DigitalAgc_t *stt, const WebRtc_Word16 *i
for (n = 0; n < L; n++) for (n = 0; n < L; n++)
{ {
// For lower band // For lower band
tmp32 = WEBRTC_SPL_MUL((WebRtc_Word32)out[n], WEBRTC_SPL_RSHIFT_W32(gain32 + 127, 7)); tmp32 = WEBRTC_SPL_MUL((int32_t)out[n], WEBRTC_SPL_RSHIFT_W32(gain32 + 127, 7));
out_tmp = WEBRTC_SPL_RSHIFT_W32(tmp32 , 16); out_tmp = WEBRTC_SPL_RSHIFT_W32(tmp32 , 16);
if (out_tmp > 4095) if (out_tmp > 4095)
{ {
out[n] = (WebRtc_Word16)32767; out[n] = (int16_t)32767;
} else if (out_tmp < -4096) } else if (out_tmp < -4096)
{ {
out[n] = (WebRtc_Word16)-32768; out[n] = (int16_t)-32768;
} else } else
{ {
tmp32 = WEBRTC_SPL_MUL((WebRtc_Word32)out[n], WEBRTC_SPL_RSHIFT_W32(gain32, 4)); tmp32 = WEBRTC_SPL_MUL((int32_t)out[n], WEBRTC_SPL_RSHIFT_W32(gain32, 4));
out[n] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32 , 16); out[n] = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp32 , 16);
} }
// For higher band // For higher band
if (FS == 32000) if (FS == 32000)
{ {
tmp32 = WEBRTC_SPL_MUL((WebRtc_Word32)out_H[n], tmp32 = WEBRTC_SPL_MUL((int32_t)out_H[n],
WEBRTC_SPL_RSHIFT_W32(gain32 + 127, 7)); WEBRTC_SPL_RSHIFT_W32(gain32 + 127, 7));
out_tmp = WEBRTC_SPL_RSHIFT_W32(tmp32 , 16); out_tmp = WEBRTC_SPL_RSHIFT_W32(tmp32 , 16);
if (out_tmp > 4095) if (out_tmp > 4095)
{ {
out_H[n] = (WebRtc_Word16)32767; out_H[n] = (int16_t)32767;
} else if (out_tmp < -4096) } else if (out_tmp < -4096)
{ {
out_H[n] = (WebRtc_Word16)-32768; out_H[n] = (int16_t)-32768;
} else } else
{ {
tmp32 = WEBRTC_SPL_MUL((WebRtc_Word32)out_H[n], tmp32 = WEBRTC_SPL_MUL((int32_t)out_H[n],
WEBRTC_SPL_RSHIFT_W32(gain32, 4)); WEBRTC_SPL_RSHIFT_W32(gain32, 4));
out_H[n] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32 , 16); out_H[n] = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp32 , 16);
} }
} }
// //
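Illustrative note: in the per-sample scaling above, the product is first formed with a coarser gain, (gain32 + 127) >> 7, to test for overflow; if that result exceeds 4095 after the >> 16, the full-precision product with gain32 >> 4 (eight times larger) would not fit in 16 bits, so the sample is saturated to 32767 or -32768. A plain-C sketch of the pattern, using a 64-bit intermediate for clarity:

#include <stdint.h>

static int16_t ScaleWithOverflowCheck(int16_t sample, int32_t gain32) {
  int32_t coarse = (int32_t)(((int64_t)sample * ((gain32 + 127) >> 7)) >> 16);
  if (coarse > 4095) return 32767;
  if (coarse < -4096) return -32768;
  return (int16_t)(((int64_t)sample * (gain32 >> 4)) >> 16);
}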
@ -616,15 +616,15 @@ WebRtc_Word32 WebRtcAgc_ProcessDigital(DigitalAgc_t *stt, const WebRtc_Word16 *i
for (n = 0; n < L; n++) for (n = 0; n < L; n++)
{ {
// For lower band // For lower band
tmp32 = WEBRTC_SPL_MUL((WebRtc_Word32)out[k * L + n], tmp32 = WEBRTC_SPL_MUL((int32_t)out[k * L + n],
WEBRTC_SPL_RSHIFT_W32(gain32, 4)); WEBRTC_SPL_RSHIFT_W32(gain32, 4));
out[k * L + n] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32 , 16); out[k * L + n] = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp32 , 16);
// For higher band // For higher band
if (FS == 32000) if (FS == 32000)
{ {
tmp32 = WEBRTC_SPL_MUL((WebRtc_Word32)out_H[k * L + n], tmp32 = WEBRTC_SPL_MUL((int32_t)out_H[k * L + n],
WEBRTC_SPL_RSHIFT_W32(gain32, 4)); WEBRTC_SPL_RSHIFT_W32(gain32, 4));
out_H[k * L + n] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32 , 16); out_H[k * L + n] = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp32 , 16);
} }
gain32 += delta; gain32 += delta;
} }
@ -635,7 +635,7 @@ WebRtc_Word32 WebRtcAgc_ProcessDigital(DigitalAgc_t *stt, const WebRtc_Word16 *i
void WebRtcAgc_InitVad(AgcVad_t *state) void WebRtcAgc_InitVad(AgcVad_t *state)
{ {
WebRtc_Word16 k; int16_t k;
state->HPstate = 0; // state of high pass filter state->HPstate = 0; // state of high pass filter
state->logRatio = 0; // log( P(active) / P(inactive) ) state->logRatio = 0; // log( P(active) / P(inactive) )
@ -661,17 +661,17 @@ void WebRtcAgc_InitVad(AgcVad_t *state)
} }
} }
WebRtc_Word16 WebRtcAgc_ProcessVad(AgcVad_t *state, // (i) VAD state int16_t WebRtcAgc_ProcessVad(AgcVad_t *state, // (i) VAD state
const WebRtc_Word16 *in, // (i) Speech signal const int16_t *in, // (i) Speech signal
WebRtc_Word16 nrSamples) // (i) number of samples int16_t nrSamples) // (i) number of samples
{ {
WebRtc_Word32 out, nrg, tmp32, tmp32b; int32_t out, nrg, tmp32, tmp32b;
WebRtc_UWord16 tmpU16; uint16_t tmpU16;
WebRtc_Word16 k, subfr, tmp16; int16_t k, subfr, tmp16;
WebRtc_Word16 buf1[8]; int16_t buf1[8];
WebRtc_Word16 buf2[4]; int16_t buf2[4];
WebRtc_Word16 HPstate; int16_t HPstate;
WebRtc_Word16 zeros, dB; int16_t zeros, dB;
// process in 10 sub frames of 1 ms (to save on memory) // process in 10 sub frames of 1 ms (to save on memory)
nrg = 0; nrg = 0;
@ -683,9 +683,9 @@ WebRtc_Word16 WebRtcAgc_ProcessVad(AgcVad_t *state, // (i) VAD state
{ {
for (k = 0; k < 8; k++) for (k = 0; k < 8; k++)
{ {
tmp32 = (WebRtc_Word32)in[2 * k] + (WebRtc_Word32)in[2 * k + 1]; tmp32 = (int32_t)in[2 * k] + (int32_t)in[2 * k + 1];
tmp32 = WEBRTC_SPL_RSHIFT_W32(tmp32, 1); tmp32 = WEBRTC_SPL_RSHIFT_W32(tmp32, 1);
buf1[k] = (WebRtc_Word16)tmp32; buf1[k] = (int16_t)tmp32;
} }
in += 16; in += 16;
@ -701,7 +701,7 @@ WebRtc_Word16 WebRtcAgc_ProcessVad(AgcVad_t *state, // (i) VAD state
{ {
out = buf2[k] + HPstate; out = buf2[k] + HPstate;
tmp32 = WEBRTC_SPL_MUL(600, out); tmp32 = WEBRTC_SPL_MUL(600, out);
HPstate = (WebRtc_Word16)(WEBRTC_SPL_RSHIFT_W32(tmp32, 10) - buf2[k]); HPstate = (int16_t)(WEBRTC_SPL_RSHIFT_W32(tmp32, 10) - buf2[k]);
tmp32 = WEBRTC_SPL_MUL(out, out); tmp32 = WEBRTC_SPL_MUL(out, out);
nrg += WEBRTC_SPL_RSHIFT_W32(tmp32, 6); nrg += WEBRTC_SPL_RSHIFT_W32(tmp32, 6);
} }
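Illustrative note: the first loop above halves the sample rate by averaging adjacent pairs before the simple high-pass filter and energy accumulation. A sketch of that step:

#include <stdint.h>

static void AverageDownByTwo(const int16_t* in, int16_t* out, int out_len) {
  for (int k = 0; k < out_len; ++k) {
    out[k] = (int16_t)(((int32_t)in[2 * k] + in[2 * k + 1]) >> 1);
  }
}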
@ -745,8 +745,8 @@ WebRtc_Word16 WebRtcAgc_ProcessVad(AgcVad_t *state, // (i) VAD state
} }
// update short-term estimate of mean energy level (Q10) // update short-term estimate of mean energy level (Q10)
tmp32 = (WEBRTC_SPL_MUL_16_16(state->meanShortTerm, 15) + (WebRtc_Word32)dB); tmp32 = (WEBRTC_SPL_MUL_16_16(state->meanShortTerm, 15) + (int32_t)dB);
state->meanShortTerm = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32, 4); state->meanShortTerm = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp32, 4);
// update short-term estimate of variance in energy level (Q8) // update short-term estimate of variance in energy level (Q8)
tmp32 = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_16_16(dB, dB), 12); tmp32 = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_16_16(dB, dB), 12);
@ -756,10 +756,10 @@ WebRtc_Word16 WebRtcAgc_ProcessVad(AgcVad_t *state, // (i) VAD state
// update short-term estimate of standard deviation in energy level (Q10) // update short-term estimate of standard deviation in energy level (Q10)
tmp32 = WEBRTC_SPL_MUL_16_16(state->meanShortTerm, state->meanShortTerm); tmp32 = WEBRTC_SPL_MUL_16_16(state->meanShortTerm, state->meanShortTerm);
tmp32 = WEBRTC_SPL_LSHIFT_W32(state->varianceShortTerm, 12) - tmp32; tmp32 = WEBRTC_SPL_LSHIFT_W32(state->varianceShortTerm, 12) - tmp32;
state->stdShortTerm = (WebRtc_Word16)WebRtcSpl_Sqrt(tmp32); state->stdShortTerm = (int16_t)WebRtcSpl_Sqrt(tmp32);
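Illustrative note: with the variance kept in Q8 and the mean in Q10 (see the AgcVad_t field comments later in this change), variance << 12 and mean * mean are both Q20, so the square root above yields the standard deviation back in Q10.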
// update long-term estimate of mean energy level (Q10) // update long-term estimate of mean energy level (Q10)
tmp32 = WEBRTC_SPL_MUL_16_16(state->meanLongTerm, state->counter) + (WebRtc_Word32)dB; tmp32 = WEBRTC_SPL_MUL_16_16(state->meanLongTerm, state->counter) + (int32_t)dB;
state->meanLongTerm = WebRtcSpl_DivW32W16ResW16(tmp32, state->meanLongTerm = WebRtcSpl_DivW32W16ResW16(tmp32,
WEBRTC_SPL_ADD_SAT_W16(state->counter, 1)); WEBRTC_SPL_ADD_SAT_W16(state->counter, 1));
@ -772,17 +772,17 @@ WebRtc_Word16 WebRtcAgc_ProcessVad(AgcVad_t *state, // (i) VAD state
// update long-term estimate of standard deviation in energy level (Q10) // update long-term estimate of standard deviation in energy level (Q10)
tmp32 = WEBRTC_SPL_MUL_16_16(state->meanLongTerm, state->meanLongTerm); tmp32 = WEBRTC_SPL_MUL_16_16(state->meanLongTerm, state->meanLongTerm);
tmp32 = WEBRTC_SPL_LSHIFT_W32(state->varianceLongTerm, 12) - tmp32; tmp32 = WEBRTC_SPL_LSHIFT_W32(state->varianceLongTerm, 12) - tmp32;
state->stdLongTerm = (WebRtc_Word16)WebRtcSpl_Sqrt(tmp32); state->stdLongTerm = (int16_t)WebRtcSpl_Sqrt(tmp32);
// update voice activity measure (Q10) // update voice activity measure (Q10)
tmp16 = WEBRTC_SPL_LSHIFT_W16(3, 12); tmp16 = WEBRTC_SPL_LSHIFT_W16(3, 12);
tmp32 = WEBRTC_SPL_MUL_16_16(tmp16, (dB - state->meanLongTerm)); tmp32 = WEBRTC_SPL_MUL_16_16(tmp16, (dB - state->meanLongTerm));
tmp32 = WebRtcSpl_DivW32W16(tmp32, state->stdLongTerm); tmp32 = WebRtcSpl_DivW32W16(tmp32, state->stdLongTerm);
tmpU16 = WEBRTC_SPL_LSHIFT_U16((WebRtc_UWord16)13, 12); tmpU16 = WEBRTC_SPL_LSHIFT_U16((uint16_t)13, 12);
tmp32b = WEBRTC_SPL_MUL_16_U16(state->logRatio, tmpU16); tmp32b = WEBRTC_SPL_MUL_16_U16(state->logRatio, tmpU16);
tmp32 += WEBRTC_SPL_RSHIFT_W32(tmp32b, 10); tmp32 += WEBRTC_SPL_RSHIFT_W32(tmp32b, 10);
state->logRatio = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32, 6); state->logRatio = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp32, 6);
// limit // limit
if (state->logRatio > 2048) if (state->logRatio > 2048)


@ -24,26 +24,26 @@
typedef struct typedef struct
{ {
WebRtc_Word32 downState[8]; int32_t downState[8];
WebRtc_Word16 HPstate; int16_t HPstate;
WebRtc_Word16 counter; int16_t counter;
WebRtc_Word16 logRatio; // log( P(active) / P(inactive) ) (Q10) int16_t logRatio; // log( P(active) / P(inactive) ) (Q10)
WebRtc_Word16 meanLongTerm; // Q10 int16_t meanLongTerm; // Q10
WebRtc_Word32 varianceLongTerm; // Q8 int32_t varianceLongTerm; // Q8
WebRtc_Word16 stdLongTerm; // Q10 int16_t stdLongTerm; // Q10
WebRtc_Word16 meanShortTerm; // Q10 int16_t meanShortTerm; // Q10
WebRtc_Word32 varianceShortTerm; // Q8 int32_t varianceShortTerm; // Q8
WebRtc_Word16 stdShortTerm; // Q10 int16_t stdShortTerm; // Q10
} AgcVad_t; // total = 54 bytes } AgcVad_t; // total = 54 bytes
typedef struct typedef struct
{ {
WebRtc_Word32 capacitorSlow; int32_t capacitorSlow;
WebRtc_Word32 capacitorFast; int32_t capacitorFast;
WebRtc_Word32 gain; int32_t gain;
WebRtc_Word32 gainTable[32]; int32_t gainTable[32];
WebRtc_Word16 gatePrevious; int16_t gatePrevious;
WebRtc_Word16 agcMode; int16_t agcMode;
AgcVad_t vadNearend; AgcVad_t vadNearend;
AgcVad_t vadFarend; AgcVad_t vadFarend;
#ifdef AGC_DEBUG #ifdef AGC_DEBUG
@ -52,25 +52,27 @@ typedef struct
#endif #endif
} DigitalAgc_t; } DigitalAgc_t;
WebRtc_Word32 WebRtcAgc_InitDigital(DigitalAgc_t *digitalAgcInst, WebRtc_Word16 agcMode); int32_t WebRtcAgc_InitDigital(DigitalAgc_t *digitalAgcInst, int16_t agcMode);
WebRtc_Word32 WebRtcAgc_ProcessDigital(DigitalAgc_t *digitalAgcInst, const WebRtc_Word16 *inNear, int32_t WebRtcAgc_ProcessDigital(DigitalAgc_t *digitalAgcInst,
const WebRtc_Word16 *inNear_H, WebRtc_Word16 *out, const int16_t *inNear, const int16_t *inNear_H,
WebRtc_Word16 *out_H, WebRtc_UWord32 FS, int16_t *out, int16_t *out_H, uint32_t FS,
WebRtc_Word16 lowLevelSignal); int16_t lowLevelSignal);
WebRtc_Word32 WebRtcAgc_AddFarendToDigital(DigitalAgc_t *digitalAgcInst, const WebRtc_Word16 *inFar, int32_t WebRtcAgc_AddFarendToDigital(DigitalAgc_t *digitalAgcInst,
WebRtc_Word16 nrSamples); const int16_t *inFar,
int16_t nrSamples);
void WebRtcAgc_InitVad(AgcVad_t *vadInst); void WebRtcAgc_InitVad(AgcVad_t *vadInst);
WebRtc_Word16 WebRtcAgc_ProcessVad(AgcVad_t *vadInst, // (i) VAD state int16_t WebRtcAgc_ProcessVad(AgcVad_t *vadInst, // (i) VAD state
const WebRtc_Word16 *in, // (i) Speech signal const int16_t *in, // (i) Speech signal
WebRtc_Word16 nrSamples); // (i) number of samples int16_t nrSamples); // (i) number of samples
WebRtc_Word32 WebRtcAgc_CalculateGainTable(WebRtc_Word32 *gainTable, // Q16 int32_t WebRtcAgc_CalculateGainTable(int32_t *gainTable, // Q16
WebRtc_Word16 compressionGaindB, // Q0 (in dB) int16_t compressionGaindB, // Q0 (in dB)
WebRtc_Word16 targetLevelDbfs,// Q0 (in dB) int16_t targetLevelDbfs,// Q0 (in dB)
WebRtc_UWord8 limiterEnable, WebRtc_Word16 analogTarget); uint8_t limiterEnable,
int16_t analogTarget);
#endif // WEBRTC_MODULES_AUDIO_PROCESSING_AGC_MAIN_SOURCE_ANALOG_AGC_H_ #endif // WEBRTC_MODULES_AUDIO_PROCESSING_AGC_MAIN_SOURCE_ANALOG_AGC_H_


@ -39,9 +39,9 @@ enum
typedef struct typedef struct
{ {
WebRtc_Word16 targetLevelDbfs; // default 3 (-3 dBOv) int16_t targetLevelDbfs; // default 3 (-3 dBOv)
WebRtc_Word16 compressionGaindB; // default 9 dB int16_t compressionGaindB; // default 9 dB
WebRtc_UWord8 limiterEnable; // default kAgcTrue (on) uint8_t limiterEnable; // default kAgcTrue (on)
} WebRtcAgc_config_t; } WebRtcAgc_config_t;
#if defined(__cplusplus) #if defined(__cplusplus)
@ -65,8 +65,8 @@ extern "C"
* : -1 - Error * : -1 - Error
*/ */
int WebRtcAgc_AddFarend(void* agcInst, int WebRtcAgc_AddFarend(void* agcInst,
const WebRtc_Word16* inFar, const int16_t* inFar,
WebRtc_Word16 samples); int16_t samples);
/* /*
* This function processes a 10/20ms frame of microphone speech to determine * This function processes a 10/20ms frame of microphone speech to determine
@ -92,9 +92,9 @@ int WebRtcAgc_AddFarend(void* agcInst,
* : -1 - Error * : -1 - Error
*/ */
int WebRtcAgc_AddMic(void* agcInst, int WebRtcAgc_AddMic(void* agcInst,
WebRtc_Word16* inMic, int16_t* inMic,
WebRtc_Word16* inMic_H, int16_t* inMic_H,
WebRtc_Word16 samples); int16_t samples);
/* /*
* This function replaces the analog microphone with a virtual one. * This function replaces the analog microphone with a virtual one.
@ -123,11 +123,11 @@ int WebRtcAgc_AddMic(void* agcInst,
* : -1 - Error * : -1 - Error
*/ */
int WebRtcAgc_VirtualMic(void* agcInst, int WebRtcAgc_VirtualMic(void* agcInst,
WebRtc_Word16* inMic, int16_t* inMic,
WebRtc_Word16* inMic_H, int16_t* inMic_H,
WebRtc_Word16 samples, int16_t samples,
WebRtc_Word32 micLevelIn, int32_t micLevelIn,
WebRtc_Word32* micLevelOut); int32_t* micLevelOut);
/* /*
* This function processes a 10/20ms frame and adjusts (normalizes) the gain * This function processes a 10/20ms frame and adjusts (normalizes) the gain
@ -168,15 +168,15 @@ int WebRtcAgc_VirtualMic(void* agcInst,
* : -1 - Error * : -1 - Error
*/ */
int WebRtcAgc_Process(void* agcInst, int WebRtcAgc_Process(void* agcInst,
const WebRtc_Word16* inNear, const int16_t* inNear,
const WebRtc_Word16* inNear_H, const int16_t* inNear_H,
WebRtc_Word16 samples, int16_t samples,
WebRtc_Word16* out, int16_t* out,
WebRtc_Word16* out_H, int16_t* out_H,
WebRtc_Word32 inMicLevel, int32_t inMicLevel,
WebRtc_Word32* outMicLevel, int32_t* outMicLevel,
WebRtc_Word16 echo, int16_t echo,
WebRtc_UWord8* saturationWarning); uint8_t* saturationWarning);
/* /*
* This function sets the config parameters (targetLevelDbfs, * This function sets the config parameters (targetLevelDbfs,
@ -247,10 +247,10 @@ int WebRtcAgc_Free(void *agcInst);
* -1 - Error * -1 - Error
*/ */
int WebRtcAgc_Init(void *agcInst, int WebRtcAgc_Init(void *agcInst,
WebRtc_Word32 minLevel, int32_t minLevel,
WebRtc_Word32 maxLevel, int32_t maxLevel,
WebRtc_Word16 agcMode, int16_t agcMode,
WebRtc_UWord32 fs); uint32_t fs);
#if defined(__cplusplus) #if defined(__cplusplus)
} }


@ -54,10 +54,10 @@ struct SplitAudioChannel {
int16_t low_pass_data[kSamplesPer16kHzChannel]; int16_t low_pass_data[kSamplesPer16kHzChannel];
int16_t high_pass_data[kSamplesPer16kHzChannel]; int16_t high_pass_data[kSamplesPer16kHzChannel];
WebRtc_Word32 analysis_filter_state1[6]; int32_t analysis_filter_state1[6];
WebRtc_Word32 analysis_filter_state2[6]; int32_t analysis_filter_state2[6];
WebRtc_Word32 synthesis_filter_state1[6]; int32_t synthesis_filter_state1[6];
WebRtc_Word32 synthesis_filter_state2[6]; int32_t synthesis_filter_state2[6];
}; };
// TODO(andrew): check range of input parameters? // TODO(andrew): check range of input parameters?
@ -142,22 +142,22 @@ int16_t* AudioBuffer::low_pass_reference(int channel) const {
return low_pass_reference_channels_[channel].data; return low_pass_reference_channels_[channel].data;
} }
WebRtc_Word32* AudioBuffer::analysis_filter_state1(int channel) const { int32_t* AudioBuffer::analysis_filter_state1(int channel) const {
assert(channel >= 0 && channel < num_channels_); assert(channel >= 0 && channel < num_channels_);
return split_channels_[channel].analysis_filter_state1; return split_channels_[channel].analysis_filter_state1;
} }
WebRtc_Word32* AudioBuffer::analysis_filter_state2(int channel) const { int32_t* AudioBuffer::analysis_filter_state2(int channel) const {
assert(channel >= 0 && channel < num_channels_); assert(channel >= 0 && channel < num_channels_);
return split_channels_[channel].analysis_filter_state2; return split_channels_[channel].analysis_filter_state2;
} }
WebRtc_Word32* AudioBuffer::synthesis_filter_state1(int channel) const { int32_t* AudioBuffer::synthesis_filter_state1(int channel) const {
assert(channel >= 0 && channel < num_channels_); assert(channel >= 0 && channel < num_channels_);
return split_channels_[channel].synthesis_filter_state1; return split_channels_[channel].synthesis_filter_state1;
} }
WebRtc_Word32* AudioBuffer::synthesis_filter_state2(int channel) const { int32_t* AudioBuffer::synthesis_filter_state2(int channel) const {
assert(channel >= 0 && channel < num_channels_); assert(channel >= 0 && channel < num_channels_);
return split_channels_[channel].synthesis_filter_state2; return split_channels_[channel].synthesis_filter_state2;
} }


@ -575,7 +575,7 @@ VoiceDetection* AudioProcessingImpl::voice_detection() const {
return voice_detection_; return voice_detection_;
} }
WebRtc_Word32 AudioProcessingImpl::ChangeUniqueId(const WebRtc_Word32 id) { int32_t AudioProcessingImpl::ChangeUniqueId(const int32_t id) {
CriticalSectionScoped crit_scoped(crit_); CriticalSectionScoped crit_scoped(crit_);
id_ = id; id_ = id;


@ -82,7 +82,7 @@ class AudioProcessingImpl : public AudioProcessing {
virtual VoiceDetection* voice_detection() const; virtual VoiceDetection* voice_detection() const;
// Module methods. // Module methods.
virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id); virtual int32_t ChangeUniqueId(const int32_t id);
private: private:
bool is_data_processed() const; bool is_data_processed() const;


@ -24,7 +24,7 @@ namespace webrtc {
typedef void Handle; typedef void Handle;
namespace { namespace {
WebRtc_Word16 MapSetting(EchoCancellation::SuppressionLevel level) { int16_t MapSetting(EchoCancellation::SuppressionLevel level) {
switch (level) { switch (level) {
case EchoCancellation::kLowSuppression: case EchoCancellation::kLowSuppression:
return kAecNlpConservative; return kAecNlpConservative;
@ -86,7 +86,7 @@ int EchoCancellationImpl::ProcessRenderAudio(const AudioBuffer* audio) {
err = WebRtcAec_BufferFarend( err = WebRtcAec_BufferFarend(
my_handle, my_handle,
audio->low_pass_split_data(j), audio->low_pass_split_data(j),
static_cast<WebRtc_Word16>(audio->samples_per_split_channel())); static_cast<int16_t>(audio->samples_per_split_channel()));
if (err != apm_->kNoError) { if (err != apm_->kNoError) {
return GetHandleError(my_handle); // TODO(ajm): warning possible? return GetHandleError(my_handle); // TODO(ajm): warning possible?
@ -129,7 +129,7 @@ int EchoCancellationImpl::ProcessCaptureAudio(AudioBuffer* audio) {
audio->high_pass_split_data(i), audio->high_pass_split_data(i),
audio->low_pass_split_data(i), audio->low_pass_split_data(i),
audio->high_pass_split_data(i), audio->high_pass_split_data(i),
static_cast<WebRtc_Word16>(audio->samples_per_split_channel()), static_cast<int16_t>(audio->samples_per_split_channel()),
apm_->stream_delay_ms(), apm_->stream_delay_ms(),
stream_drift_samples_); stream_drift_samples_);


@ -24,7 +24,7 @@ namespace webrtc {
typedef void Handle; typedef void Handle;
namespace { namespace {
WebRtc_Word16 MapSetting(EchoControlMobile::RoutingMode mode) { int16_t MapSetting(EchoControlMobile::RoutingMode mode) {
switch (mode) { switch (mode) {
case EchoControlMobile::kQuietEarpieceOrHeadset: case EchoControlMobile::kQuietEarpieceOrHeadset:
return 0; return 0;
@ -95,7 +95,7 @@ int EchoControlMobileImpl::ProcessRenderAudio(const AudioBuffer* audio) {
err = WebRtcAecm_BufferFarend( err = WebRtcAecm_BufferFarend(
my_handle, my_handle,
audio->low_pass_split_data(j), audio->low_pass_split_data(j),
static_cast<WebRtc_Word16>(audio->samples_per_split_channel())); static_cast<int16_t>(audio->samples_per_split_channel()));
if (err != apm_->kNoError) { if (err != apm_->kNoError) {
return GetHandleError(my_handle); // TODO(ajm): warning possible? return GetHandleError(my_handle); // TODO(ajm): warning possible?
@ -127,8 +127,8 @@ int EchoControlMobileImpl::ProcessCaptureAudio(AudioBuffer* audio) {
for (int i = 0; i < audio->num_channels(); i++) { for (int i = 0; i < audio->num_channels(); i++) {
// TODO(ajm): improve how this works, possibly inside AECM. // TODO(ajm): improve how this works, possibly inside AECM.
// This is kind of hacked up. // This is kind of hacked up.
WebRtc_Word16* noisy = audio->low_pass_reference(i); int16_t* noisy = audio->low_pass_reference(i);
WebRtc_Word16* clean = audio->low_pass_split_data(i); int16_t* clean = audio->low_pass_split_data(i);
if (noisy == NULL) { if (noisy == NULL) {
noisy = clean; noisy = clean;
clean = NULL; clean = NULL;
@ -140,7 +140,7 @@ int EchoControlMobileImpl::ProcessCaptureAudio(AudioBuffer* audio) {
noisy, noisy,
clean, clean,
audio->low_pass_split_data(i), audio->low_pass_split_data(i),
static_cast<WebRtc_Word16>(audio->samples_per_split_channel()), static_cast<int16_t>(audio->samples_per_split_channel()),
apm_->stream_delay_ms()); apm_->stream_delay_ms());
if (err != apm_->kNoError) { if (err != apm_->kNoError) {


@ -23,7 +23,7 @@ namespace webrtc {
typedef void Handle; typedef void Handle;
namespace { namespace {
WebRtc_Word16 MapSetting(GainControl::Mode mode) { int16_t MapSetting(GainControl::Mode mode) {
switch (mode) { switch (mode) {
case GainControl::kAdaptiveAnalog: case GainControl::kAdaptiveAnalog:
return kAgcModeAdaptiveAnalog; return kAgcModeAdaptiveAnalog;
@ -59,7 +59,7 @@ int GainControlImpl::ProcessRenderAudio(AudioBuffer* audio) {
assert(audio->samples_per_split_channel() <= 160); assert(audio->samples_per_split_channel() <= 160);
WebRtc_Word16* mixed_data = audio->low_pass_split_data(0); int16_t* mixed_data = audio->low_pass_split_data(0);
if (audio->num_channels() > 1) { if (audio->num_channels() > 1) {
audio->CopyAndMixLowPass(1); audio->CopyAndMixLowPass(1);
mixed_data = audio->mixed_low_pass_data(0); mixed_data = audio->mixed_low_pass_data(0);
@ -70,7 +70,7 @@ int GainControlImpl::ProcessRenderAudio(AudioBuffer* audio) {
int err = WebRtcAgc_AddFarend( int err = WebRtcAgc_AddFarend(
my_handle, my_handle,
mixed_data, mixed_data,
static_cast<WebRtc_Word16>(audio->samples_per_split_channel())); static_cast<int16_t>(audio->samples_per_split_channel()));
if (err != apm_->kNoError) { if (err != apm_->kNoError) {
return GetHandleError(my_handle); return GetHandleError(my_handle);
@ -97,7 +97,7 @@ int GainControlImpl::AnalyzeCaptureAudio(AudioBuffer* audio) {
my_handle, my_handle,
audio->low_pass_split_data(i), audio->low_pass_split_data(i),
audio->high_pass_split_data(i), audio->high_pass_split_data(i),
static_cast<WebRtc_Word16>(audio->samples_per_split_channel())); static_cast<int16_t>(audio->samples_per_split_channel()));
if (err != apm_->kNoError) { if (err != apm_->kNoError) {
return GetHandleError(my_handle); return GetHandleError(my_handle);
@ -107,13 +107,13 @@ int GainControlImpl::AnalyzeCaptureAudio(AudioBuffer* audio) {
for (int i = 0; i < num_handles(); i++) { for (int i = 0; i < num_handles(); i++) {
Handle* my_handle = static_cast<Handle*>(handle(i)); Handle* my_handle = static_cast<Handle*>(handle(i));
WebRtc_Word32 capture_level_out = 0; int32_t capture_level_out = 0;
err = WebRtcAgc_VirtualMic( err = WebRtcAgc_VirtualMic(
my_handle, my_handle,
audio->low_pass_split_data(i), audio->low_pass_split_data(i),
audio->high_pass_split_data(i), audio->high_pass_split_data(i),
static_cast<WebRtc_Word16>(audio->samples_per_split_channel()), static_cast<int16_t>(audio->samples_per_split_channel()),
//capture_levels_[i], //capture_levels_[i],
analog_capture_level_, analog_capture_level_,
&capture_level_out); &capture_level_out);
@ -145,14 +145,14 @@ int GainControlImpl::ProcessCaptureAudio(AudioBuffer* audio) {
stream_is_saturated_ = false; stream_is_saturated_ = false;
for (int i = 0; i < num_handles(); i++) { for (int i = 0; i < num_handles(); i++) {
Handle* my_handle = static_cast<Handle*>(handle(i)); Handle* my_handle = static_cast<Handle*>(handle(i));
WebRtc_Word32 capture_level_out = 0; int32_t capture_level_out = 0;
WebRtc_UWord8 saturation_warning = 0; uint8_t saturation_warning = 0;
int err = WebRtcAgc_Process( int err = WebRtcAgc_Process(
my_handle, my_handle,
audio->low_pass_split_data(i), audio->low_pass_split_data(i),
audio->high_pass_split_data(i), audio->high_pass_split_data(i),
static_cast<WebRtc_Word16>(audio->samples_per_split_channel()), static_cast<int16_t>(audio->samples_per_split_channel()),
audio->low_pass_split_data(i), audio->low_pass_split_data(i),
audio->high_pass_split_data(i), audio->high_pass_split_data(i),
capture_levels_[i], capture_levels_[i],
@ -345,10 +345,10 @@ int GainControlImpl::ConfigureHandle(void* handle) const {
// TODO(ajm): Flip the sign here (since AGC expects a positive value) if we // TODO(ajm): Flip the sign here (since AGC expects a positive value) if we
// change the interface. // change the interface.
//assert(target_level_dbfs_ <= 0); //assert(target_level_dbfs_ <= 0);
//config.targetLevelDbfs = static_cast<WebRtc_Word16>(-target_level_dbfs_); //config.targetLevelDbfs = static_cast<int16_t>(-target_level_dbfs_);
config.targetLevelDbfs = static_cast<WebRtc_Word16>(target_level_dbfs_); config.targetLevelDbfs = static_cast<int16_t>(target_level_dbfs_);
config.compressionGaindB = config.compressionGaindB =
static_cast<WebRtc_Word16>(compression_gain_db_); static_cast<int16_t>(compression_gain_db_);
config.limiterEnable = limiter_enabled_; config.limiterEnable = limiter_enabled_;
return WebRtcAgc_set_config(static_cast<Handle*>(handle), config); return WebRtcAgc_set_config(static_cast<Handle*>(handle), config);


@ -21,16 +21,16 @@
namespace webrtc { namespace webrtc {
namespace { namespace {
const WebRtc_Word16 kFilterCoefficients8kHz[5] = const int16_t kFilterCoefficients8kHz[5] =
{3798, -7596, 3798, 7807, -3733}; {3798, -7596, 3798, 7807, -3733};
const WebRtc_Word16 kFilterCoefficients[5] = const int16_t kFilterCoefficients[5] =
{4012, -8024, 4012, 8002, -3913}; {4012, -8024, 4012, 8002, -3913};
struct FilterState { struct FilterState {
WebRtc_Word16 y[4]; int16_t y[4];
WebRtc_Word16 x[2]; int16_t x[2];
const WebRtc_Word16* ba; const int16_t* ba;
}; };
int InitializeFilter(FilterState* hpf, int sample_rate_hz) { int InitializeFilter(FilterState* hpf, int sample_rate_hz) {
@ -48,13 +48,13 @@ int InitializeFilter(FilterState* hpf, int sample_rate_hz) {
return AudioProcessing::kNoError; return AudioProcessing::kNoError;
} }
int Filter(FilterState* hpf, WebRtc_Word16* data, int length) { int Filter(FilterState* hpf, int16_t* data, int length) {
assert(hpf != NULL); assert(hpf != NULL);
WebRtc_Word32 tmp_int32 = 0; int32_t tmp_int32 = 0;
WebRtc_Word16* y = hpf->y; int16_t* y = hpf->y;
WebRtc_Word16* x = hpf->x; int16_t* x = hpf->x;
const WebRtc_Word16* ba = hpf->ba; const int16_t* ba = hpf->ba;
for (int i = 0; i < length; i++) { for (int i = 0; i < length; i++) {
// y[i] = b[0] * x[i] + b[1] * x[i-1] + b[2] * x[i-2] // y[i] = b[0] * x[i] + b[1] * x[i-1] + b[2] * x[i-2]
@ -82,20 +82,20 @@ int Filter(FilterState* hpf, WebRtc_Word16* data, int length) {
// Update state (filtered part) // Update state (filtered part)
y[2] = y[0]; y[2] = y[0];
y[3] = y[1]; y[3] = y[1];
y[0] = static_cast<WebRtc_Word16>(tmp_int32 >> 13); y[0] = static_cast<int16_t>(tmp_int32 >> 13);
y[1] = static_cast<WebRtc_Word16>((tmp_int32 - y[1] = static_cast<int16_t>((tmp_int32 -
WEBRTC_SPL_LSHIFT_W32(static_cast<WebRtc_Word32>(y[0]), 13)) << 2); WEBRTC_SPL_LSHIFT_W32(static_cast<int32_t>(y[0]), 13)) << 2);
// Rounding in Q12, i.e. add 2^11 // Rounding in Q12, i.e. add 2^11
tmp_int32 += 2048; tmp_int32 += 2048;
// Saturate (to 2^27) so that the HP filtered signal does not overflow // Saturate (to 2^27) so that the HP filtered signal does not overflow
tmp_int32 = WEBRTC_SPL_SAT(static_cast<WebRtc_Word32>(134217727), tmp_int32 = WEBRTC_SPL_SAT(static_cast<int32_t>(134217727),
tmp_int32, tmp_int32,
static_cast<WebRtc_Word32>(-134217728)); static_cast<int32_t>(-134217728));
// Convert back to Q0 and use rounding // Convert back to Q0 and use rounding
data[i] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp_int32, 12); data[i] = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp_int32, 12);
} }
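The tail of that loop is the usual fixed-point round-and-saturate idiom: to bring a Q12 accumulator back to Q0 with rounding, add half an LSB (2^11) before shifting right by 12, and clamp to the +/-2^27 range first so the shifted result still fits in int16_t. A standalone illustration of the same idiom, not the filter itself; the function name is made up:

#include <cstdint>

// Round a Q12 value to Q0 and saturate, as in the high-pass filter above.
static int16_t RoundQ12ToQ0(int32_t q12) {
  q12 += 1 << 11;                            // rounding offset: half of 2^12
  if (q12 >  134217727) q12 =  134217727;    //  2^27 - 1  ->  32767 after >> 12
  if (q12 < -134217728) q12 = -134217728;    // -2^27      -> -32768 after >> 12
  return (int16_t)(q12 >> 12);
}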


@ -253,8 +253,8 @@ class AudioProcessing : public Module {
}; };
// Inherited from Module. // Inherited from Module.
virtual WebRtc_Word32 TimeUntilNextProcess() { return -1; } virtual int32_t TimeUntilNextProcess() { return -1; }
virtual WebRtc_Word32 Process() { return -1; } virtual int32_t Process() { return -1; }
}; };
// The acoustic echo cancellation (AEC) component provides better performance // The acoustic echo cancellation (AEC) component provides better performance


@ -233,9 +233,9 @@ class MockAudioProcessing : public AudioProcessing {
return voice_detection_.get(); return voice_detection_.get();
}; };
MOCK_METHOD0(TimeUntilNextProcess, MOCK_METHOD0(TimeUntilNextProcess,
WebRtc_Word32()); int32_t());
MOCK_METHOD0(Process, MOCK_METHOD0(Process,
WebRtc_Word32()); int32_t());
private: private:
scoped_ptr<MockEchoCancellation> echo_cancellation_; scoped_ptr<MockEchoCancellation> echo_cancellation_;


@ -62,7 +62,7 @@ int WebRtcNs_Free(NsHandle* NS_inst);
* Return value : 0 - Ok * Return value : 0 - Ok
* -1 - Error * -1 - Error
*/ */
int WebRtcNs_Init(NsHandle* NS_inst, WebRtc_UWord32 fs); int WebRtcNs_Init(NsHandle* NS_inst, uint32_t fs);
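On the floating-point NS side only the sample-rate argument changes type. A minimal sketch, assuming the handle was obtained from the suppressor's create call (not shown in this diff) and that 16 kHz is among the supported rates; the wrapper name is illustrative:

int SetUpNs(NsHandle* ns) {
  // ns must already exist; Init resets the suppressor state for the given rate.
  return WebRtcNs_Init(ns, 16000u);
}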
/* /*
* This changes the aggressiveness of the noise suppression method. * This changes the aggressiveness of the noise suppression method.


@ -61,7 +61,7 @@ int WebRtcNsx_Free(NsxHandle* nsxInst);
* Return value : 0 - Ok * Return value : 0 - Ok
* -1 - Error * -1 - Error
*/ */
int WebRtcNsx_Init(NsxHandle* nsxInst, WebRtc_UWord32 fs); int WebRtcNsx_Init(NsxHandle* nsxInst, uint32_t fs);
/* /*
* This changes the aggressiveness of the noise suppression method. * This changes the aggressiveness of the noise suppression method.


@ -34,7 +34,7 @@ int WebRtcNs_Free(NsHandle* NS_inst) {
} }
int WebRtcNs_Init(NsHandle* NS_inst, WebRtc_UWord32 fs) { int WebRtcNs_Init(NsHandle* NS_inst, uint32_t fs) {
return WebRtcNs_InitCore((NSinst_t*) NS_inst, fs); return WebRtcNs_InitCore((NSinst_t*) NS_inst, fs);
} }


@ -37,7 +37,7 @@ int WebRtcNsx_Free(NsxHandle* nsxInst) {
return 0; return 0;
} }
int WebRtcNsx_Init(NsxHandle* nsxInst, WebRtc_UWord32 fs) { int WebRtcNsx_Init(NsxHandle* nsxInst, uint32_t fs) {
return WebRtcNsx_InitCore((NsxInst_t*)nsxInst, fs); return WebRtcNsx_InitCore((NsxInst_t*)nsxInst, fs);
} }


@ -68,7 +68,7 @@ void WebRtcNs_set_feature_extraction_parameters(NSinst_t* inst) {
} }
// Initialize state // Initialize state
int WebRtcNs_InitCore(NSinst_t* inst, WebRtc_UWord32 fs) { int WebRtcNs_InitCore(NSinst_t* inst, uint32_t fs) {
int i; int i;
//We only support 10ms frames //We only support 10ms frames


@ -50,7 +50,7 @@ typedef struct NSParaExtract_t_ {
typedef struct NSinst_t_ { typedef struct NSinst_t_ {
WebRtc_UWord32 fs; uint32_t fs;
int blockLen; int blockLen;
int blockLen10ms; int blockLen10ms;
int windShift; int windShift;
@ -80,7 +80,7 @@ typedef struct NSinst_t_ {
float wfft[W_LENGTH]; float wfft[W_LENGTH];
// parameters for new method: some not needed, will reduce/cleanup later // parameters for new method: some not needed, will reduce/cleanup later
WebRtc_Word32 blockInd; //frame index counter int32_t blockInd; //frame index counter
int modelUpdatePars[4]; //parameters for updating or estimating int modelUpdatePars[4]; //parameters for updating or estimating
// thresholds/weights for prior model // thresholds/weights for prior model
float priorModelPars[7]; //parameters for prior model float priorModelPars[7]; //parameters for prior model
@ -127,7 +127,7 @@ extern "C" {
* Return value : 0 - Ok * Return value : 0 - Ok
* -1 - Error * -1 - Error
*/ */
int WebRtcNs_InitCore(NSinst_t* inst, WebRtc_UWord32 fs); int WebRtcNs_InitCore(NSinst_t* inst, uint32_t fs);
/**************************************************************************** /****************************************************************************
* WebRtcNs_set_policy_core(...) * WebRtcNs_set_policy_core(...)

File diff suppressed because it is too large.


@ -20,19 +20,19 @@
#include "webrtc/typedefs.h" #include "webrtc/typedefs.h"
typedef struct NsxInst_t_ { typedef struct NsxInst_t_ {
WebRtc_UWord32 fs; uint32_t fs;
const WebRtc_Word16* window; const int16_t* window;
WebRtc_Word16 analysisBuffer[ANAL_BLOCKL_MAX]; int16_t analysisBuffer[ANAL_BLOCKL_MAX];
WebRtc_Word16 synthesisBuffer[ANAL_BLOCKL_MAX]; int16_t synthesisBuffer[ANAL_BLOCKL_MAX];
WebRtc_UWord16 noiseSupFilter[HALF_ANAL_BLOCKL]; uint16_t noiseSupFilter[HALF_ANAL_BLOCKL];
WebRtc_UWord16 overdrive; /* Q8 */ uint16_t overdrive; /* Q8 */
WebRtc_UWord16 denoiseBound; /* Q14 */ uint16_t denoiseBound; /* Q14 */
const WebRtc_Word16* factor2Table; const int16_t* factor2Table;
WebRtc_Word16 noiseEstLogQuantile[SIMULT* HALF_ANAL_BLOCKL]; int16_t noiseEstLogQuantile[SIMULT* HALF_ANAL_BLOCKL];
WebRtc_Word16 noiseEstDensity[SIMULT* HALF_ANAL_BLOCKL]; int16_t noiseEstDensity[SIMULT* HALF_ANAL_BLOCKL];
WebRtc_Word16 noiseEstCounter[SIMULT]; int16_t noiseEstCounter[SIMULT];
WebRtc_Word16 noiseEstQuantile[HALF_ANAL_BLOCKL]; int16_t noiseEstQuantile[HALF_ANAL_BLOCKL];
int anaLen; int anaLen;
int anaLen2; int anaLen2;
@ -42,45 +42,45 @@ typedef struct NsxInst_t_ {
int initFlag; int initFlag;
int gainMap; int gainMap;
WebRtc_Word32 maxLrt; int32_t maxLrt;
WebRtc_Word32 minLrt; int32_t minLrt;
// Log LRT factor with time-smoothing in Q8. // Log LRT factor with time-smoothing in Q8.
WebRtc_Word32 logLrtTimeAvgW32[HALF_ANAL_BLOCKL]; int32_t logLrtTimeAvgW32[HALF_ANAL_BLOCKL];
WebRtc_Word32 featureLogLrt; int32_t featureLogLrt;
WebRtc_Word32 thresholdLogLrt; int32_t thresholdLogLrt;
WebRtc_Word16 weightLogLrt; int16_t weightLogLrt;
WebRtc_UWord32 featureSpecDiff; uint32_t featureSpecDiff;
WebRtc_UWord32 thresholdSpecDiff; uint32_t thresholdSpecDiff;
WebRtc_Word16 weightSpecDiff; int16_t weightSpecDiff;
WebRtc_UWord32 featureSpecFlat; uint32_t featureSpecFlat;
WebRtc_UWord32 thresholdSpecFlat; uint32_t thresholdSpecFlat;
WebRtc_Word16 weightSpecFlat; int16_t weightSpecFlat;
// Conservative estimate of noise spectrum. // Conservative estimate of noise spectrum.
WebRtc_Word32 avgMagnPause[HALF_ANAL_BLOCKL]; int32_t avgMagnPause[HALF_ANAL_BLOCKL];
WebRtc_UWord32 magnEnergy; uint32_t magnEnergy;
WebRtc_UWord32 sumMagn; uint32_t sumMagn;
WebRtc_UWord32 curAvgMagnEnergy; uint32_t curAvgMagnEnergy;
WebRtc_UWord32 timeAvgMagnEnergy; uint32_t timeAvgMagnEnergy;
WebRtc_UWord32 timeAvgMagnEnergyTmp; uint32_t timeAvgMagnEnergyTmp;
WebRtc_UWord32 whiteNoiseLevel; // Initial noise estimate. uint32_t whiteNoiseLevel; // Initial noise estimate.
// Initial magnitude spectrum estimate. // Initial magnitude spectrum estimate.
WebRtc_UWord32 initMagnEst[HALF_ANAL_BLOCKL]; uint32_t initMagnEst[HALF_ANAL_BLOCKL];
// Pink noise parameters: // Pink noise parameters:
WebRtc_Word32 pinkNoiseNumerator; // Numerator. int32_t pinkNoiseNumerator; // Numerator.
WebRtc_Word32 pinkNoiseExp; // Power of freq. int32_t pinkNoiseExp; // Power of freq.
int minNorm; // Smallest normalization factor. int minNorm; // Smallest normalization factor.
int zeroInputSignal; // Zero input signal flag. int zeroInputSignal; // Zero input signal flag.
// Noise spectrum from previous frame. // Noise spectrum from previous frame.
WebRtc_UWord32 prevNoiseU32[HALF_ANAL_BLOCKL]; uint32_t prevNoiseU32[HALF_ANAL_BLOCKL];
// Magnitude spectrum from previous frame. // Magnitude spectrum from previous frame.
WebRtc_UWord16 prevMagnU16[HALF_ANAL_BLOCKL]; uint16_t prevMagnU16[HALF_ANAL_BLOCKL];
// Prior speech/noise probability in Q14. // Prior speech/noise probability in Q14.
WebRtc_Word16 priorNonSpeechProb; int16_t priorNonSpeechProb;
int blockIndex; // Frame index counter. int blockIndex; // Frame index counter.
// Parameter for updating or estimating thresholds/weights for prior model. // Parameter for updating or estimating thresholds/weights for prior model.
@ -88,21 +88,21 @@ typedef struct NsxInst_t_ {
int cntThresUpdate; int cntThresUpdate;
// Histograms for parameter estimation. // Histograms for parameter estimation.
WebRtc_Word16 histLrt[HIST_PAR_EST]; int16_t histLrt[HIST_PAR_EST];
WebRtc_Word16 histSpecFlat[HIST_PAR_EST]; int16_t histSpecFlat[HIST_PAR_EST];
WebRtc_Word16 histSpecDiff[HIST_PAR_EST]; int16_t histSpecDiff[HIST_PAR_EST];
// Quantities for high band estimate. // Quantities for high band estimate.
WebRtc_Word16 dataBufHBFX[ANAL_BLOCKL_MAX]; // Q0 int16_t dataBufHBFX[ANAL_BLOCKL_MAX]; // Q0
int qNoise; int qNoise;
int prevQNoise; int prevQNoise;
int prevQMagn; int prevQMagn;
int blockLen10ms; int blockLen10ms;
WebRtc_Word16 real[ANAL_BLOCKL_MAX]; int16_t real[ANAL_BLOCKL_MAX];
WebRtc_Word16 imag[ANAL_BLOCKL_MAX]; int16_t imag[ANAL_BLOCKL_MAX];
WebRtc_Word32 energyIn; int32_t energyIn;
int scaleEnergyIn; int scaleEnergyIn;
int normData; int normData;
@ -129,7 +129,7 @@ extern "C"
* Return value : 0 - Ok * Return value : 0 - Ok
* -1 - Error * -1 - Error
*/ */
WebRtc_Word32 WebRtcNsx_InitCore(NsxInst_t* inst, WebRtc_UWord32 fs); int32_t WebRtcNsx_InitCore(NsxInst_t* inst, uint32_t fs);
/**************************************************************************** /****************************************************************************
* WebRtcNsx_set_policy_core(...) * WebRtcNsx_set_policy_core(...)


@ -14,11 +14,11 @@
#include <assert.h> #include <assert.h>
// Constants to compensate for shifting signal log(2^shifts). // Constants to compensate for shifting signal log(2^shifts).
const WebRtc_Word16 WebRtcNsx_kLogTable[9] = { const int16_t WebRtcNsx_kLogTable[9] = {
0, 177, 355, 532, 710, 887, 1065, 1242, 1420 0, 177, 355, 532, 710, 887, 1065, 1242, 1420
}; };
const WebRtc_Word16 WebRtcNsx_kCounterDiv[201] = { const int16_t WebRtcNsx_kCounterDiv[201] = {
32767, 16384, 10923, 8192, 6554, 5461, 4681, 4096, 3641, 3277, 2979, 2731, 32767, 16384, 10923, 8192, 6554, 5461, 4681, 4096, 3641, 3277, 2979, 2731,
2521, 2341, 2185, 2048, 1928, 1820, 1725, 1638, 1560, 1489, 1425, 1365, 1311, 2521, 2341, 2185, 2048, 1928, 1820, 1725, 1638, 1560, 1489, 1425, 1365, 1311,
1260, 1214, 1170, 1130, 1092, 1057, 1024, 993, 964, 936, 910, 886, 862, 840, 1260, 1214, 1170, 1130, 1092, 1057, 1024, 993, 964, 936, 910, 886, 862, 840,
@ -35,7 +35,7 @@ const WebRtc_Word16 WebRtcNsx_kCounterDiv[201] = {
172, 172, 171, 170, 169, 168, 167, 166, 165, 165, 164, 163 172, 172, 171, 170, 169, 168, 167, 166, 165, 165, 164, 163
}; };
const WebRtc_Word16 WebRtcNsx_kLogTableFrac[256] = { const int16_t WebRtcNsx_kLogTableFrac[256] = {
0, 1, 3, 4, 6, 7, 9, 10, 11, 13, 14, 16, 17, 18, 20, 21, 0, 1, 3, 4, 6, 7, 9, 10, 11, 13, 14, 16, 17, 18, 20, 21,
22, 24, 25, 26, 28, 29, 30, 32, 33, 34, 36, 37, 38, 40, 41, 42, 22, 24, 25, 26, 28, 29, 30, 32, 33, 34, 36, 37, 38, 40, 41, 42,
44, 45, 46, 47, 49, 50, 51, 52, 54, 55, 56, 57, 59, 60, 61, 62, 44, 45, 46, 47, 49, 50, 51, 52, 54, 55, 56, 57, 59, 60, 61, 62,
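The constants are precomputed lookups; judging purely from the values (the source does not spell this out), kLogTable[i] looks like i*ln(2) in Q8, kCounterDiv[i] like 32768/(i+1) saturated to int16_t, and kLogTableFrac[i] like 256*log2(1 + i/256). A small generator that reproduces the entries shown above, under that assumed interpretation:

#include <cmath>
#include <cstdio>

int main() {
  for (int i = 0; i < 9; ++i)    // expect 0, 177, 355, 532, ...
    std::printf("%ld ", std::lround(i * std::log(2.0) * 256.0));
  std::printf("\n");
  for (int i = 0; i < 12; ++i)   // expect 32767, 16384, 10923, ...
    std::printf("%ld ", i == 0 ? 32767L : std::lround(32768.0 / (i + 1)));
  std::printf("\n");
  for (int i = 0; i < 16; ++i)   // expect 0, 1, 3, 4, 6, ...
    std::printf("%ld ", std::lround(256.0 * std::log2(1.0 + i / 256.0)));
  std::printf("\n");
  return 0;
}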


@ -13,20 +13,20 @@
namespace webrtc { namespace webrtc {
void SplittingFilterAnalysis(const WebRtc_Word16* in_data, void SplittingFilterAnalysis(const int16_t* in_data,
WebRtc_Word16* low_band, int16_t* low_band,
WebRtc_Word16* high_band, int16_t* high_band,
WebRtc_Word32* filter_state1, int32_t* filter_state1,
WebRtc_Word32* filter_state2) int32_t* filter_state2)
{ {
WebRtcSpl_AnalysisQMF(in_data, low_band, high_band, filter_state1, filter_state2); WebRtcSpl_AnalysisQMF(in_data, low_band, high_band, filter_state1, filter_state2);
} }
void SplittingFilterSynthesis(const WebRtc_Word16* low_band, void SplittingFilterSynthesis(const int16_t* low_band,
const WebRtc_Word16* high_band, const int16_t* high_band,
WebRtc_Word16* out_data, int16_t* out_data,
WebRtc_Word32* filt_state1, int32_t* filt_state1,
WebRtc_Word32* filt_state2) int32_t* filt_state2)
{ {
WebRtcSpl_SynthesisQMF(low_band, high_band, out_data, filt_state1, filt_state2); WebRtcSpl_SynthesisQMF(low_band, high_band, out_data, filt_state1, filt_state2);
} }


@ -31,11 +31,11 @@ namespace webrtc {
* - low_band : The signal from the 0-4 kHz band * - low_band : The signal from the 0-4 kHz band
* - high_band : The signal from the 4-8 kHz band * - high_band : The signal from the 4-8 kHz band
*/ */
void SplittingFilterAnalysis(const WebRtc_Word16* in_data, void SplittingFilterAnalysis(const int16_t* in_data,
WebRtc_Word16* low_band, int16_t* low_band,
WebRtc_Word16* high_band, int16_t* high_band,
WebRtc_Word32* filt_state1, int32_t* filt_state1,
WebRtc_Word32* filt_state2); int32_t* filt_state2);
/* /*
* SplittingFilterbank_synthesisQMF(...) * SplittingFilterbank_synthesisQMF(...)
@ -53,11 +53,11 @@ void SplittingFilterAnalysis(const WebRtc_Word16* in_data,
* Output: * Output:
* - out_data : super-wb speech signal * - out_data : super-wb speech signal
*/ */
void SplittingFilterSynthesis(const WebRtc_Word16* low_band, void SplittingFilterSynthesis(const int16_t* low_band,
const WebRtc_Word16* high_band, const int16_t* high_band,
WebRtc_Word16* out_data, int16_t* out_data,
WebRtc_Word32* filt_state1, int32_t* filt_state1,
WebRtc_Word32* filt_state2); int32_t* filt_state2);
} // namespace webrtc } // namespace webrtc
#endif // WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_SPLITTING_FILTER_H_ #endif // WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_SPLITTING_FILTER_H_
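Taken together, the pair gives the round trip used for super-wideband audio: a 32 kHz frame is split into 0-4 kHz and 4-8 kHz halves (each sampled at 16 kHz), the bands are processed separately, and the result is recombined. A hedged sketch assuming 10 ms frames (320 samples in, 160 per band) and the six-word filter states seen in the AudioBuffer struct earlier in this diff; the wrapper name is illustrative, and the states are zeroed here only for brevity (real code keeps them across frames):

#include <cstdint>

void SplitProcessRecombine(const int16_t* in_frame /* 320 samples @ 32 kHz */,
                           int16_t* out_frame /* 320 samples */) {
  int16_t low[160], high[160];
  int32_t analysis_state1[6] = {0}, analysis_state2[6] = {0};
  int32_t synthesis_state1[6] = {0}, synthesis_state2[6] = {0};

  webrtc::SplittingFilterAnalysis(in_frame, low, high,
                                  analysis_state1, analysis_state2);
  // ... per-band processing (AEC / NS / AGC) would run here ...
  webrtc::SplittingFilterSynthesis(low, high, out_frame,
                                   synthesis_state1, synthesis_state2);
}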


@ -574,10 +574,10 @@ void void_main(int argc, char* argv[]) {
TickTime t0 = TickTime::Now(); TickTime t0 = TickTime::Now();
TickTime t1 = t0; TickTime t1 = t0;
WebRtc_Word64 max_time_us = 0; int64_t max_time_us = 0;
WebRtc_Word64 max_time_reverse_us = 0; int64_t max_time_reverse_us = 0;
WebRtc_Word64 min_time_us = 1e6; int64_t min_time_us = 1e6;
WebRtc_Word64 min_time_reverse_us = 1e6; int64_t min_time_reverse_us = 1e6;
// TODO(ajm): Ideally we would refactor this block into separate functions, // TODO(ajm): Ideally we would refactor this block into separate functions,
// but for now we want to share the variables. // but for now we want to share the variables.
@ -1015,7 +1015,7 @@ void void_main(int argc, char* argv[]) {
if (perf_testing) { if (perf_testing) {
if (primary_count > 0) { if (primary_count > 0) {
WebRtc_Word64 exec_time = acc_ticks.Milliseconds(); int64_t exec_time = acc_ticks.Milliseconds();
printf("\nTotal time: %.3f s, file time: %.2f s\n", printf("\nTotal time: %.3f s, file time: %.2f s\n",
exec_time * 0.001, primary_count * 0.01); exec_time * 0.001, primary_count * 0.01);
printf("Time per frame: %.3f ms (average), %.3f ms (max)," printf("Time per frame: %.3f ms (average), %.3f ms (max),"


@ -61,7 +61,7 @@ int VoiceDetectionImpl::ProcessCaptureAudio(AudioBuffer* audio) {
} }
assert(audio->samples_per_split_channel() <= 160); assert(audio->samples_per_split_channel() <= 160);
WebRtc_Word16* mixed_data = audio->low_pass_split_data(0); int16_t* mixed_data = audio->low_pass_split_data(0);
if (audio->num_channels() > 1) { if (audio->num_channels() > 1) {
audio->CopyAndMixLowPass(1); audio->CopyAndMixLowPass(1);
mixed_data = audio->mixed_low_pass_data(0); mixed_data = audio->mixed_low_pass_data(0);