Revert "Audio processing: Feed each processing step its choice of int or float data"
This reverts r6138.

tbr=kwiberg@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/13509004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@6142 4adac7df-926f-26a2-2b94-8c16560cd09d
Parent: db60434b31
Commit: b1a66d166c
Binary file not shown.
@@ -116,7 +116,7 @@ extern int webrtc_aec_instance_count;
 // "Private" function prototypes.
 static void ProcessBlock(AecCore* aec);

-static void NonLinearProcessing(AecCore* aec, float* output, float* outputH);
+static void NonLinearProcessing(AecCore* aec, short* output, short* outputH);

 static void GetHighbandGain(const float* lambda, float* nlpGainHband);

@@ -160,28 +160,28 @@ int WebRtcAec_CreateAec(AecCore** aecInst) {
     return -1;
   }

-  aec->nearFrBuf = WebRtc_CreateBuffer(FRAME_LEN + PART_LEN, sizeof(float));
+  aec->nearFrBuf = WebRtc_CreateBuffer(FRAME_LEN + PART_LEN, sizeof(int16_t));
   if (!aec->nearFrBuf) {
     WebRtcAec_FreeAec(aec);
     aec = NULL;
     return -1;
   }

-  aec->outFrBuf = WebRtc_CreateBuffer(FRAME_LEN + PART_LEN, sizeof(float));
+  aec->outFrBuf = WebRtc_CreateBuffer(FRAME_LEN + PART_LEN, sizeof(int16_t));
   if (!aec->outFrBuf) {
     WebRtcAec_FreeAec(aec);
     aec = NULL;
     return -1;
   }

-  aec->nearFrBufH = WebRtc_CreateBuffer(FRAME_LEN + PART_LEN, sizeof(float));
+  aec->nearFrBufH = WebRtc_CreateBuffer(FRAME_LEN + PART_LEN, sizeof(int16_t));
   if (!aec->nearFrBufH) {
     WebRtcAec_FreeAec(aec);
     aec = NULL;
     return -1;
   }

-  aec->outFrBufH = WebRtc_CreateBuffer(FRAME_LEN + PART_LEN, sizeof(float));
+  aec->outFrBufH = WebRtc_CreateBuffer(FRAME_LEN + PART_LEN, sizeof(int16_t));
   if (!aec->outFrBufH) {
     WebRtcAec_FreeAec(aec);
     aec = NULL;
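The element size passed to WebRtc_CreateBuffer is the only type information the ring buffer ever sees; WebRtc_ReadBuffer later hands back raw elements of that same size, which is why every reader in this file switches between int16_t and float in lockstep with these calls. A minimal sketch of that contract (a toy buffer of my own, not the real ring_buffer implementation):

```cpp
#include <cstdint>
#include <cstring>
#include <vector>

// Toy illustration of an element-size based ring buffer contract, similar in
// spirit to WebRtc_CreateBuffer/WebRtc_ReadBuffer (layout here is an
// assumption, not the real implementation; wrap-around is omitted).
class ByteRing {
 public:
  ByteRing(size_t element_count, size_t element_size)
      : element_size_(element_size), data_(element_count * element_size) {}

  void Write(const void* src, size_t elements) {
    std::memcpy(&data_[write_pos_ * element_size_], src,
                elements * element_size_);
    write_pos_ += elements;
  }

  void Read(void* dst, size_t elements) {
    std::memcpy(dst, &data_[read_pos_ * element_size_],
                elements * element_size_);
    read_pos_ += elements;
  }

 private:
  size_t element_size_;
  std::vector<unsigned char> data_;
  size_t write_pos_ = 0;
  size_t read_pos_ = 0;
};

int main() {
  // If the buffer is created with sizeof(int16_t), every reader must also
  // treat the contents as int16_t; a float reader would corrupt the stream.
  ByteRing near_buf(160, sizeof(int16_t));
  int16_t frame[160] = {0};
  near_buf.Write(frame, 160);
  int16_t out[160];
  near_buf.Read(out, 160);
  (void)out;
}
```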
@@ -617,11 +617,11 @@ int WebRtcAec_MoveFarReadPtr(AecCore* aec, int elements) {
 }

 void WebRtcAec_ProcessFrame(AecCore* aec,
-                            const float* nearend,
-                            const float* nearendH,
+                            const short* nearend,
+                            const short* nearendH,
                             int knownDelay,
-                            float* out,
-                            float* outH) {
+                            int16_t* out,
+                            int16_t* outH) {
   int out_elements = 0;

   // For each frame the process is as follows:
@@ -814,7 +814,7 @@ void WebRtcAec_SetSystemDelay(AecCore* self, int delay) {

 static void ProcessBlock(AecCore* aec) {
   int i;
-  float y[PART_LEN], e[PART_LEN];
+  float d[PART_LEN], y[PART_LEN], e[PART_LEN], dH[PART_LEN];
   float scale;

   float fft[PART_LEN2];
@@ -833,22 +833,30 @@ static void ProcessBlock(AecCore* aec) {
   const float ramp = 1.0002f;
   const float gInitNoise[2] = {0.999f, 0.001f};

-  float nearend[PART_LEN];
-  float* nearend_ptr = NULL;
-  float output[PART_LEN];
-  float outputH[PART_LEN];
+  int16_t nearend[PART_LEN];
+  int16_t* nearend_ptr = NULL;
+  int16_t output[PART_LEN];
+  int16_t outputH[PART_LEN];

   float* xf_ptr = NULL;

-  // Concatenate old and new nearend blocks.
+  memset(dH, 0, sizeof(dH));
   if (aec->sampFreq == 32000) {
+    // Get the upper band first so we can reuse |nearend|.
     WebRtc_ReadBuffer(aec->nearFrBufH, (void**)&nearend_ptr, nearend, PART_LEN);
-    memcpy(aec->dBufH + PART_LEN, nearend_ptr, sizeof(nearend));
+    for (i = 0; i < PART_LEN; i++) {
+      dH[i] = (float)(nearend_ptr[i]);
+    }
+    memcpy(aec->dBufH + PART_LEN, dH, sizeof(float) * PART_LEN);
   }
   WebRtc_ReadBuffer(aec->nearFrBuf, (void**)&nearend_ptr, nearend, PART_LEN);
-  memcpy(aec->dBuf + PART_LEN, nearend_ptr, sizeof(nearend));

   // ---------- Ooura fft ----------
+  // Concatenate old and new nearend blocks.
+  for (i = 0; i < PART_LEN; i++) {
+    d[i] = (float)(nearend_ptr[i]);
+  }
+  memcpy(aec->dBuf + PART_LEN, d, sizeof(float) * PART_LEN);

 #ifdef WEBRTC_AEC_DEBUG_DUMP
   {
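The restored code widens each int16_t sample to float before the block enters the FFT pipeline, which is float throughout; the conversion is exact, since every int16_t value is representable as a float. A standalone sketch of the loop above (PART_LEN restated here for self-containment; its value of 64 is taken from aec_core.h):

```cpp
#include <cstdint>

enum { PART_LEN = 64 };  // Partition length, as defined in aec_core.h.

// Mirrors the restored loop: widen one block of int16_t samples to float so
// the float FFT math can consume them. Exact for all int16_t values.
void WidenToFloat(const int16_t* nearend_ptr, float* d) {
  for (int i = 0; i < PART_LEN; i++) {
    d[i] = (float)(nearend_ptr[i]);
  }
}
```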
@@ -960,7 +968,7 @@ static void ProcessBlock(AecCore* aec) {
   }

   for (i = 0; i < PART_LEN; i++) {
-    e[i] = nearend_ptr[i] - y[i];
+    e[i] = d[i] - y[i];
   }

   // Error fft
@@ -1019,7 +1027,7 @@ static void ProcessBlock(AecCore* aec) {
 #endif
 }

-static void NonLinearProcessing(AecCore* aec, float* output, float* outputH) {
+static void NonLinearProcessing(AecCore* aec, short* output, short* outputH) {
   float efw[2][PART_LEN1], dfw[2][PART_LEN1], xfw[2][PART_LEN1];
   complex_t comfortNoiseHband[PART_LEN1];
   float fft[PART_LEN2];
@@ -1313,10 +1321,13 @@ static void NonLinearProcessing(AecCore* aec, float* output, float* outputH) {
     fft[i] *= scale;  // fft scaling
     fft[i] = fft[i] * sqrtHanning[i] + aec->outBuf[i];

+    // Saturation protection
+    output[i] = (short)WEBRTC_SPL_SAT(
+        WEBRTC_SPL_WORD16_MAX, fft[i], WEBRTC_SPL_WORD16_MIN);
+
     fft[PART_LEN + i] *= scale;  // fft scaling
     aec->outBuf[i] = fft[PART_LEN + i] * sqrtHanning[PART_LEN - i];
   }
-  memcpy(output, fft, sizeof(*output) * PART_LEN);

   // For H band
   if (aec->sampFreq == 32000) {
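WEBRTC_SPL_SAT clamps the float overlap-add result into int16 range before it is narrowed to short, so overdriven samples clip instead of wrapping around. A stand-in with the same argument order as the call sites above (the real macro lives in WebRTC's signal processing headers; this body is an assumption that only mirrors its observable effect):

```cpp
#include <cassert>
#include <cstdint>

// Behaves like WEBRTC_SPL_SAT(max, value, min) as used in the diff:
// returns value clamped to [min, max].
static float Saturate(float max_val, float value, float min_val) {
  return value > max_val ? max_val : (value < min_val ? min_val : value);
}

int main() {
  const float kMax = 32767.0f;   // WEBRTC_SPL_WORD16_MAX
  const float kMin = -32768.0f;  // WEBRTC_SPL_WORD16_MIN
  int16_t out = (int16_t)Saturate(kMax, 40000.0f, kMin);
  assert(out == 32767);  // Overflow is clipped instead of wrapping.
  return 0;
}
```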
@@ -1340,8 +1351,8 @@ static void NonLinearProcessing(AecCore* aec, float* output, float* outputH) {

     // compute gain factor
     for (i = 0; i < PART_LEN; i++) {
-      dtmp = aec->dBufH[i];
-      dtmp = dtmp * nlpGainHband;  // for variable gain
+      dtmp = (float)aec->dBufH[i];
+      dtmp = (float)dtmp * nlpGainHband;  // for variable gain

       // add some comfort noise where Hband is attenuated
       if (flagHbandCn == 1) {
@@ -1349,7 +1360,9 @@ static void NonLinearProcessing(AecCore* aec, float* output, float* outputH) {
         dtmp += cnScaleHband * fft[i];
       }

-      outputH[i] = dtmp;
+      // Saturation protection
+      outputH[i] = (short)WEBRTC_SPL_SAT(
+          WEBRTC_SPL_WORD16_MAX, dtmp, WEBRTC_SPL_WORD16_MIN);
     }
   }

@@ -60,11 +60,11 @@ void WebRtcAec_InitAec_mips(void);

 void WebRtcAec_BufferFarendPartition(AecCore* aec, const float* farend);
 void WebRtcAec_ProcessFrame(AecCore* aec,
-                            const float* nearend,
-                            const float* nearendH,
+                            const short* nearend,
+                            const short* nearendH,
                             int knownDelay,
-                            float* out,
-                            float* outH);
+                            int16_t* out,
+                            int16_t* outH);

 // A helper function to call WebRtc_MoveReadPtr() for all far-end buffers.
 // Returns the number of elements moved, and adjusts |system_delay| by the
@@ -104,18 +104,18 @@ int webrtc_aec_instance_count = 0;
 static void EstBufDelayNormal(aecpc_t* aecInst);
 static void EstBufDelayExtended(aecpc_t* aecInst);
 static int ProcessNormal(aecpc_t* self,
-                         const float* near,
-                         const float* near_high,
-                         float* out,
-                         float* out_high,
+                         const int16_t* near,
+                         const int16_t* near_high,
+                         int16_t* out,
+                         int16_t* out_high,
                          int16_t num_samples,
                          int16_t reported_delay_ms,
                          int32_t skew);
 static void ProcessExtended(aecpc_t* self,
-                            const float* near,
-                            const float* near_high,
-                            float* out,
-                            float* out_high,
+                            const int16_t* near,
+                            const int16_t* near_high,
+                            int16_t* out,
+                            int16_t* out_high,
                             int16_t num_samples,
                             int16_t reported_delay_ms,
                             int32_t skew);
@@ -372,10 +372,10 @@ int32_t WebRtcAec_BufferFarend(void* aecInst,
 }

 int32_t WebRtcAec_Process(void* aecInst,
-                          const float* nearend,
-                          const float* nearendH,
-                          float* out,
-                          float* outH,
+                          const int16_t* nearend,
+                          const int16_t* nearendH,
+                          int16_t* out,
+                          int16_t* outH,
                           int16_t nrOfSamples,
                           int16_t msInSndCardBuf,
                           int32_t skew) {
@@ -632,10 +632,10 @@ AecCore* WebRtcAec_aec_core(void* handle) {
 }

 static int ProcessNormal(aecpc_t* aecpc,
-                         const float* nearend,
-                         const float* nearendH,
-                         float* out,
-                         float* outH,
+                         const int16_t* nearend,
+                         const int16_t* nearendH,
+                         int16_t* out,
+                         int16_t* outH,
                          int16_t nrOfSamples,
                          int16_t msInSndCardBuf,
                          int32_t skew) {
@@ -689,10 +689,10 @@ static int ProcessNormal(aecpc_t* aecpc,
   if (aecpc->startup_phase) {
     // Only needed if they don't already point to the same place.
     if (nearend != out) {
-      memcpy(out, nearend, sizeof(*out) * nrOfSamples);
+      memcpy(out, nearend, sizeof(short) * nrOfSamples);
     }
     if (nearendH != outH) {
-      memcpy(outH, nearendH, sizeof(*outH) * nrOfSamples);
+      memcpy(outH, nearendH, sizeof(short) * nrOfSamples);
     }

     // The AEC is in the start up mode
@@ -789,10 +789,10 @@ static int ProcessNormal(aecpc_t* aecpc,
 }

 static void ProcessExtended(aecpc_t* self,
-                            const float* near,
-                            const float* near_high,
-                            float* out,
-                            float* out_high,
+                            const int16_t* near,
+                            const int16_t* near_high,
+                            int16_t* out,
+                            int16_t* out_high,
                             int16_t num_samples,
                             int16_t reported_delay_ms,
                             int32_t skew) {
@@ -823,10 +823,10 @@ static void ProcessExtended(aecpc_t* self,
   if (!self->farend_started) {
     // Only needed if they don't already point to the same place.
     if (near != out) {
-      memcpy(out, near, sizeof(*out) * num_samples);
+      memcpy(out, near, sizeof(short) * num_samples);
     }
     if (near_high != out_high) {
-      memcpy(out_high, near_high, sizeof(*out_high) * num_samples);
+      memcpy(out_high, near_high, sizeof(short) * num_samples);
     }
     return;
   }
@@ -133,9 +133,9 @@ int32_t WebRtcAec_BufferFarend(void* aecInst,
  * Inputs                       Description
  * -------------------------------------------------------------------
  * void*          aecInst       Pointer to the AEC instance
- * float*         nearend       In buffer containing one frame of
+ * int16_t*       nearend       In buffer containing one frame of
  *                              nearend+echo signal for L band
- * float*         nearendH      In buffer containing one frame of
+ * int16_t*       nearendH      In buffer containing one frame of
  *                              nearend+echo signal for H band
  * int16_t        nrOfSamples   Number of samples in nearend buffer
  * int16_t        msInSndCardBuf Delay estimate for sound card and
@@ -146,18 +146,18 @@ int32_t WebRtcAec_BufferFarend(void* aecInst,
  *
  * Outputs                      Description
  * -------------------------------------------------------------------
- * float*         out           Out buffer, one frame of processed nearend
+ * int16_t*       out           Out buffer, one frame of processed nearend
  *                              for L band
- * float*         outH          Out buffer, one frame of processed nearend
+ * int16_t*       outH          Out buffer, one frame of processed nearend
  *                              for H band
  * int32_t        return        0: OK
  *                              -1: error
  */
 int32_t WebRtcAec_Process(void* aecInst,
-                          const float* nearend,
-                          const float* nearendH,
-                          float* out,
-                          float* outH,
+                          const int16_t* nearend,
+                          const int16_t* nearendH,
+                          int16_t* out,
+                          int16_t* outH,
                           int16_t nrOfSamples,
                           int16_t msInSndCardBuf,
                           int32_t skew);
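With the revert in place, callers hand WebRtcAec_Process int16_t buffers again. A hedged usage sketch built only from the signature and parameter docs above; the 160-sample (10 ms at 16 kHz) frame size, the NULL H-band pointers below 32 kHz, and the elided create/init and error handling are my assumptions:

```cpp
#include <cstdint>

// Prototype as declared in the reverted header above.
extern "C" int32_t WebRtcAec_Process(void* aecInst,
                                     const int16_t* nearend,
                                     const int16_t* nearendH,
                                     int16_t* out,
                                     int16_t* outH,
                                     int16_t nrOfSamples,
                                     int16_t msInSndCardBuf,
                                     int32_t skew);

// Hypothetical wrapper: one 10 ms, 16 kHz frame (160 samples). Assumes the
// instance was already set up via WebRtcAec_Create()/WebRtcAec_Init().
int32_t ProcessOneFrame(void* aec_handle,
                        const int16_t* nearend,
                        int16_t* out,
                        int16_t delay_ms) {
  return WebRtcAec_Process(aec_handle,
                           nearend,
                           NULL,      // nearendH: no H band at 16 kHz
                           out,
                           NULL,      // outH
                           160,       // nrOfSamples
                           delay_ms,  // msInSndCardBuf
                           0);        // skew
}
```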
@@ -46,18 +46,16 @@ class SystemDelayTest : public ::testing::Test {
   aecpc_t* self_;
   int samples_per_frame_;
   // Dummy input/output speech data.
-  static const int kSamplesPerChunk = 160;
-  int16_t far_[kSamplesPerChunk];
-  float near_[kSamplesPerChunk];
-  float out_[kSamplesPerChunk];
+  int16_t far_[160];
+  int16_t near_[160];
+  int16_t out_[160];
 };

 SystemDelayTest::SystemDelayTest()
     : handle_(NULL), self_(NULL), samples_per_frame_(0) {
   // Dummy input data are set with more or less arbitrary non-zero values.
   memset(far_, 1, sizeof(far_));
-  for (int i = 0; i < kSamplesPerChunk; i++)
-    near_[i] = 514.0;
+  memset(near_, 2, sizeof(near_));
   memset(out_, 0, sizeof(out_));
 }

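The memset looks like it should disagree with the float version's per-sample 514.0, but it doesn't: memset writes the byte 0x02 into both bytes of every int16_t, and 0x0202 == 514 regardless of endianness. A quick check:

```cpp
#include <cassert>
#include <cstdint>
#include <cstring>

int main() {
  int16_t near[160];
  std::memset(near, 2, sizeof(near));  // every byte becomes 0x02
  // Both bytes of each sample are 0x02, so the value is 0x0202 == 514 on
  // big- and little-endian machines alike.
  assert(near[0] == 0x0202);
  assert(near[0] == 514);
  return 0;
}
```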
@@ -68,64 +68,6 @@ void StereoToMono(const int16_t* left, const int16_t* right, int16_t* out,

 }  // namespace

-// One int16_t and one float ChannelBuffer that are kept in sync. The sync is
-// broken when someone requests write access to either ChannelBuffer, and
-// reestablished when someone requests the outdated ChannelBuffer. It is
-// therefore safe to use the return value of ibuf() and fbuf() until the next
-// call to the other method.
-class IFChannelBuffer {
- public:
-  IFChannelBuffer(int samples_per_channel, int num_channels)
-      : ivalid_(true),
-        ibuf_(samples_per_channel, num_channels),
-        fvalid_(true),
-        fbuf_(samples_per_channel, num_channels) {}
-
-  ChannelBuffer<int16_t>* ibuf() {
-    RefreshI();
-    fvalid_ = false;
-    return &ibuf_;
-  }
-
-  ChannelBuffer<float>* fbuf() {
-    RefreshF();
-    ivalid_ = false;
-    return &fbuf_;
-  }
-
- private:
-  void RefreshF() {
-    if (!fvalid_) {
-      assert(ivalid_);
-      const int16_t* const int_data = ibuf_.data();
-      float* const float_data = fbuf_.data();
-      const int length = fbuf_.length();
-      for (int i = 0; i < length; ++i)
-        float_data[i] = int_data[i];
-      fvalid_ = true;
-    }
-  }
-
-  void RefreshI() {
-    if (!ivalid_) {
-      assert(fvalid_);
-      const float* const float_data = fbuf_.data();
-      int16_t* const int_data = ibuf_.data();
-      const int length = ibuf_.length();
-      for (int i = 0; i < length; ++i)
-        int_data[i] = WEBRTC_SPL_SAT(std::numeric_limits<int16_t>::max(),
-                                     float_data[i],
-                                     std::numeric_limits<int16_t>::min());
-      ivalid_ = true;
-    }
-  }
-
-  bool ivalid_;
-  ChannelBuffer<int16_t> ibuf_;
-  bool fvalid_;
-  ChannelBuffer<float> fbuf_;
-};
-
 class SplitChannelBuffer {
  public:
   SplitChannelBuffer(int samples_per_split_channel, int num_channels)
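For reference, the deleted IFChannelBuffer implements a lazy two-view scheme: handing out one view marks the other stale, and the stale view is reconverted only on its next request, with float-to-int16 saturation on the way back. A self-contained miniature of that protocol (a single fixed-length channel instead of the real ChannelBuffer; that simplification is mine):

```cpp
#include <cassert>
#include <cstdint>

// Miniature of the deleted IFChannelBuffer: ibuf()/fbuf() refresh their own
// view from the other, then mark the other view stale.
class MiniIFBuffer {
 public:
  int16_t* ibuf() { RefreshI(); fvalid_ = false; return i_; }
  float* fbuf() { RefreshF(); ivalid_ = false; return f_; }

 private:
  void RefreshF() {
    if (!fvalid_) {
      for (int i = 0; i < 4; ++i) f_[i] = i_[i];
      fvalid_ = true;
    }
  }
  void RefreshI() {
    if (!ivalid_) {
      for (int i = 0; i < 4; ++i) {
        float v = f_[i];  // The real code saturates to int16 range here.
        i_[i] = v > 32767.f ? 32767 : (v < -32768.f ? -32768 : (int16_t)v);
      }
      ivalid_ = true;
    }
  }
  bool ivalid_ = true, fvalid_ = true;
  int16_t i_[4] = {0};
  float f_[4] = {0};
};

int main() {
  MiniIFBuffer buf;
  buf.ibuf()[0] = 1000;              // write via int view; float view stale
  assert(buf.fbuf()[0] == 1000.0f);  // fbuf() reconverts on demand
  buf.fbuf()[0] = 40000.0f;          // write via float view; int view stale
  assert(buf.ibuf()[0] == 32767);    // saturated on reconversion
  return 0;
}
```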
@@ -134,14 +76,12 @@ class SplitChannelBuffer {
   }
   ~SplitChannelBuffer() {}

-  int16_t* low_channel(int i) { return low_.ibuf()->channel(i); }
-  int16_t* high_channel(int i) { return high_.ibuf()->channel(i); }
-  float* low_channel_f(int i) { return low_.fbuf()->channel(i); }
-  float* high_channel_f(int i) { return high_.fbuf()->channel(i); }
+  int16_t* low_channel(int i) { return low_.channel(i); }
+  int16_t* high_channel(int i) { return high_.channel(i); }

  private:
-  IFChannelBuffer low_;
-  IFChannelBuffer high_;
+  ChannelBuffer<int16_t> low_;
+  ChannelBuffer<int16_t> high_;
 };

 AudioBuffer::AudioBuffer(int input_samples_per_channel,
@@ -162,8 +102,8 @@ AudioBuffer::AudioBuffer(int input_samples_per_channel,
       is_muted_(false),
       data_(NULL),
       keyboard_data_(NULL),
-      channels_(new IFChannelBuffer(proc_samples_per_channel_,
-                                    num_proc_channels_)) {
+      channels_(new ChannelBuffer<int16_t>(proc_samples_per_channel_,
+                                           num_proc_channels_)) {
   assert(input_samples_per_channel_ > 0);
   assert(proc_samples_per_channel_ > 0);
   assert(output_samples_per_channel_ > 0);
@@ -245,7 +185,7 @@ void AudioBuffer::CopyFrom(const float* const* data,
   // Convert to int16.
   for (int i = 0; i < num_proc_channels_; ++i) {
     ScaleAndRoundToInt16(data_ptr[i], proc_samples_per_channel_,
-                         channels_->ibuf()->channel(i));
+                         channels_->channel(i));
   }
 }

@@ -262,9 +202,7 @@ void AudioBuffer::CopyTo(int samples_per_channel,
     data_ptr = process_buffer_->channels();
   }
   for (int i = 0; i < num_proc_channels_; ++i) {
-    ScaleToFloat(channels_->ibuf()->channel(i),
-                 proc_samples_per_channel_,
-                 data_ptr[i]);
+    ScaleToFloat(channels_->channel(i), proc_samples_per_channel_, data_ptr[i]);
   }

   // Resample.
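ScaleAndRoundToInt16 and ScaleToFloat are the boundary converters between the external float range and the internal int16 representation. A hedged sketch of what such a pair does (the exact scale factor and rounding mode of the real helpers are assumptions here):

```cpp
#include <cmath>
#include <cstdint>

// Sketch of int16 <-> [-1, 1] float boundary conversions. The real helpers
// live in WebRTC's audio utility code; the 32768 scale factor and
// round-to-nearest behavior are assumptions for illustration.
void SketchScaleAndRoundToInt16(const float* src, int size, int16_t* dest) {
  for (int i = 0; i < size; ++i) {
    float v = src[i] * 32768.0f;
    if (v > 32767.0f) v = 32767.0f;
    if (v < -32768.0f) v = -32768.0f;
    dest[i] = static_cast<int16_t>(std::lround(v));
  }
}

void SketchScaleToFloat(const int16_t* src, int size, float* dest) {
  for (int i = 0; i < size; ++i)
    dest[i] = src[i] / 32768.0f;
}
```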
@@ -295,7 +233,7 @@ const int16_t* AudioBuffer::data(int channel) const {
     return data_;
   }

-  return channels_->ibuf()->channel(channel);
+  return channels_->channel(channel);
 }

 int16_t* AudioBuffer::data(int channel) {
@@ -303,19 +241,6 @@ int16_t* AudioBuffer::data(int channel) {
   return const_cast<int16_t*>(t->data(channel));
 }

-float* AudioBuffer::data_f(int channel) {
-  assert(channel >= 0 && channel < num_proc_channels_);
-  if (data_ != NULL) {
-    // Need to make a copy of the data instead of just pointing to it, since
-    // we're about to convert it to float.
-    assert(channel == 0 && num_proc_channels_ == 1);
-    memcpy(channels_->ibuf()->channel(0), data_,
-           sizeof(*data_) * proc_samples_per_channel_);
-    data_ = NULL;
-  }
-  return channels_->fbuf()->channel(channel);
-}
-
 const int16_t* AudioBuffer::low_pass_split_data(int channel) const {
   assert(channel >= 0 && channel < num_proc_channels_);
   if (split_channels_.get() == NULL) {
@@ -330,12 +255,6 @@ int16_t* AudioBuffer::low_pass_split_data(int channel) {
   return const_cast<int16_t*>(t->low_pass_split_data(channel));
 }

-float* AudioBuffer::low_pass_split_data_f(int channel) {
-  assert(channel >= 0 && channel < num_proc_channels_);
-  return split_channels_.get() ? split_channels_->low_channel_f(channel)
-                               : data_f(channel);
-}
-
 const int16_t* AudioBuffer::high_pass_split_data(int channel) const {
   assert(channel >= 0 && channel < num_proc_channels_);
   if (split_channels_.get() == NULL) {
@@ -350,12 +269,6 @@ int16_t* AudioBuffer::high_pass_split_data(int channel) {
   return const_cast<int16_t*>(t->high_pass_split_data(channel));
 }

-float* AudioBuffer::high_pass_split_data_f(int channel) {
-  assert(channel >= 0 && channel < num_proc_channels_);
-  return split_channels_.get() ? split_channels_->high_channel_f(channel)
-                               : NULL;
-}
-
 const int16_t* AudioBuffer::mixed_data(int channel) const {
   assert(channel >= 0 && channel < num_mixed_channels_);

@@ -435,7 +348,7 @@ void AudioBuffer::DeinterleaveFrom(AudioFrame* frame) {

   int16_t* interleaved = frame->data_;
   for (int i = 0; i < num_proc_channels_; i++) {
-    int16_t* deinterleaved = channels_->ibuf()->channel(i);
+    int16_t* deinterleaved = channels_->channel(i);
     int interleaved_idx = i;
     for (int j = 0; j < proc_samples_per_channel_; j++) {
       deinterleaved[j] = interleaved[interleaved_idx];
@@ -455,15 +368,14 @@ void AudioBuffer::InterleaveTo(AudioFrame* frame, bool data_changed) const {
     return;
   }

-  if (data_) {
-    assert(num_proc_channels_ == 1);
+  if (num_proc_channels_ == 1) {
     assert(data_ == frame->data_);
     return;
   }

   int16_t* interleaved = frame->data_;
   for (int i = 0; i < num_proc_channels_; i++) {
-    int16_t* deinterleaved = channels_->ibuf()->channel(i);
+    int16_t* deinterleaved = channels_->channel(i);
     int interleaved_idx = i;
     for (int j = 0; j < proc_samples_per_channel_; j++) {
       interleaved[interleaved_idx] = deinterleaved[j];
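Both loops walk the interleaved frame with the channel index as the starting offset; successive samples of one channel sit a full channel count apart (the increment itself falls outside the lines quoted above, but this is the pattern the visible code implies). A worked stereo example:

```cpp
#include <cassert>
#include <cstdint>

int main() {
  // Stereo frame laid out L0 R0 L1 R1 ...; channel i starts at offset i and
  // each of its samples is num_channels apart, mirroring the
  // interleaved_idx pattern in AudioBuffer.
  const int num_channels = 2;
  const int samples = 3;
  int16_t interleaved[6] = {10, 20, 11, 21, 12, 22};
  int16_t deinterleaved[2][3];
  for (int i = 0; i < num_channels; i++) {
    int idx = i;
    for (int j = 0; j < samples; j++) {
      deinterleaved[i][j] = interleaved[idx];
      idx += num_channels;
    }
  }
  assert(deinterleaved[0][2] == 12 && deinterleaved[1][0] == 20);
  return 0;
}
```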
@@ -482,8 +394,8 @@ void AudioBuffer::CopyAndMix(int num_mixed_channels) {
                                                num_mixed_channels));
   }

-  StereoToMono(channels_->ibuf()->channel(0),
-               channels_->ibuf()->channel(1),
+  StereoToMono(channels_->channel(0),
+               channels_->channel(1),
                mixed_channels_->channel(0),
                proc_samples_per_channel_);

@@ -24,7 +24,6 @@ namespace webrtc {

 class PushSincResampler;
 class SplitChannelBuffer;
-class IFChannelBuffer;

 struct SplitFilterStates {
   SplitFilterStates() {
@@ -65,13 +64,6 @@ class AudioBuffer {
   const int16_t* mixed_data(int channel) const;
   const int16_t* mixed_low_pass_data(int channel) const;
   const int16_t* low_pass_reference(int channel) const;
-
-  // Float versions of the accessors, with automatic conversion back and forth
-  // as necessary. The range of the numbers are the same as for int16_t.
-  float* data_f(int channel);
-  float* low_pass_split_data_f(int channel);
-  float* high_pass_split_data_f(int channel);
-
   const float* keyboard_data() const;

   SplitFilterStates* filter_states(int channel);
@@ -122,7 +114,7 @@ class AudioBuffer {
   int16_t* data_;

   const float* keyboard_data_;
-  scoped_ptr<IFChannelBuffer> channels_;
+  scoped_ptr<ChannelBuffer<int16_t> > channels_;
   scoped_ptr<SplitChannelBuffer> split_channels_;
   scoped_ptr<SplitFilterStates[]> filter_states_;
   scoped_ptr<ChannelBuffer<int16_t> > mixed_channels_;
@@ -129,10 +129,10 @@ int EchoCancellationImpl::ProcessCaptureAudio(AudioBuffer* audio) {
       Handle* my_handle = handle(handle_index);
       err = WebRtcAec_Process(
           my_handle,
-          audio->low_pass_split_data_f(i),
-          audio->high_pass_split_data_f(i),
-          audio->low_pass_split_data_f(i),
-          audio->high_pass_split_data_f(i),
+          audio->low_pass_split_data(i),
+          audio->high_pass_split_data(i),
+          audio->low_pass_split_data(i),
+          audio->high_pass_split_data(i),
           static_cast<int16_t>(audio->samples_per_split_channel()),
           apm_->stream_delay_ms(),
           stream_drift_samples_);