Refactor AEC: PowerLevel
* Style changes
* Tested with audioproc_unittest and trybots

TEST=none
BUG=none

Review URL: https://webrtc-codereview.appspot.com/1116005

git-svn-id: http://webrtc.googlecode.com/svn/trunk@3551 4adac7df-926f-26a2-2b94-8c16560cd09d
parent 4d1cfae622
commit 71e91f3b64
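Summary of the renames applied by this CL (assembled from the hunks below, for quick reference):

    power_level_t                  ->  PowerLevel       (named struct typedef)
    enum {offsetLevel = -100};     ->  enum { kOffsetLevel = -100 };
    const float bigFloat = 1E17f;  ->  const float kBigFloat = 1E17f;

Local constants and enumerators get a k prefix and the struct typedef gets a CamelCase tag; function-definition braces move onto the signature line (see InitLevel below). No behavioral change is intended per the commit message.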
@@ -126,10 +126,10 @@ static void ComfortNoise(aec_t *aec, float efw[2][PART_LEN1],
                         complex_t *comfortNoiseHband,
                         const float *noisePow, const float *lambda);
 
-static void InitLevel(power_level_t *level);
+static void InitLevel(PowerLevel* level);
 static void InitStats(Stats* stats);
 static void InitMetrics(aec_t *aec);
-static void UpdateLevel(power_level_t* level, float in[2][PART_LEN1]);
+static void UpdateLevel(PowerLevel* level, float in[2][PART_LEN1]);
 static void UpdateMetrics(aec_t *aec);
 // Convert from time domain to frequency domain. Note that |time_data| are
 // overwritten.
@@ -1440,27 +1440,26 @@ static void ComfortNoise(aec_t *aec, float efw[2][PART_LEN1],
   }
 }
 
-static void InitLevel(power_level_t *level)
-{
-    const float bigFloat = 1E17f;
+static void InitLevel(PowerLevel* level) {
+  const float kBigFloat = 1E17f;
 
-    level->averagelevel = 0;
-    level->framelevel = 0;
-    level->minlevel = bigFloat;
-    level->frsum = 0;
-    level->sfrsum = 0;
-    level->frcounter = 0;
-    level->sfrcounter = 0;
+  level->averagelevel = 0;
+  level->framelevel = 0;
+  level->minlevel = kBigFloat;
+  level->frsum = 0;
+  level->sfrsum = 0;
+  level->frcounter = 0;
+  level->sfrcounter = 0;
 }
 
 static void InitStats(Stats* stats) {
-  stats->instant = offsetLevel;
-  stats->average = offsetLevel;
-  stats->max = offsetLevel;
-  stats->min = offsetLevel * (-1);
+  stats->instant = kOffsetLevel;
+  stats->average = kOffsetLevel;
+  stats->max = kOffsetLevel;
+  stats->min = kOffsetLevel * (-1);
   stats->sum = 0;
   stats->hisum = 0;
-  stats->himean = offsetLevel;
+  stats->himean = kOffsetLevel;
   stats->counter = 0;
   stats->hicounter = 0;
 }
@@ -1479,7 +1478,7 @@ static void InitMetrics(aec_t* self) {
   InitStats(&self->rerl);
 }
 
-static void UpdateLevel(power_level_t* level, float in[2][PART_LEN1]) {
+static void UpdateLevel(PowerLevel* level, float in[2][PART_LEN1]) {
   // Do the energy calculation in the frequency domain. The FFT is performed on
   // a segment of PART_LEN2 samples due to overlap, but we only want the energy
   // of half that data (the last PART_LEN samples). Parseval's relation states
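As an aside, the comment above refers to Parseval's relation, sum_n x[n]^2 = (1/N) * sum_k |X[k]|^2, which lets UpdateLevel() accumulate frame energy directly from the FFT bins. A minimal sketch of that idea follows; the helper name and scaling details are illustrative only and not part of this CL (the real UpdateLevel() handles the normalization and the overlap described in the comment itself):

    // Illustrative only: energy of one frame computed from its FFT bins.
    // in[0][k] and in[1][k] hold the real and imaginary parts of bin k.
    static float FrameEnergyFromFft(const float in[2][PART_LEN1]) {
      float energy = 0.0f;
      int k;
      for (k = 0; k < PART_LEN1; k++) {
        energy += in[0][k] * in[0][k] + in[1][k] * in[1][k];  // |X[k]|^2
      }
      // A 1/N factor (N = PART_LEN2) and the doubling of the non-DC,
      // non-Nyquist bins of a real FFT are omitted for brevity.
      return energy;
    }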
@@ -43,17 +43,17 @@ typedef float complex_t[2];
 // compile time.
 
 // Metrics
-enum {offsetLevel = -100};
+enum { kOffsetLevel = -100 };
 
-typedef struct {
-    float sfrsum;
-    int sfrcounter;
-    float framelevel;
-    float frsum;
-    int frcounter;
-    float minlevel;
-    float averagelevel;
-} power_level_t;
+typedef struct PowerLevel {
+  float sfrsum;
+  int sfrcounter;
+  float framelevel;
+  float frsum;
+  int frcounter;
+  float minlevel;
+  float averagelevel;
+} PowerLevel;
 
 typedef struct Stats {
   float instant;
@@ -124,10 +124,10 @@ typedef struct {
 
   int noiseEstCtr;
 
-  power_level_t farlevel;
-  power_level_t nearlevel;
-  power_level_t linoutlevel;
-  power_level_t nlpoutlevel;
+  PowerLevel farlevel;
+  PowerLevel nearlevel;
+  PowerLevel linoutlevel;
+  PowerLevel nlpoutlevel;
 
   int metricsMode;
   int stateCounter;
@@ -591,47 +591,47 @@ int WebRtcAec_GetMetrics(void* handle, AecMetrics* metrics) {
   // ERL
   metrics->erl.instant = (int) erl.instant;
 
-  if ((erl.himean > offsetLevel) && (erl.average > offsetLevel)) {
+  if ((erl.himean > kOffsetLevel) && (erl.average > kOffsetLevel)) {
     // Use a mix between regular average and upper part average.
     dtmp = kUpWeight * erl.himean + (1 - kUpWeight) * erl.average;
     metrics->erl.average = (int) dtmp;
   } else {
-    metrics->erl.average = offsetLevel;
+    metrics->erl.average = kOffsetLevel;
   }
 
   metrics->erl.max = (int) erl.max;
 
-  if (erl.min < (offsetLevel * (-1))) {
+  if (erl.min < (kOffsetLevel * (-1))) {
     metrics->erl.min = (int) erl.min;
   } else {
-    metrics->erl.min = offsetLevel;
+    metrics->erl.min = kOffsetLevel;
   }
 
   // ERLE
   metrics->erle.instant = (int) erle.instant;
 
-  if ((erle.himean > offsetLevel) && (erle.average > offsetLevel)) {
+  if ((erle.himean > kOffsetLevel) && (erle.average > kOffsetLevel)) {
     // Use a mix between regular average and upper part average.
     dtmp = kUpWeight * erle.himean + (1 - kUpWeight) * erle.average;
     metrics->erle.average = (int) dtmp;
   } else {
-    metrics->erle.average = offsetLevel;
+    metrics->erle.average = kOffsetLevel;
   }
 
   metrics->erle.max = (int) erle.max;
 
-  if (erle.min < (offsetLevel * (-1))) {
+  if (erle.min < (kOffsetLevel * (-1))) {
     metrics->erle.min = (int) erle.min;
   } else {
-    metrics->erle.min = offsetLevel;
+    metrics->erle.min = kOffsetLevel;
   }
 
   // RERL
-  if ((metrics->erl.average > offsetLevel)
-      && (metrics->erle.average > offsetLevel)) {
+  if ((metrics->erl.average > kOffsetLevel)
+      && (metrics->erle.average > kOffsetLevel)) {
     stmp = metrics->erl.average + metrics->erle.average;
   } else {
-    stmp = offsetLevel;
+    stmp = kOffsetLevel;
   }
   metrics->rerl.average = stmp;
 
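A note on the RERL branch above: ERL and ERLE are tracked on a dB-like scale, so summing metrics->erl.average and metrics->erle.average corresponds to multiplying the two underlying linear ratios, i.e. rerl = erl_dB + erle_dB. The kOffsetLevel checks simply keep the sum from being formed before both averages hold real estimates (both are initialized to kOffsetLevel in InitStats above).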
@@ -643,20 +643,20 @@ int WebRtcAec_GetMetrics(void* handle, AecMetrics* metrics) {
   // A_NLP
   metrics->aNlp.instant = (int) a_nlp.instant;
 
-  if ((a_nlp.himean > offsetLevel) && (a_nlp.average > offsetLevel)) {
+  if ((a_nlp.himean > kOffsetLevel) && (a_nlp.average > kOffsetLevel)) {
    // Use a mix between regular average and upper part average.
     dtmp = kUpWeight * a_nlp.himean + (1 - kUpWeight) * a_nlp.average;
     metrics->aNlp.average = (int) dtmp;
   } else {
-    metrics->aNlp.average = offsetLevel;
+    metrics->aNlp.average = kOffsetLevel;
   }
 
   metrics->aNlp.max = (int) a_nlp.max;
 
-  if (a_nlp.min < (offsetLevel * (-1))) {
+  if (a_nlp.min < (kOffsetLevel * (-1))) {
     metrics->aNlp.min = (int) a_nlp.min;
   } else {
-    metrics->aNlp.min = offsetLevel;
+    metrics->aNlp.min = kOffsetLevel;
   }
 
   return 0;
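For context, a hedged usage sketch of the metrics API touched above (not part of this CL; aec_handle is an assumed, already-created AEC instance with metrics mode enabled):

    AecMetrics metrics;
    if (WebRtcAec_GetMetrics(aec_handle, &metrics) == 0) {
      // kOffsetLevel (-100) acts as the "no estimate yet" floor, as in the code above.
      printf("ERL   average: %d\n", metrics.erl.average);
      printf("ERLE  average: %d\n", metrics.erle.average);
      printf("A_NLP average: %d\n", metrics.aNlp.average);
    }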