diff --git a/webrtc/common_audio/signal_processing/include/signal_processing_library.h b/webrtc/common_audio/signal_processing/include/signal_processing_library.h
index a9cf3842f..56bbbe66d 100644
--- a/webrtc/common_audio/signal_processing/include/signal_processing_library.h
+++ b/webrtc/common_audio/signal_processing/include/signal_processing_library.h
@@ -46,8 +46,6 @@
     ((uint32_t) ((uint32_t)(a) * (uint16_t)(b)))
 #define WEBRTC_SPL_MUL_16_U16(a, b) \
     ((int32_t)(int16_t)(a) * (uint16_t)(b))
-#define WEBRTC_SPL_DIV(a, b) \
-    ((int32_t) ((int32_t)(a) / (int32_t)(b)))
 
 #ifndef WEBRTC_ARCH_ARM_V7
 // For ARMv7 platforms, these are inline functions in spl_inl_armv7.h
diff --git a/webrtc/common_audio/signal_processing/signal_processing_unittest.cc b/webrtc/common_audio/signal_processing/signal_processing_unittest.cc
index 5d07f16ed..6a70a02ef 100644
--- a/webrtc/common_audio/signal_processing/signal_processing_unittest.cc
+++ b/webrtc/common_audio/signal_processing/signal_processing_unittest.cc
@@ -47,7 +47,6 @@ TEST_F(SplTest, MacroTest) {
     a = b;
     b = -3;
 
-    EXPECT_EQ(-5461, WEBRTC_SPL_DIV(a, b));
     EXPECT_EQ(-1, WEBRTC_SPL_MUL_16_32_RSFT16(a, b));
     EXPECT_EQ(-1, WEBRTC_SPL_MUL_16_32_RSFT15(a, b));
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_logist.c b/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_logist.c
index e57416580..4efdecc9a 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_logist.c
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_logist.c
@@ -282,11 +282,11 @@ int16_t WebRtcIsacfix_DecLogisticMulti2(int16_t *dataQ7,
         if (inSqrt < 0)
           inSqrt=-inSqrt;
 
-        newRes = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_DIV(inSqrt, res) + res, 1);
+        newRes = (inSqrt / res + res) >> 1;
         do
         {
           res = newRes;
-          newRes = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_DIV(inSqrt, res) + res, 1);
+          newRes = (inSqrt / res + res) >> 1;
         } while (newRes != res && i-- > 0);
 
         tmpARSpecQ8 = (uint16_t)newRes;
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.c b/webrtc/modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.c
index 2ac15350c..d28a6f705 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.c
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.c
@@ -296,9 +296,9 @@ int32_t WebRtcIsacfix_UpdateUplinkBwImpl(BwEstimatorstr *bweStr,
       bweStr->recBwInv = WEBRTC_SPL_RSHIFT_W32((int32_t)bweStr->recBwInv, 13);
 
     } else {
-      /* recBwInv = 1 / (INIT_BN_EST + INIT_HDR_RATE) in Q26 (Q30??)*/
-      bweStr->recBwInv = WEBRTC_SPL_DIV((1073741824 +
-        WEBRTC_SPL_LSHIFT_W32(((int32_t)INIT_BN_EST + INIT_HDR_RATE), 1)), INIT_BN_EST + INIT_HDR_RATE);
+      static const uint32_t kInitRate = INIT_BN_EST + INIT_HDR_RATE;
+      /* recBwInv = 1 / kInitRate in Q26 (Q30??)*/
+      bweStr->recBwInv = (1073741824 + kInitRate / 2) / kInitRate;
     }
 
     /* reset time-since-update counter */
@@ -854,13 +854,14 @@ uint16_t WebRtcIsacfix_GetMinBytes(RateModel *State,
   } else {
     /* handle burst */
     if (State->BurstCounter) {
-      if (State->StillBuffered < WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL((512 - WEBRTC_SPL_DIV(512, BURST_LEN)), DelayBuildUp), 9)) {
+      if (State->StillBuffered <
+          (((512 - 512 / BURST_LEN) * DelayBuildUp) >> 9)) {
         /* max bps derived from BottleNeck and DelayBuildUp values */
-        inv_Q12 = WEBRTC_SPL_DIV(4096, WEBRTC_SPL_MUL(BURST_LEN, FrameSamples));
+        inv_Q12 = 4096 / (BURST_LEN * FrameSamples);
         MinRate = WEBRTC_SPL_MUL(512 + WEBRTC_SPL_MUL(SAMPLES_PER_MSEC, WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(DelayBuildUp, inv_Q12), 3)), BottleNeck);
       } else {
         /* max bps derived from StillBuffered and DelayBuildUp values */
-        inv_Q12 = WEBRTC_SPL_DIV(4096, FrameSamples);
+        inv_Q12 = 4096 / FrameSamples;
         if (DelayBuildUp > State->StillBuffered) {
           MinRate = WEBRTC_SPL_MUL(512 + WEBRTC_SPL_MUL(SAMPLES_PER_MSEC, WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(DelayBuildUp - State->StillBuffered, inv_Q12), 3)), BottleNeck);
         } else if ((den = WEBRTC_SPL_MUL(SAMPLES_PER_MSEC, (State->StillBuffered - DelayBuildUp))) >= FrameSamples) {
@@ -895,10 +896,10 @@ uint16_t WebRtcIsacfix_GetMinBytes(RateModel *State,
 
   /* keep track of when bottle neck was last exceeded by at least 1% */
   //517/512 ~ 1.01
-  if (WEBRTC_SPL_DIV(WEBRTC_SPL_MUL(StreamSize, FS8), FrameSamples) > (WEBRTC_SPL_MUL(517, BottleNeck) >> 9)) {
+  if ((StreamSize * (int32_t)FS8) / FrameSamples > (517 * BottleNeck) >> 9) {
     if (State->PrevExceed) {
       /* bottle_neck exceded twice in a row, decrease ExceedAgo */
-      State->ExceedAgo -= WEBRTC_SPL_DIV(BURST_INTERVAL, BURST_LEN - 1);
+      State->ExceedAgo -= BURST_INTERVAL / (BURST_LEN - 1);
       if (State->ExceedAgo < 0) {
         State->ExceedAgo = 0;
       }
@@ -922,7 +923,7 @@ uint16_t WebRtcIsacfix_GetMinBytes(RateModel *State,
 
 
   /* Update buffer delay */
-  TransmissionTime = (int16_t)WEBRTC_SPL_DIV(WEBRTC_SPL_MUL(StreamSize, 8000), BottleNeck); /* ms */
+  TransmissionTime = (StreamSize * 8000) / BottleNeck; /* ms */
   State->StillBuffered += TransmissionTime;
   State->StillBuffered -= (int16_t)WEBRTC_SPL_RSHIFT_W16(FrameSamples, 4); //>>4 = SAMPLES_PER_MSEC /* ms */
   if (State->StillBuffered < 0) {
@@ -945,13 +946,12 @@ void WebRtcIsacfix_UpdateRateModel(RateModel *State,
                                    const int16_t FrameSamples,     /* samples per frame */
                                    const int16_t BottleNeck)       /* bottle neck rate; excl headers (bps) */
 {
-  int16_t TransmissionTime;
+  const int16_t TransmissionTime = (StreamSize * 8000) / BottleNeck; /* ms */
 
   /* avoid the initial "high-rate" burst */
   State->InitCounter = 0;
 
   /* Update buffer delay */
-  TransmissionTime = (int16_t)WEBRTC_SPL_DIV(WEBRTC_SPL_MUL(WEBRTC_SPL_MUL(StreamSize, 8), 1000), BottleNeck); /* ms */
   State->StillBuffered += TransmissionTime;
   State->StillBuffered -= (int16_t)WEBRTC_SPL_RSHIFT_W16(FrameSamples, 4); /* ms */
   if (State->StillBuffered < 0) {
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/decode.c b/webrtc/modules/audio_coding/codecs/isac/fix/source/decode.c
index 0f5c81938..263f88a4f 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/decode.c
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/decode.c
@@ -59,8 +59,8 @@ int16_t WebRtcIsacfix_DecodeImpl(int16_t *signal_out16,
 
   int16_t frame_nb; /* counter */
-  int16_t frame_mode; /* 0 for 20ms and 30ms, 1 for 60ms */
-  int16_t processed_samples;
+  int16_t frame_mode; /* 0 for 30ms, 1 for 60ms */
+  static const int16_t kProcessedSamples = 480; /* 480 (for both 30, 60 ms) */
 
   /* PLC */
   int16_t overlapWin[ 240 ];
@@ -76,14 +76,14 @@ int16_t WebRtcIsacfix_DecodeImpl(int16_t *signal_out16,
   if (err<0)  // error check
     return err;
 
-  frame_mode = (int16_t)WEBRTC_SPL_DIV(*current_framesamples, MAX_FRAMESAMPLES); /* 0, or 1 */
-  processed_samples = (int16_t)WEBRTC_SPL_DIV(*current_framesamples, frame_mode+1); /* either 320 (20ms) or 480 (30, 60 ms) */
+  frame_mode = *current_framesamples / MAX_FRAMESAMPLES; /* 0, or 1 */
 
   err = WebRtcIsacfix_DecodeSendBandwidth(&ISACdec_obj->bitstr_obj, &BWno);
   if (err<0)  // error check
     return err;
 
-  /* one loop if it's one frame (20 or 30ms), 2 loops if 2 frames bundled together (60ms) */
+  /* one loop if it's one frame (30ms), two loops if two frames bundled together
+   * (60ms) */
   for (frame_nb = 0; frame_nb <= frame_mode; frame_nb++) {
 
     /* decode & dequantize pitch parameters */
@@ -210,7 +210,10 @@ int16_t WebRtcIsacfix_DecodeImpl(int16_t *signal_out16,
       Vector_Word16_2[k] = tmp_2;
     }
 
-    WebRtcIsacfix_FilterAndCombine1(Vector_Word16_1, Vector_Word16_2, signal_out16 + frame_nb * processed_samples, &ISACdec_obj->postfiltbankstr_obj);
+    WebRtcIsacfix_FilterAndCombine1(Vector_Word16_1,
+                                    Vector_Word16_2,
+                                    signal_out16 + frame_nb * kProcessedSamples,
+                                    &ISACdec_obj->postfiltbankstr_obj);
 
   }
   return len;
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding.c b/webrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding.c
index 435f57232..27d1c1fc1 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding.c
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding.c
@@ -350,11 +350,11 @@ static void CalcRootInvArSpec(const int16_t *ARCoefQ12,
     if(in_sqrt<0)
      in_sqrt=-in_sqrt;
 
-    newRes = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_DIV(in_sqrt, res) + res, 1);
+    newRes = (in_sqrt / res + res) >> 1;
    do
    {
      res = newRes;
-      newRes = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_DIV(in_sqrt, res) + res, 1);
+      newRes = (in_sqrt / res + res) >> 1;
    } while (newRes != res && i-- > 0);
 
    CurveQ8[k] = (int16_t)newRes;
@@ -368,11 +368,11 @@ static void CalcRootInvArSpec(const int16_t *ARCoefQ12,
     if(in_sqrt<0)
      in_sqrt=-in_sqrt;
 
-    newRes = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_DIV(in_sqrt, res) + res, 1);
+    newRes = (in_sqrt / res + res) >> 1;
    do
    {
      res = newRes;
-      newRes = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_DIV(in_sqrt, res) + res, 1);
+      newRes = (in_sqrt / res + res) >> 1;
    } while (newRes != res && i-- > 0);
 
    CurveQ8[k] = (int16_t)newRes;
diff --git a/webrtc/modules/audio_processing/agc/analog_agc.c b/webrtc/modules/audio_processing/agc/analog_agc.c
index 4f110cc20..0376dae51 100644
--- a/webrtc/modules/audio_processing/agc/analog_agc.c
+++ b/webrtc/modules/audio_processing/agc/analog_agc.c
@@ -208,7 +208,7 @@ int WebRtcAgc_AddMic(void *state, int16_t *in_mic, int16_t *in_mic_H,
         tmp16 = (int16_t)(stt->micVol - stt->maxAnalog);
         tmp32 = WEBRTC_SPL_MUL_16_16(GAIN_TBL_LEN - 1, tmp16);
         tmp16 = (int16_t)(stt->maxLevel - stt->maxAnalog);
-        targetGainIdx = (uint16_t)WEBRTC_SPL_DIV(tmp32, tmp16);
+        targetGainIdx = tmp32 / tmp16;
         assert(targetGainIdx < GAIN_TBL_LEN);
 
         /* Increment through the table towards the target gain.
@@ -1078,8 +1078,7 @@ int32_t WebRtcAgc_ProcessAnalog(void *state, int32_t inMicLevel,
 
             tmp32 = WEBRTC_SPL_LSHIFT_W32(inMicLevelTmp - stt->minLevel, 14);
             if (stt->maxInit != stt->minLevel)
             {
-                volNormFIX = (int16_t)WEBRTC_SPL_DIV(tmp32,
-                                   (stt->maxInit - stt->minLevel));
+                volNormFIX = tmp32 / (stt->maxInit - stt->minLevel);
             }
 
             /* Find correct curve */
@@ -1138,8 +1137,7 @@ int32_t WebRtcAgc_ProcessAnalog(void *state, int32_t inMicLevel,
 
             tmp32 = WEBRTC_SPL_LSHIFT_W32(inMicLevelTmp - stt->minLevel, 14);
             if (stt->maxInit != stt->minLevel)
            {
-                volNormFIX = (int16_t)WEBRTC_SPL_DIV(tmp32,
-                                   (stt->maxInit - stt->minLevel));
+                volNormFIX = tmp32 / (stt->maxInit - stt->minLevel);
            }
 
             /* Find correct curve */
diff --git a/webrtc/modules/audio_processing/agc/digital_agc.c b/webrtc/modules/audio_processing/agc/digital_agc.c
index 7b515a57d..b15b6e39d 100644
--- a/webrtc/modules/audio_processing/agc/digital_agc.c
+++ b/webrtc/modules/audio_processing/agc/digital_agc.c
@@ -210,7 +210,7 @@ int32_t WebRtcAgc_CalculateGainTable(int32_t *gainTable, // Q16
         {
             numFIX += WEBRTC_SPL_RSHIFT_W32(tmp32no1, 1);
         }
-        y32 = WEBRTC_SPL_DIV(numFIX, tmp32no1); // in Q14
+        y32 = numFIX / tmp32no1; // in Q14
         if (limiterEnable && (i < limiterIdx))
         {
             tmp32 = WEBRTC_SPL_MUL_16_U16(i - 1, kLog10_2); // Q14
diff --git a/webrtc/modules/audio_processing/ns/nsx_core.c b/webrtc/modules/audio_processing/ns/nsx_core.c
index 41244d4c9..5a88c12fb 100644
--- a/webrtc/modules/audio_processing/ns/nsx_core.c
+++ b/webrtc/modules/audio_processing/ns/nsx_core.c
@@ -1493,8 +1493,7 @@ void WebRtcNsx_DataSynthesis(NsxInst_t* inst, short* outFrame) {
     }
 
     assert(inst->energyIn > 0);
-    energyRatio = (int16_t)WEBRTC_SPL_DIV(energyOut
-        + WEBRTC_SPL_RSHIFT_W32(inst->energyIn, 1), inst->energyIn); // Q8
+    energyRatio = (energyOut + inst->energyIn / 2) / inst->energyIn; // Q8
     // Limit the ratio to [0, 1] in Q8, i.e., [0, 256]
     energyRatio = WEBRTC_SPL_SAT(256, energyRatio, 0);
diff --git a/webrtc/modules/audio_processing/ns/nsx_core_c.c b/webrtc/modules/audio_processing/ns/nsx_core_c.c
index de92441ac..b50d4f858 100644
--- a/webrtc/modules/audio_processing/ns/nsx_core_c.c
+++ b/webrtc/modules/audio_processing/ns/nsx_core_c.c
@@ -258,8 +258,8 @@ void WebRtcNsx_SpeechNoiseProb(NsxInst_t* inst,
       tmp32no1 = WEBRTC_SPL_LSHIFT_W32((int32_t)inst->priorNonSpeechProb, 8); // Q22
-      nonSpeechProbFinal[i] = (uint16_t)WEBRTC_SPL_DIV(tmp32no1,
-          (int32_t)inst->priorNonSpeechProb + invLrtFX); // Q8
+      nonSpeechProbFinal[i] = tmp32no1 /
+          (inst->priorNonSpeechProb + invLrtFX); // Q8
     }
   }
 }