Prepare to roll Chromium to 149181.

- This roll brings in VS2010 by default. The buildbots need updating (issue710).
- We'll roll to 149181 later (past current Canary) to fix a Mac gyp issue:
  https://chromiumcodereview.appspot.com/10824105
- Chromium is now using a later libvpx than us. We should investigate rolling our standalone build.
- Fix set-but-unused warning.
- Fix -Wunused-private-field warnings on Mac.

TBR=kjellander@webrtc.org
BUG=issue709,issue710
TEST=trybots

Review URL: https://webrtc-codereview.appspot.com/709007

git-svn-id: http://webrtc.googlecode.com/svn/trunk@2544 4adac7df-926f-26a2-2b94-8c16560cd09d
@@ -81,8 +81,6 @@ _processEventB(NULL),
 _apiEventB(NULL),
 _codecCntrA(0),
 _codecCntrB(0),
-_testCntrA(1),
-_testCntrB(1),
 _thereIsEncoderA(false),
 _thereIsEncoderB(false),
 _thereIsDecoderA(false),
@@ -118,7 +116,7 @@ _testNumB(1)
 _receiveVADActivityA[n] = 0;
 _receiveVADActivityB[n] = 0;
 }

 _movingDot[40] = '\0';

 for(int n = 0; n <40; n++)
@@ -172,7 +170,7 @@ APITest::~APITest()
 // return _outFile.Open(fileName, frequencyHz, "wb");
 //}

 WebRtc_Word16
 APITest::SetUp()
 {
 _acmA = AudioCodingModule::Create(1);
@@ -260,7 +258,7 @@ APITest::SetUp()

 char fileName[500];
 WebRtc_UWord16 frequencyHz;

 printf("\n\nAPI Test\n");
 printf("========\n");
 printf("Hit enter to accept the default values indicated in []\n\n");
@@ -301,7 +299,7 @@ APITest::SetUp()
 _channel_B2A = new Channel(1);
 CHECK_ERROR_MT(_acmB->RegisterTransportCallback(_channel_B2A));
 _channel_B2A->RegisterReceiverACM(_acmA);

 //--- EVENT TIMERS
 // A
 _pullEventA = EventWrapper::Create();
@@ -321,7 +319,7 @@ APITest::SetUp()
 _outFreqHzB = _outFileB.SamplingFrequency();


 //Trace::SetEncryptedTraceFile("ACMAPITestEncrypted.txt");

 char print[11];

@@ -357,29 +355,29 @@ APITest::SetUp()
 #endif
 _vadCallbackA = new VADCallback;
 _vadCallbackB = new VADCallback;

 return 0;
 }

 bool
 APITest::PushAudioThreadA(void* obj)
 {
 return static_cast<APITest*>(obj)->PushAudioRunA();
 }

 bool
 APITest::PushAudioThreadB(void* obj)
 {
 return static_cast<APITest*>(obj)->PushAudioRunB();
 }

 bool
 APITest::PullAudioThreadA(void* obj)
 {
 return static_cast<APITest*>(obj)->PullAudioRunA();
 }

 bool
 APITest::PullAudioThreadB(void* obj)
 {
 return static_cast<APITest*>(obj)->PullAudioRunB();
@@ -409,7 +407,7 @@ APITest::APIThreadB(void* obj)
 return static_cast<APITest*>(obj)->APIRunB();
 }

 bool
 APITest::PullAudioRunA()
 {
 _pullEventA->Wait(100);
@@ -437,7 +435,7 @@ APITest::PullAudioRunA()
 return true;
 }

 bool
 APITest::PullAudioRunB()
 {
 _pullEventB->Wait(100);
@@ -462,11 +460,11 @@ APITest::PullAudioRunB()
 _outFileB.Write10MsData(audioFrame);
 }
 _receiveVADActivityB[(int)audioFrame.vad_activity_]++;
 }
 return true;
 }

 bool
 APITest::PushAudioRunA()
 {
 _pushEventA->Wait(100);
@@ -487,7 +485,7 @@ APITest::PushAudioRunA()
 return true;
 }

 bool
 APITest::PushAudioRunB()
 {
 _pushEventB->Wait(100);
@@ -567,7 +565,7 @@ APITest::RunTest(char thread)
 {
 _testNumA = (_testNumB + 1 + (rand() % 6)) % 7;
 testNum = _testNumA;

 _movingDot[_dotPositionA] = ' ';
 if(_dotPositionA == 0)
 {
@@ -577,7 +575,7 @@ APITest::RunTest(char thread)
 {
 _dotMoveDirectionA = -1;
 }
 _dotPositionA += _dotMoveDirectionA;
 _movingDot[_dotPositionA] = (_dotMoveDirectionA > 0)? '>':'<';
 }
 else
@@ -594,7 +592,7 @@ APITest::RunTest(char thread)
 {
 _dotMoveDirectionB = -1;
 }
 _dotPositionB += _dotMoveDirectionB;
 _movingDot[_dotPositionB] = (_dotMoveDirectionB > 0)? '>':'<';
 }
 //fprintf(stderr, "%c: %d \n", thread, testNum);
@@ -617,7 +615,7 @@ APITest::RunTest(char thread)
 TestDelay('A');
 break;
 case 3:
 TestSendVAD('A');
 break;
 case 4:
 TestRegisteration('A');
@@ -641,7 +639,7 @@ APITest::RunTest(char thread)

 bool
 APITest::APIRunA()
 {
 _apiEventA->Wait(50);

 bool randomTest;
@@ -664,7 +662,7 @@ APITest::APIRunA()
 TestDelay('A');
 }
 // VAD TEST
 TestSendVAD('A');
 TestRegisteration('A');
 TestReceiverVAD('A');
 #ifdef WEBRTC_DTMF_DETECTION
@@ -676,7 +674,7 @@ APITest::APIRunA()

 bool
 APITest::APIRunB()
 {
 _apiEventB->Wait(50);
 bool randomTest;
 {
@@ -688,7 +686,7 @@ APITest::APIRunB()
 {
 RunTest('B');
 }

 return true;
 }

@@ -700,46 +698,46 @@ APITest::Perform()
 //--- THREADS
 // A
 // PUSH
 ThreadWrapper* myPushAudioThreadA = ThreadWrapper::CreateThread(PushAudioThreadA,
 this, kNormalPriority, "PushAudioThreadA");
 CHECK_THREAD_NULLITY(myPushAudioThreadA, "Unable to start A::PUSH thread");
 // PULL
 ThreadWrapper* myPullAudioThreadA = ThreadWrapper::CreateThread(PullAudioThreadA,
 this, kNormalPriority, "PullAudioThreadA");
 CHECK_THREAD_NULLITY(myPullAudioThreadA, "Unable to start A::PULL thread");
 // Process
 ThreadWrapper* myProcessThreadA = ThreadWrapper::CreateThread(ProcessThreadA,
 this, kNormalPriority, "ProcessThreadA");
 CHECK_THREAD_NULLITY(myProcessThreadA, "Unable to start A::Process thread");
 // API
 ThreadWrapper* myAPIThreadA = ThreadWrapper::CreateThread(APIThreadA,
 this, kNormalPriority, "APIThreadA");
 CHECK_THREAD_NULLITY(myAPIThreadA, "Unable to start A::API thread");
 // B
 // PUSH
 ThreadWrapper* myPushAudioThreadB = ThreadWrapper::CreateThread(PushAudioThreadB,
 this, kNormalPriority, "PushAudioThreadB");
 CHECK_THREAD_NULLITY(myPushAudioThreadB, "Unable to start B::PUSH thread");
 // PULL
 ThreadWrapper* myPullAudioThreadB = ThreadWrapper::CreateThread(PullAudioThreadB,
 this, kNormalPriority, "PullAudioThreadB");
 CHECK_THREAD_NULLITY(myPullAudioThreadB, "Unable to start B::PULL thread");
 // Process
 ThreadWrapper* myProcessThreadB = ThreadWrapper::CreateThread(ProcessThreadB,
 this, kNormalPriority, "ProcessThreadB");
 CHECK_THREAD_NULLITY(myProcessThreadB, "Unable to start B::Process thread");
 // API
 ThreadWrapper* myAPIThreadB = ThreadWrapper::CreateThread(APIThreadB,
 this, kNormalPriority, "APIThreadB");
 CHECK_THREAD_NULLITY(myAPIThreadB, "Unable to start B::API thread");


 //_apiEventA->StartTimer(true, 5000);
 //_apiEventB->StartTimer(true, 5000);

 _processEventA->StartTimer(true, 10);
 _processEventB->StartTimer(true, 10);

 _pullEventA->StartTimer(true, 10);
 _pullEventB->StartTimer(true, 10);

@@ -764,7 +762,7 @@ APITest::Perform()

 //completeEvent->Wait(0xFFFFFFFF);//(unsigned long)((unsigned long)TEST_DURATION_SEC * (unsigned long)1000));
 delete completeEvent;

 myPushAudioThreadA->Stop();
 myPullAudioThreadA->Stop();
 myProcessThreadA->Stop();
@@ -802,12 +800,12 @@ APITest::CheckVADStatus(char side)
 _acmA->RegisterVADCallback(NULL);
 _vadCallbackA->Reset();
 _acmA->RegisterVADCallback(_vadCallbackA);

 if(!_randomTest)
 {
 if(_verbose)
 {
 fprintf(stdout, "DTX %3s, VAD %3s, Mode %d",
 dtxEnabled? "ON":"OFF",
 vadEnabled? "ON":"OFF",
 (int)vadMode);
@@ -818,7 +816,7 @@ APITest::CheckVADStatus(char side)
 else
 {
 Wait(5000);
 fprintf(stdout, "DTX %3s, VAD %3s, Mode %d => bit-rate %3.0f kbps\n",
 dtxEnabled? "ON":"OFF",
 vadEnabled? "ON":"OFF",
 (int)vadMode,
@@ -847,12 +845,12 @@ APITest::CheckVADStatus(char side)
 _acmB->RegisterVADCallback(NULL);
 _vadCallbackB->Reset();
 _acmB->RegisterVADCallback(_vadCallbackB);

 if(!_randomTest)
 {
 if(_verbose)
 {
 fprintf(stdout, "DTX %3s, VAD %3s, Mode %d",
 dtxEnabled? "ON":"OFF",
 vadEnabled? "ON":"OFF",
 (int)vadMode);
@@ -863,7 +861,7 @@ APITest::CheckVADStatus(char side)
 else
 {
 Wait(5000);
 fprintf(stdout, "DTX %3s, VAD %3s, Mode %d => bit-rate %3.0f kbps\n",
 dtxEnabled? "ON":"OFF",
 vadEnabled? "ON":"OFF",
 (int)vadMode,
@@ -898,7 +896,7 @@ APITest::TestDelay(char side)

 WebRtc_UWord32 inTimestamp = 0;
 WebRtc_UWord32 outTimestamp = 0;
 double estimDelay = 0;

 double averageEstimDelay = 0;
 double averageDelay = 0;
@@ -923,7 +921,7 @@ APITest::TestDelay(char side)
 CHECK_ERROR_MT(myACM->SetMinimumPlayoutDelay(*myMinDelay));


 inTimestamp = myChannel->LastInTimestamp();
 CHECK_ERROR_MT(myACM->PlayoutTimestamp(outTimestamp));

 if(!_randomTest)
@@ -935,11 +933,11 @@ APITest::TestDelay(char side)
 {
 myEvent->Wait(1000);

 inTimestamp = myChannel->LastInTimestamp();
 CHECK_ERROR_MT(myACM->PlayoutTimestamp(outTimestamp));

 //std::cout << outTimestamp << std::endl << std::flush;
 estimDelay = (double)((WebRtc_UWord32)(inTimestamp - outTimestamp)) /
 ((double)myACM->ReceiveFrequency() / 1000.0);

 estimDelayCB.Update(estimDelay);
@@ -970,7 +968,7 @@ APITest::TestDelay(char side)
 }

 *myMinDelay = (rand() % 1000) + 1;

 ACMNetworkStatistics networkStat;
 CHECK_ERROR_MT(myACM->NetworkStatistics(networkStat));

@@ -978,12 +976,12 @@ APITest::TestDelay(char side)
 {
 fprintf(stdout, "\n\nJitter Statistics at Side %c\n", side);
 fprintf(stdout, "--------------------------------------\n");
 fprintf(stdout, "buffer-size............. %d\n", networkStat.currentBufferSize);
 fprintf(stdout, "Preferred buffer-size... %d\n", networkStat.preferredBufferSize);
 fprintf(stdout, "Peaky jitter mode........%d\n", networkStat.jitterPeaksFound);
 fprintf(stdout, "packet-size rate........ %d\n", networkStat.currentPacketLossRate);
 fprintf(stdout, "discard rate............ %d\n", networkStat.currentDiscardRate);
 fprintf(stdout, "expand rate............. %d\n", networkStat.currentExpandRate);
 fprintf(stdout, "Preemptive rate......... %d\n", networkStat.currentPreemptiveRate);
 fprintf(stdout, "Accelerate rate......... %d\n", networkStat.currentAccelerateRate);
 fprintf(stdout, "Clock-drift............. %d\n", networkStat.clockDriftPPM);
@@ -1020,7 +1018,7 @@ APITest::TestRegisteration(char sendSide)
 fprintf(stdout, " Unregister/register Receive Codec\n");
 fprintf(stdout, "---------------------------------------------------------\n");
 }

 switch(sendSide)
 {
 case 'A':
@@ -1179,7 +1177,7 @@ APITest::TestPlayout(char receiveSide)

 CHECK_ERROR_MT(receiveFreqHz);
 CHECK_ERROR_MT(playoutFreqHz);

 char bgnString[25];
 switch(*bgnMode)
 {
@@ -1400,7 +1398,7 @@ APITest::TestSendVAD(char side)
 // Fault Test
 CHECK_PROTECTED_MT(myACM->SetVAD(false, true, (ACMVADMode)-1));
 CHECK_PROTECTED_MT(myACM->SetVAD(false, true, (ACMVADMode)4));



 }
@@ -1477,14 +1475,14 @@ APITest::ChangeCodec(char side)
 myChannel = _channel_B2A;
 }

 myACM->ResetEncoder();
 Wait(100);

 // Register the next codec
 do
 {
 *codecCntr = (*codecCntr < AudioCodingModule::NumberOfCodecs() - 1)?
 (*codecCntr + 1):0;

 if(*codecCntr == 0)
 {
@@ -1494,7 +1492,7 @@ APITest::ChangeCodec(char side)
 *thereIsEncoder = false;
 }
 CHECK_ERROR_MT(myACM->InitializeSender());
 Wait(1000);

 // After Initialization CN is lost, re-register them
 if(AudioCodingModule::Codec("CN", myCodec, 8000, 1) >= 0)
@@ -1541,8 +1539,8 @@ APITest::ChangeCodec(char side)
 Wait(500);
 }


 void
 APITest::LookForDTMF(char side)
 {
 if(!_randomTest)
@@ -1550,11 +1548,11 @@ APITest::LookForDTMF(char side)
 fprintf(stdout, "\n\nLooking for DTMF Signal in Side %c\n", side);
 fprintf(stdout, "----------------------------------------\n");
 }

 if(side == 'A')
 {
 _acmB->RegisterIncomingMessagesCallback(NULL);
 _acmA->RegisterIncomingMessagesCallback(_dtmfCallback);
 Wait(1000);
 _acmA->RegisterIncomingMessagesCallback(NULL);
 }
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
@@ -30,7 +30,7 @@ public:
 void Perform();
 private:
 WebRtc_Word16 SetUp();

 static bool PushAudioThreadA(void* obj);
 static bool PullAudioThreadA(void* obj);
 static bool ProcessThreadA(void* obj);
@@ -56,24 +56,24 @@ private:
 // set/get receiver VAD status & mode.
 void TestReceiverVAD(char side);

 //
 void TestSendVAD(char side);

 void CurrentCodec(char side);

 void ChangeCodec(char side);

 void Wait(WebRtc_UWord32 waitLengthMs);

 void LookForDTMF(char side);

 void RunTest(char thread);

 bool PushAudioRunA();
 bool PullAudioRunA();
 bool ProcessRunA();
 bool APIRunA();

 bool PullAudioRunB();
 bool PushAudioRunB();
 bool ProcessRunB();
@@ -84,11 +84,11 @@ private:
 //--- ACMs
 AudioCodingModule* _acmA;
 AudioCodingModule* _acmB;

 //--- Channels
 Channel* _channel_A2B;
 Channel* _channel_B2A;

 //--- I/O files
 // A
 PCMFile _inFileA;
@@ -96,13 +96,13 @@ private:
 // B
 PCMFile _outFileB;
 PCMFile _inFileB;

 //--- I/O params
 // A
 WebRtc_Word32 _outFreqHzA;
 // B
 WebRtc_Word32 _outFreqHzB;

 // Should we write to file.
 // we might skip writing to file if we
 // run the test for a long time.
@@ -123,10 +123,6 @@ private:
 WebRtc_UWord8 _codecCntrA;
 WebRtc_UWord8 _codecCntrB;

-// keep track of tests
-WebRtc_UWord8 _testCntrA;
-WebRtc_UWord8 _testCntrB;
-
 // Is set to true if there is no encoder in either side
 bool _thereIsEncoderA;
 bool _thereIsEncoderB;
@@ -144,7 +140,7 @@ private:
 WebRtc_Word32 _minDelayA;
 WebRtc_Word32 _minDelayB;
 bool _payloadUsed[32];

 AudioPlayoutMode _playoutModeA;
 AudioPlayoutMode _playoutModeB;

@@ -155,14 +151,14 @@ private:
 int _receiveVADActivityA[3];
 int _receiveVADActivityB[3];
 bool _verbose;

 int _dotPositionA;
 int _dotMoveDirectionA;
 int _dotPositionB;
 int _dotMoveDirectionB;

 char _movingDot[41];

 DTMFDetector* _dtmfCallback;
 VADCallback* _vadCallbackA;
 VADCallback* _vadCallbackB;
@@ -19,12 +19,12 @@

 namespace webrtc {

 WebRtc_Word32
 Channel::SendData(
 const FrameType frameType,
 const WebRtc_UWord8 payloadType,
 const WebRtc_UWord32 timeStamp,
 const WebRtc_UWord8* payloadData,
 const WebRtc_UWord16 payloadSize,
 const RTPFragmentationHeader* fragmentation)
 {
@@ -104,7 +104,7 @@ Channel::SendData(
 }
 }
 }

 _channelCritSect->Enter();
 if(_saveBitStream)
 {
@@ -135,9 +135,9 @@ Channel::SendData(
 return status;
 }

 void
 Channel::CalcStatistics(
 WebRtcRTPHeader& rtpInfo,
 WebRtc_UWord16 payloadSize)
 {
 int n;
@@ -146,7 +146,7 @@ Channel::CalcStatistics(
 {
 // payload-type is changed.
 // we have to terminate the calculations on the previous payload type
 // we ignore the last packet in that payload type just to make things
 // easier.
 for(n = 0; n < MAX_NUM_PAYLOADS; n++)
 {
@@ -180,12 +180,12 @@ Channel::CalcStatistics(
 assert(lastFrameSizeSample > 0);
 int k = 0;
 while((currentPayloadStr->frameSizeStats[k].frameSizeSample !=
 lastFrameSizeSample) &&
 (currentPayloadStr->frameSizeStats[k].frameSizeSample != 0))
 {
 k++;
 }
 ACMTestFrameSizeStats* currentFrameSizeStats =
 &(currentPayloadStr->frameSizeStats[k]);
 currentFrameSizeStats->frameSizeSample = (WebRtc_Word16)lastFrameSizeSample;

@@ -197,15 +197,15 @@ Channel::CalcStatistics(
 // increment the total number of bytes (this is based on
 // the previous payload we don't know the frame-size of
 // the current payload.
 currentFrameSizeStats->totalPayloadLenByte +=
 currentPayloadStr->lastPayloadLenByte;
 // store the maximum payload-size (this is based on
 // the previous payload we don't know the frame-size of
 // the current payload.
 if(currentFrameSizeStats->maxPayloadLen <
 currentPayloadStr->lastPayloadLenByte)
 {
 currentFrameSizeStats->maxPayloadLen =
 currentPayloadStr->lastPayloadLenByte;
 }
 // store the current values for the next time
@@ -247,7 +247,6 @@ _leftChannel(true),
 _lastInTimestamp(0),
 _packetLoss(0),
 _useFECTestWithPacketLoss(false),
-_chID(chID),
 _beginTime(TickTime::MillisecondTimestamp()),
 _totalBytes(0)
 {
@@ -270,7 +269,7 @@ _totalBytes(0)
 {
 _saveBitStream = true;
 char bitStreamFileName[500];
 sprintf(bitStreamFileName, "bitStream_%d.dat", chID);
 _bitStreamFile = fopen(bitStreamFileName, "wb");
 }
 else
@@ -284,14 +283,14 @@ Channel::~Channel()
 delete _channelCritSect;
 }

 void
 Channel::RegisterReceiverACM(AudioCodingModule* acm)
 {
 _receiverACM = acm;
 return;
 }

 void
 Channel::ResetStats()
 {
 int n;
@@ -316,7 +315,7 @@ Channel::ResetStats()
 _channelCritSect->Leave();
 }

 WebRtc_Word16
 Channel::Stats(CodecInst& codecInst, ACMTestPayloadStats& payloadStats)
 {
 _channelCritSect->Enter();
@@ -342,12 +341,12 @@ Channel::Stats(CodecInst& codecInst, ACMTestPayloadStats& payloadStats)
 _channelCritSect->Leave();
 return 0;
 }
 payloadStats.frameSizeStats[n].usageLenSec =
 (double)payloadStats.frameSizeStats[n].totalEncodedSamples
 / (double)codecInst.plfreq;

 payloadStats.frameSizeStats[n].rateBitPerSec =
 payloadStats.frameSizeStats[n].totalPayloadLenByte * 8 /
 payloadStats.frameSizeStats[n].usageLenSec;

 }
@@ -355,7 +354,7 @@ Channel::Stats(CodecInst& codecInst, ACMTestPayloadStats& payloadStats)
 return 0;
 }

 void
 Channel::Stats(WebRtc_UWord32* numPackets)
 {
 _channelCritSect->Enter();
@@ -375,18 +374,18 @@ Channel::Stats(WebRtc_UWord32* numPackets)
 {
 break;
 }
 numPackets[k] +=
 _payloadStats[k].frameSizeStats[n].numPackets;
 }
 }
 _channelCritSect->Leave();
 }

 void
 Channel::Stats(WebRtc_UWord8* payloadType, WebRtc_UWord32* payloadLenByte)
 {
 _channelCritSect->Enter();

 int k;
 int n;
 memset(payloadLenByte, 0, MAX_NUM_PAYLOADS * sizeof(WebRtc_UWord32));
@@ -418,7 +417,7 @@ Channel::PrintStats(CodecInst& codecInst)
 {
 ACMTestPayloadStats payloadStats;
 Stats(codecInst, payloadStats);
 printf("%s %d kHz\n",
 codecInst.plname,
 codecInst.plfreq / 1000);
 printf("=====================================================\n");
@@ -435,19 +434,19 @@ Channel::PrintStats(CodecInst& codecInst)
 {
 break;
 }
 printf("Frame-size.................... %d samples\n",
 payloadStats.frameSizeStats[k].frameSizeSample);
 printf("Average Rate.................. %.0f bits/sec\n",
 payloadStats.frameSizeStats[k].rateBitPerSec);
 printf("Maximum Payload-Size.......... %d Bytes\n",
 payloadStats.frameSizeStats[k].maxPayloadLen);
 printf("Maximum Instantaneous Rate.... %.0f bits/sec\n",
 ((double)payloadStats.frameSizeStats[k].maxPayloadLen * 8.0 *
 (double)codecInst.plfreq) /
 (double)payloadStats.frameSizeStats[k].frameSizeSample);
 printf("Number of Packets............. %u\n",
 (unsigned int)payloadStats.frameSizeStats[k].numPackets);
 printf("Duration...................... %0.3f sec\n\n",
 payloadStats.frameSizeStats[k].usageLenSec);

 }
@@ -473,6 +472,6 @@ Channel::BitRate()
 rate = ((double)_totalBytes * 8.0)/ (double)(currTime - _beginTime);
 _channelCritSect->Leave();
 return rate;
 }

 } // namespace webrtc
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
@@ -32,7 +32,7 @@ struct ACMTestFrameSizeStats
 WebRtc_UWord64 totalEncodedSamples;
 double rateBitPerSec;
 double usageLenSec;

 };

 struct ACMTestPayloadStats
@@ -56,36 +56,36 @@ public:
 const FrameType frameType,
 const WebRtc_UWord8 payloadType,
 const WebRtc_UWord32 timeStamp,
 const WebRtc_UWord8* payloadData,
 const WebRtc_UWord16 payloadSize,
 const RTPFragmentationHeader* fragmentation);

 void RegisterReceiverACM(
 AudioCodingModule *acm);

 void ResetStats();

 WebRtc_Word16 Stats(
 CodecInst& codecInst,
 ACMTestPayloadStats& payloadStats);

 void Stats(
 WebRtc_UWord32* numPackets);

 void Stats(
 WebRtc_UWord8* payloadLenByte,
 WebRtc_UWord32* payloadType);

 void PrintStats(
 CodecInst& codecInst);

 void SetIsStereo(bool isStereo)
 {
 _isStereo = isStereo;
 }

 WebRtc_UWord32 LastInTimestamp();

 void SetFECTestWithPacketLoss(bool usePacketLoss)
 {
 _useFECTestWithPacketLoss = usePacketLoss;
@@ -115,7 +115,6 @@ private:
 // FEC Test variables
 WebRtc_Word16 _packetLoss;
 bool _useFECTestWithPacketLoss;
-WebRtc_Word16 _chID;
 WebRtc_UWord64 _beginTime;
 WebRtc_UWord64 _totalBytes;
 };
@@ -50,8 +50,6 @@ Sender::Sender()
 : _acm(NULL),
 _pcmFile(),
 _audioFrame(),
-_payloadSize(0),
-_timeStamp(0),
 _packetization(NULL) {
 }

@@ -61,8 +61,6 @@ class Sender {
 AudioCodingModule* _acm;
 PCMFile _pcmFile;
 AudioFrame _audioFrame;
-WebRtc_UWord16 _payloadSize;
-WebRtc_UWord32 _timeStamp;
 TestPacketization* _packetization;
 };

@@ -81,7 +79,6 @@ class Receiver {

 private:
 AudioCodingModule* _acm;
-bool _rtpEOF;
 RTPStream* _rtpStream;
 PCMFile _pcmFile;
 WebRtc_Word16* _playoutBuffer;
@@ -110,7 +107,7 @@ class EncodeDecodeTest: public ACMTest {
 protected:
 Sender _sender;
 Receiver _receiver;
 };

 } // namespace webrtc

@@ -48,7 +48,6 @@ class TestPackStereo : public AudioPacketizationCallback {
 private:
 AudioCodingModule* receiver_acm_;
 WebRtc_Word16 seq_no_;
-WebRtc_UWord8 payload_data_[60 * 32 * 2 * 2];
 WebRtc_UWord32 timestamp_diff_;
 WebRtc_UWord32 last_in_timestamp_;
 WebRtc_UWord64 total_bytes_;
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
@@ -53,9 +53,6 @@ private:
 PCMFile _outFileRefA;
 PCMFile _outFileRefB;

-DTMFDetector* _dtmfDetectorA;
-DTMFDetector* _dtmfDetectorB;
-
 int _testMode;
 };

@@ -355,8 +355,6 @@ private:
 bool _doStopRec; // For rec if not shared device
 bool _macBookPro;
 bool _macBookProPanRight;
-bool _stereoRender;
-bool _stereoRenderRequested;

 AudioConverterRef _captureConverter;
 AudioConverterRef _renderConverter;
@@ -376,7 +374,6 @@ private:
 WebRtc_Word32 _renderDelayOffsetSamples;

 private:
-WebRtc_UWord16 _playBufDelay; // playback delay
 WebRtc_UWord16 _playBufDelayFixed; // fixed playback delay

 WebRtc_UWord16 _playWarning;
@@ -18,8 +18,7 @@ namespace webrtc

 AudioDeviceUtilityMac::AudioDeviceUtilityMac(const WebRtc_Word32 id) :
 _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
-_id(id),
-_lastError(AudioDeviceModule::kAdmErrNone)
+_id(id)
 {
 WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, id,
 "%s created", __FUNCTION__);
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
@@ -29,7 +29,6 @@ public:
 private:
 CriticalSectionWrapper& _critSect;
 WebRtc_Word32 _id;
-AudioDeviceModule::ErrorCode _lastError;
 };

 } // namespace webrtc
@@ -76,8 +76,7 @@ class AudioEventObserverAPI: public AudioDeviceObserver {
 class AudioTransportAPI: public AudioTransport {
 public:
 AudioTransportAPI(AudioDeviceModule* audioDevice)
-: audio_device_(audioDevice),
-rec_count_(0),
+: rec_count_(0),
 play_count_(0) {
 }

@@ -129,7 +128,6 @@ class AudioTransportAPI: public AudioTransport {
 }

 private:
-AudioDeviceModule* audio_device_;
 WebRtc_UWord32 rec_count_;
 WebRtc_UWord32 play_count_;
 };
@@ -68,8 +68,7 @@ const char* GetResource(const char* resource)
 namespace webrtc
 {

-AudioEventObserver::AudioEventObserver(AudioDeviceModule* audioDevice) :
-_audioDevice(audioDevice)
+AudioEventObserver::AudioEventObserver(AudioDeviceModule* audioDevice)
 {
 }

@@ -85,8 +85,6 @@ public:
 public:
 ErrorCode _error;
 WarningCode _warning;
-private:
-AudioDeviceModule* _audioDevice;
 };

 // ----------------------------------------------------------------------------
@@ -38,8 +38,7 @@ class FrameQueue
 public:
 FrameQueue()
 :
-_queueRWLock(*webrtc::RWLockWrapper::CreateRWLock()),
-_prevTS(-1)
+_queueRWLock(*webrtc::RWLockWrapper::CreateRWLock())
 {
 }

@@ -56,7 +55,6 @@ public:
 private:
 webrtc::RWLockWrapper& _queueRWLock;
 std::queue<FrameQueueTuple *> _frameBufferQueue;
-WebRtc_Word64 _prevTS;
 };

 // feedback signal to encoder
@@ -77,8 +77,6 @@ public:
 WebRtc_UWord32 decoderSpecificSize = 0,
 void* decoderSpecificInfo = NULL) :
 _encodedVideoBuffer(buffer),
-_decoderSpecificInfo(decoderSpecificInfo),
-_decoderSpecificSize(decoderSpecificSize),
 _encodeComplete(false) {}
 WebRtc_Word32 Encoded(webrtc::EncodedImage& encodedImage,
 const webrtc::CodecSpecificInfo* codecSpecificInfo,
@@ -89,8 +87,6 @@ public:
 webrtc::VideoFrameType EncodedFrameType() const;
 private:
 TestVideoEncodedBuffer* _encodedVideoBuffer;
-void* _decoderSpecificInfo;
-WebRtc_UWord32 _decoderSpecificSize;
 bool _encodeComplete;
 webrtc::VideoFrameType _encodedFrameType;
 };
@@ -52,7 +52,6 @@ private:
 WebRtc_UWord32 _skipCnt;
 webrtc::VideoCodingModule* _VCMReceiver;
 webrtc::FrameType _frameType;
-WebRtc_UWord8* _payloadData; // max payload size??
 WebRtc_UWord16 _seqNo;
 NormalTest& _test;
 }; // end of VCMEncodeCompleteCallback
@@ -78,7 +78,6 @@ private:
 float _encodedBytes;
 VideoCodingModule* _VCMReceiver;
 FrameType _frameType;
-WebRtc_UWord8* _payloadData;
 WebRtc_UWord16 _seqNo;
 bool _encodeComplete;
 WebRtc_Word32 _width;
@@ -94,7 +93,6 @@ class VCMRTPEncodeCompleteCallback: public VCMPacketizationCallback
 public:
 VCMRTPEncodeCompleteCallback(RtpRtcp* rtp) :
 _encodedBytes(0),
-_seqNo(0),
 _encodeComplete(false),
 _RTPModule(rtp) {}

@@ -128,8 +126,6 @@ public:
 private:
 float _encodedBytes;
 FrameType _frameType;
-WebRtc_UWord8* _payloadData;
-WebRtc_UWord16 _seqNo;
 bool _encodeComplete;
 RtpRtcp* _RTPModule;
 WebRtc_Word16 _width;
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
@@ -51,7 +51,7 @@ public:

 virtual int UpdateSize(int width, int height);

 // Setup
 int SetStreamSettings(int streamId, float startWidth, float startHeight, float stopWidth, float stopHeight);
 int SetStreamCropSettings(int streamId, float startWidth, float startHeight, float stopWidth, float stopHeight);

@@ -87,8 +87,6 @@ private:
 int _stretchedHeight;
 int _oldStretchedHeight;
 int _oldStretchedWidth;
-int _xOldWidth;
-int _yOldHeight;
 unsigned char* _buffer;
 int _bufferSize;
 int _incommingBufferSize;
@@ -34,8 +34,6 @@ _stretchedWidth( 0),
 _stretchedHeight( 0),
 _oldStretchedHeight( 0),
 _oldStretchedWidth( 0),
-_xOldWidth( 0),
-_yOldHeight( 0),
 _buffer( 0),
 _bufferSize( 0),
 _incommingBufferSize( 0),
@@ -426,7 +424,7 @@ int VideoRenderNSOpenGL::ChangeWindow(CocoaRenderView* newWindowRef)
 return 0;
 }

 /* Check if the thread and event already exist.
 * If so then they will simply be restarted
 * If not then create them and continue
 */
@@ -619,7 +617,7 @@ int VideoRenderNSOpenGL::setRenderTargetFullScreen()
 [_windowRef setFrame:screenRect];
 [_windowRef setBounds:screenRect];


 _fullScreenWindow = [[CocoaFullScreenWindow alloc]init];
 [_fullScreenWindow grabFullScreen];
 [[[_fullScreenWindow window] contentView] addSubview:_windowRef];
@@ -655,18 +653,18 @@ VideoRenderNSOpenGL::~VideoRenderNSOpenGL()
 {
 if(_fullScreenWindow)
 {
 // Detach CocoaRenderView from full screen view back to
 // it's original parent.
 [_windowRef removeFromSuperview];
 if(_windowRefSuperView)
 {
 [_windowRefSuperView addSubview:_windowRef];
 [_windowRef setFrame:_windowRefSuperViewFrame];
 }

 WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, 0, "%s:%d Attempting to release fullscreen window", __FUNCTION__, __LINE__);
 [_fullScreenWindow releaseFullScreen];

 }
 }

@@ -1,5 +1,5 @@
|
|||||||
/*
|
/*
|
||||||
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||||
*
|
*
|
||||||
* Use of this source code is governed by a BSD-style license
|
* Use of this source code is governed by a BSD-style license
|
||||||
* that can be found in the LICENSE file in the root of the source
|
* that can be found in the LICENSE file in the root of the source
|
||||||
@@ -60,7 +60,6 @@ class FrameWriterImpl : public FrameWriter {
|
|||||||
private:
|
private:
|
||||||
std::string output_filename_;
|
std::string output_filename_;
|
||||||
int frame_length_in_bytes_;
|
int frame_length_in_bytes_;
|
||||||
int number_of_frames_;
|
|
||||||
FILE* output_file_;
|
FILE* output_file_;
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -8,7 +8,9 @@
 
 {
 'conditions': [
-['OS=="win"', {
+# TODO(kjellander): Support UseoFMFC on VS2010.
+# http://code.google.com/p/webrtc/issues/detail?id=709
+['OS=="win" and MSVS_VERSION < "2010"', {
 'targets': [
 # WinTest - GUI test for Windows
 {
@@ -21,10 +23,10 @@
 ## VoiceEngine
 '<(webrtc_root)/voice_engine/voice_engine.gyp:voice_engine_core',
 ## VideoEngine
 'video_engine_core',
 ],
 'include_dirs': [
 './interface',
 '../../../../', # common_types.h and typedefs.h
 '../commonTestClasses/'
 ],
@@ -34,7 +36,7 @@
 'ChannelDlg.cc',
 'ChannelDlg.h',
 'ChannelPool.cc',
 'ChannelPool.h',
 'renderStartImage.jpg',
 'renderTimeoutImage.jpg',
 'res\Capture.rc2',
@@ -52,7 +54,7 @@
 'CaptureDevicePool.cc',
 'tbExternalTransport.h',
 'CaptureDevicePool.h',
 
 ],
 'configurations': {
 'Common_Base': {
@@ -203,7 +203,6 @@ TEST_F(StreamSynchronizationTest, AudioDelay) {
 int current_audio_delay_ms = 0;
 int delay_ms = 200;
 int extra_audio_delay_ms = 0;
-int current_extra_delay_ms = 0;
 int total_video_delay_ms = 0;
 
 EXPECT_EQ(0, DelayedVideo(delay_ms, current_audio_delay_ms,
@@ -212,7 +211,7 @@ TEST_F(StreamSynchronizationTest, AudioDelay) {
 // The audio delay is not allowed to change more than this in 1 second.
 EXPECT_EQ(kMaxAudioDiffMs, extra_audio_delay_ms);
 current_audio_delay_ms = extra_audio_delay_ms;
-current_extra_delay_ms = extra_audio_delay_ms;
+int current_extra_delay_ms = extra_audio_delay_ms;
 
 send_time_->IncreaseTimeMs(1000);
 receive_time_->IncreaseTimeMs(800);
@@ -273,7 +272,6 @@ TEST_F(StreamSynchronizationTest, BothDelayedVideoLater) {
 int audio_delay_ms = 100;
 int video_delay_ms = 300;
 int extra_audio_delay_ms = 0;
-int current_extra_delay_ms = 0;
 int total_video_delay_ms = 0;
 
 EXPECT_EQ(0, DelayedAudioAndVideo(audio_delay_ms,
@@ -285,7 +283,7 @@ TEST_F(StreamSynchronizationTest, BothDelayedVideoLater) {
 // The audio delay is not allowed to change more than this in 1 second.
 EXPECT_EQ(kMaxAudioDiffMs, extra_audio_delay_ms);
 current_audio_delay_ms = extra_audio_delay_ms;
-current_extra_delay_ms = extra_audio_delay_ms;
+int current_extra_delay_ms = extra_audio_delay_ms;
 
 send_time_->IncreaseTimeMs(1000);
 receive_time_->IncreaseTimeMs(800);
@@ -358,7 +356,6 @@ TEST_F(StreamSynchronizationTest, BothDelayedAudioLater) {
 int audio_delay_ms = 300;
 int video_delay_ms = 100;
 int extra_audio_delay_ms = 0;
-int current_extra_delay_ms = 0;
 int total_video_delay_ms = 0;
 
 EXPECT_EQ(0, DelayedAudioAndVideo(audio_delay_ms,
@@ -369,7 +366,6 @@ TEST_F(StreamSynchronizationTest, BothDelayedAudioLater) {
 EXPECT_EQ(kMaxVideoDiffMs, total_video_delay_ms);
 EXPECT_EQ(0, extra_audio_delay_ms);
 current_audio_delay_ms = extra_audio_delay_ms;
-current_extra_delay_ms = extra_audio_delay_ms;
 
 send_time_->IncreaseTimeMs(1000);
 receive_time_->IncreaseTimeMs(1000 - std::max(audio_delay_ms,
@@ -384,7 +380,6 @@ TEST_F(StreamSynchronizationTest, BothDelayedAudioLater) {
 EXPECT_EQ(2 * kMaxVideoDiffMs, total_video_delay_ms);
 EXPECT_EQ(0, extra_audio_delay_ms);
 current_audio_delay_ms = extra_audio_delay_ms;
-current_extra_delay_ms = extra_audio_delay_ms;
 
 send_time_->IncreaseTimeMs(1000);
 receive_time_->IncreaseTimeMs(1000 - std::max(audio_delay_ms,
@@ -398,7 +393,6 @@ TEST_F(StreamSynchronizationTest, BothDelayedAudioLater) {
 &total_video_delay_ms));
 EXPECT_EQ(audio_delay_ms - video_delay_ms, total_video_delay_ms);
 EXPECT_EQ(0, extra_audio_delay_ms);
-current_extra_delay_ms = extra_audio_delay_ms;
 
 // Simulate that NetEQ introduces some audio delay.
 current_audio_delay_ms = 50;
@@ -415,7 +409,6 @@ TEST_F(StreamSynchronizationTest, BothDelayedAudioLater) {
 EXPECT_EQ(audio_delay_ms - video_delay_ms + current_audio_delay_ms,
 total_video_delay_ms);
 EXPECT_EQ(0, extra_audio_delay_ms);
-current_extra_delay_ms = extra_audio_delay_ms;
 
 // Simulate that NetEQ reduces its delay.
 current_audio_delay_ms = 10;
@@ -99,22 +99,15 @@
 'source/vie_window_manager_factory_win.cc',
 ],
 'conditions': [
-# TODO(andrew): this likely isn't an actual dependency. It should be
-# included in webrtc.gyp or video_engine.gyp instead.
 ['OS=="android"', {
 'libraries': [
 '-lGLESv2',
 '-llog',
 ],
 }],
-['OS=="win"', {
-'dependencies': [
-'vie_win_test',
-],
-}],
 ['OS=="linux"', {
-# TODO(andrew): these should be provided directly by the projects
-# # which require them instead.
+# TODO(andrew): These should be provided directly by the projects
+# which require them instead.
 'libraries': [
 '-lXext',
 '-lX11',
@@ -50,7 +50,6 @@ class ViEFileCaptureDevice {
 webrtc::CriticalSectionWrapper* mutex_;
 
 WebRtc_UWord32 frame_length_;
-WebRtc_UWord8* frame_buffer_;
 WebRtc_UWord32 width_;
 WebRtc_UWord32 height_;
 };
@@ -104,7 +104,9 @@
 },
 ],
 'conditions': [
-['OS=="win"', {
+# TODO(kjellander): Support UseoFMFC on VS2010.
+# http://code.google.com/p/webrtc/issues/detail?id=709
+['OS=="win" and MSVS_VERSION < "2010"', {
 'targets': [
 # WinTest - GUI test for Windows
 {