Prepare to roll Chromium to 149181.
- This roll brings in VS2010 by default. The buildbots need updating (issue710).
- We'll roll to 149181 later (past current Canary) to fix a Mac gyp issue: https://chromiumcodereview.appspot.com/10824105
- Chromium is now using a later libvpx than us. We should investigate rolling our standalone build.
- Fix set-but-unused warnings.
- Fix -Wunused-private-field warnings on Mac.

TBR=kjellander@webrtc.org
BUG=issue709,issue710
TEST=trybots

Review URL: https://webrtc-codereview.appspot.com/709007

git-svn-id: http://webrtc.googlecode.com/svn/trunk@2544 4adac7df-926f-26a2-2b94-8c16560cd09d
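One concrete piece of the VS2010 preparation shows up in this commit's .gyp hunks: the Windows-only GUI test targets, which still depend on MFC (see the UseoFMFC TODO tracking issue709), are now generated only when the Visual Studio version predates 2010, rather than for every OS=="win" build. A condensed sketch of the new guard, reconstructed from those hunks with the target bodies elided (not the verbatim patch):

    {
      'conditions': [
        # TODO(kjellander): Support UseoFMFC on VS2010.
        # http://code.google.com/p/webrtc/issues/detail?id=709
        ['OS=="win" and MSVS_VERSION < "2010"', {
          'targets': [
            # WinTest - GUI test for Windows (target definition elided).
          ],
        }],
      ],
    }

The same guard appears in two .gyp hunks in the diff; with it in place, the MFC-based targets simply drop out of the generated projects once the bots default to VS2010.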
Diff hunks:
@@ -81,8 +81,6 @@ _processEventB(NULL),
@@ -118,7 +116,7 @@ _testNumB(1)
@@ -172,7 +170,7 @@ APITest::~APITest()
@@ -260,7 +258,7 @@ APITest::SetUp()
@@ -301,7 +299,7 @@ APITest::SetUp()
@@ -321,7 +319,7 @@ APITest::SetUp()
@@ -357,29 +355,29 @@ APITest::SetUp()
@@ -409,7 +407,7 @@ APITest::APIThreadB(void* obj)
@@ -437,7 +435,7 @@ APITest::PullAudioRunA()
@@ -462,11 +460,11 @@ APITest::PullAudioRunB()
@@ -487,7 +485,7 @@ APITest::PushAudioRunA()
@@ -567,7 +565,7 @@ APITest::RunTest(char thread)
@@ -577,7 +575,7 @@ APITest::RunTest(char thread)
@@ -594,7 +592,7 @@ APITest::RunTest(char thread)
@@ -617,7 +615,7 @@ APITest::RunTest(char thread)
@@ -641,7 +639,7 @@ APITest::RunTest(char thread)
@@ -664,7 +662,7 @@ APITest::APIRunA()
@@ -676,7 +674,7 @@ APITest::APIRunA()
@@ -688,7 +686,7 @@ APITest::APIRunB()
@@ -700,46 +698,46 @@ APITest::Perform()
@@ -764,7 +762,7 @@ APITest::Perform()
@@ -802,12 +800,12 @@ APITest::CheckVADStatus(char side)
@@ -818,7 +816,7 @@ APITest::CheckVADStatus(char side)
@@ -847,12 +845,12 @@ APITest::CheckVADStatus(char side)
@@ -863,7 +861,7 @@ APITest::CheckVADStatus(char side)
@@ -898,7 +896,7 @@ APITest::TestDelay(char side)
@@ -923,7 +921,7 @@ APITest::TestDelay(char side)
@@ -935,11 +933,11 @@ APITest::TestDelay(char side)
@@ -970,7 +968,7 @@ APITest::TestDelay(char side)
@@ -978,12 +976,12 @@ APITest::TestDelay(char side)
@@ -1020,7 +1018,7 @@ APITest::TestRegisteration(char sendSide)
@@ -1179,7 +1177,7 @@ APITest::TestPlayout(char receiveSide)
@@ -1400,7 +1398,7 @@ APITest::TestSendVAD(char side)
@@ -1477,14 +1475,14 @@ APITest::ChangeCodec(char side)
@@ -1494,7 +1492,7 @@ APITest::ChangeCodec(char side)
@@ -1541,8 +1539,8 @@ APITest::ChangeCodec(char side)
@@ -1550,11 +1548,11 @@ APITest::LookForDTMF(char side)
@@ -1,5 +1,5 @@
@@ -30,7 +30,7 @@ public:
@@ -56,24 +56,24 @@ private:
@@ -84,11 +84,11 @@ private:
@@ -96,13 +96,13 @@ private:
@@ -123,10 +123,6 @@ private:
@@ -144,7 +140,7 @@ private:
@@ -155,14 +151,14 @@ private:
@@ -19,12 +19,12 @@
@@ -104,7 +104,7 @@ Channel::SendData(
@@ -135,9 +135,9 @@ Channel::SendData(
@@ -146,7 +146,7 @@ Channel::CalcStatistics(
@@ -180,12 +180,12 @@ Channel::CalcStatistics(
@@ -197,15 +197,15 @@ Channel::CalcStatistics(
@@ -247,7 +247,6 @@ _leftChannel(true),
@@ -270,7 +269,7 @@ _totalBytes(0)
@@ -284,14 +283,14 @@ Channel::~Channel()
@@ -316,7 +315,7 @@ Channel::ResetStats()
@@ -342,12 +341,12 @@ Channel::Stats(CodecInst& codecInst, ACMTestPayloadStats& payloadStats)
@@ -355,7 +354,7 @@ Channel::Stats(CodecInst& codecInst, ACMTestPayloadStats& payloadStats)
@@ -375,18 +374,18 @@ Channel::Stats(WebRtc_UWord32* numPackets)
@@ -418,7 +417,7 @@ Channel::PrintStats(CodecInst& codecInst)
@@ -435,19 +434,19 @@ Channel::PrintStats(CodecInst& codecInst)
@@ -473,6 +472,6 @@ Channel::BitRate()
@@ -1,5 +1,5 @@
@@ -32,7 +32,7 @@ struct ACMTestFrameSizeStats
@@ -56,36 +56,36 @@ public:
@@ -115,7 +115,6 @@ private:
@@ -50,8 +50,6 @@ Sender::Sender()
@@ -61,8 +61,6 @@ class Sender {
@@ -81,7 +79,6 @@ class Receiver {
@@ -110,7 +107,7 @@ class EncodeDecodeTest: public ACMTest {
@@ -48,7 +48,6 @@ class TestPackStereo : public AudioPacketizationCallback {
@@ -1,5 +1,5 @@
@@ -53,9 +53,6 @@ private:
@@ -355,8 +355,6 @@ private:
@@ -376,7 +374,6 @@ private:
@@ -18,8 +18,7 @@ namespace webrtc
@@ -1,5 +1,5 @@
@@ -29,7 +29,6 @@ public:
@@ -76,8 +76,7 @@ class AudioEventObserverAPI: public AudioDeviceObserver {
@@ -129,7 +128,6 @@ class AudioTransportAPI: public AudioTransport {
@@ -68,8 +68,7 @@ const char* GetResource(const char* resource)
@@ -85,8 +85,6 @@ public:
@@ -38,8 +38,7 @@ class FrameQueue
@@ -56,7 +55,6 @@ public:
@@ -77,8 +77,6 @@ public:
@@ -89,8 +87,6 @@ public:
@@ -52,7 +52,6 @@ private:
@@ -78,7 +78,6 @@ private:
@@ -94,7 +93,6 @@ class VCMRTPEncodeCompleteCallback: public VCMPacketizationCallback
@@ -128,8 +126,6 @@ public:
@@ -1,5 +1,5 @@
@@ -51,7 +51,7 @@ public:
@@ -87,8 +87,6 @@ private:
@@ -34,8 +34,6 @@ _stretchedWidth( 0),
@@ -426,7 +424,7 @@ int VideoRenderNSOpenGL::ChangeWindow(CocoaRenderView* newWindowRef)
@@ -619,7 +617,7 @@ int VideoRenderNSOpenGL::setRenderTargetFullScreen()
@@ -655,18 +653,18 @@ VideoRenderNSOpenGL::~VideoRenderNSOpenGL()
@@ -1,5 +1,5 @@
@@ -60,7 +60,6 @@ class FrameWriterImpl : public FrameWriter {
@@ -8,7 +8,9 @@
@@ -21,10 +23,10 @@
@@ -34,7 +36,7 @@
@@ -52,7 +54,7 @@
@@ -203,7 +203,6 @@ TEST_F(StreamSynchronizationTest, AudioDelay) {
@@ -212,7 +211,7 @@ TEST_F(StreamSynchronizationTest, AudioDelay) {
@@ -273,7 +272,6 @@ TEST_F(StreamSynchronizationTest, BothDelayedVideoLater) {
@@ -285,7 +283,7 @@ TEST_F(StreamSynchronizationTest, BothDelayedVideoLater) {
@@ -358,7 +356,6 @@ TEST_F(StreamSynchronizationTest, BothDelayedAudioLater) {
@@ -369,7 +366,6 @@
@@ -384,7 +380,6 @@
@@ -398,7 +393,6 @@
@@ -415,7 +409,6 @@
@@ -99,22 +99,15 @@
@@ -50,7 +50,6 @@ class ViEFileCaptureDevice {
@@ -104,7 +104,9 @@