Refactored video capture unit test to use gtest.

Fixed Valgrind warnings on Linux.

BUG=
TEST=

Review URL: http://webrtc-codereview.appspot.com/296009

git-svn-id: http://webrtc.googlecode.com/svn/trunk@1100 4adac7df-926f-26a2-2b94-8c16560cd09d
perkj@webrtc.org 2011-12-05 09:58:55 +00:00
parent 0ae71b9ccb
commit 8627adc158
23 changed files with 470 additions and 2426 deletions


@@ -113,7 +113,8 @@ VideoCaptureModuleV4L2::~VideoCaptureModuleV4L2()
{
delete _captureCritSect;
}
close(_deviceFd);
if (_deviceFd != -1)
close(_deviceFd);
}
WebRtc_Word32 VideoCaptureModuleV4L2::StartCapture(
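The guarded close() in the destructor above is one of the Valgrind fixes: the descriptor is only closed if a device was actually opened. A minimal sketch of the same pattern, using a hypothetical Device class rather than the real VideoCaptureModuleV4L2:

#include <fcntl.h>
#include <unistd.h>

class Device {
 public:
  Device() : fd_(-1) {}              // start out with an invalid descriptor
  ~Device() {
    if (fd_ != -1)                   // never close a descriptor that was never opened
      close(fd_);
  }
  bool Open(const char* path) {
    fd_ = open(path, O_RDWR);
    return fd_ != -1;
  }
 private:
  int fd_;
};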
@@ -191,7 +192,11 @@ WebRtc_Word32 VideoCaptureModuleV4L2::StartCapture(
// initialize current width and height
_currentWidth = video_fmt.fmt.pix.width;
_currentHeight = video_fmt.fmt.pix.height;
_currentFrameRate=30; // No way of knowing on Linux.
_captureDelay = 120;
if(_currentWidth >= 800)
_currentFrameRate = 15;
else
_currentFrameRate = 30; // No way of knowing on Linux.
if (!AllocateVideoBuffers())
{
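The second hunk also lowers the assumed frame rate for wide frames, since V4L2 gives no way to query the actual capture rate. Reduced to a hypothetical helper written purely for illustration, the heuristic is:

// Assumed default when the driver cannot report a rate: frames 800 pixels
// wide or more are assumed to arrive at 15 fps, everything else at 30 fps.
static int DefaultFrameRate(int width) {
  return (width >= 800) ? 15 : 30;
}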


@@ -194,51 +194,24 @@
# Exclude the test targets when building with chromium.
'conditions': [
['build_with_chromium==0', {
'targets': [
{
'targets': [
{
'target_name': 'video_capture_module_test',
'type': 'executable',
'dependencies': [
'video_capture_module',
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
'webrtc_utility',
'video_render_module',
'webrtc_video_coding',
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
'<(webrtc_root)/../testing/gtest.gyp:gtest',
'<(webrtc_root)/../test/test.gyp:test_support_main',
],
'include_dirs': [
'../interface',
],
'sources': [
# sources
'../test/testAPI/cocoa_renderer.h',
'../test/testAPI/cocoa_renderer.mm',
'../test/testAPI/testDefines.h',
'../test/testAPI/testAPI.cpp',
'../test/testAPI/testAPI_mac.mm',
'../test/testAPI/testCameraEncoder.cpp',
'../test/testAPI/testCameraEncoder.h',
'../test/testAPI/testExternalCapture.cpp',
'../test/testAPI/testExternalCapture.h',
'../test/testAPI/testPlatformDependent.cpp',
'../test/testAPI/testPlatformDependent.h',
'../test/testAPI/Logger.h',
'../test/testAPI/Logger.cpp',
'../test/testAPI/Renderer.h',
'../test/testAPI/Renderer.cpp',
'../test/testAPI/renderer_win.cc',
'../test/testAPI/renderer_mac.mm',
'../test/testAPI/renderer_linux.cc',
# Note: renderer_android.cc is deliberately not listed since it
# doesn't build through gyp anyway.
], # source
'conditions': [
# DEFINE PLATFORM SPECIFIC SOURCE FILES
['OS!="mac"', {
'sources!': [
'../test/testAPI/cocoa_renderer.h',
'../test/testAPI/cocoa_renderer.mm',
],
}],
'../test/video_capture_unittest.cc',
],
'conditions': [
# DEFINE PLATFORM SPECIFIC INCLUDE AND CFLAGS
['OS=="mac" or OS=="linux"', {
'cflags': [

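The gyp change above drops the hand-rolled testAPI harness (all of the files deleted below) and points the video_capture_module_test target at a single gtest file, ../test/video_capture_unittest.cc, linked against gtest and test_support_main. A rough, hypothetical sketch of what one case in that file could look like (the test name and expectations are assumptions; only the factory calls are taken from the code being removed):

#include <stdio.h>

#include "gtest/gtest.h"
#include "video_capture_factory.h"  // resolved through the include_dirs the target sets up

// Illustrative example only; the real video_capture_unittest.cc is not shown in this diff.
// main() is supplied by the test_support_main dependency.
TEST(VideoCaptureModuleTest, CreateAndDestroyDeviceInfo) {
  webrtc::VideoCaptureModule::DeviceInfo* info =
      webrtc::VideoCaptureFactory::CreateDeviceInfo(0);
  ASSERT_TRUE(info != NULL);  // the factory should always hand back a DeviceInfo
  // NumberOfDevices() may legitimately be zero on a machine without a camera.
  printf("Found %u capture device(s)\n", info->NumberOfDevices());
  delete info;
}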

@@ -1,301 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "Logger.h"
#include "string.h"
#include "file_wrapper.h"
namespace webrtc
{
#ifdef _WIN32
#pragma warning(disable : 4996)
#endif
Logger::Logger() :
_logFile(*FileWrapper::Create())
{
}
Logger::~Logger(void)
{
if (_logFile.Open())
_logFile.CloseFile();
}
void Logger::Print(char* msg)
{
printf("%s\n",msg);
if (_logFile.Open())
{
_logFile.WriteText(msg);
}
}
#define BUFSIZE 256
void Logger::SetFileName(const char* fileName)
{
_logFile.CloseFile();
if (!fileName)
return;
_logFile.OpenFile(fileName, false, false, true);
char osVersion[BUFSIZE];
memset(osVersion, 0, sizeof(osVersion));
GetOSDisplayString(osVersion);
_logFile.WriteText(osVersion);
_logFile.WriteText("\n\n");
}
#ifdef _WIN32
#include <windows.h>
#include <tchar.h>
#include <stdio.h>
#include <strsafe.h>
typedef void (WINAPI *PGNSI)(LPSYSTEM_INFO);
typedef BOOL (WINAPI *PGPI)(DWORD, DWORD, DWORD, DWORD, PDWORD);
bool Logger::GetOSDisplayString( void* psz)
{
OSVERSIONINFOEX osvi;
SYSTEM_INFO si;
PGNSI pGNSI;
PGPI pGPI;
BOOL bOsVersionInfoEx;
DWORD dwType;
STRSAFE_LPWSTR pszOS = (STRSAFE_LPWSTR) psz;
size_t bufferSize = BUFSIZE/sizeof(TCHAR);
ZeroMemory(&si, sizeof(SYSTEM_INFO));
ZeroMemory(&osvi, sizeof(OSVERSIONINFOEX));
osvi.dwOSVersionInfoSize = sizeof(OSVERSIONINFOEX);
if( !(bOsVersionInfoEx = GetVersionEx ((OSVERSIONINFO *) &osvi)) )
return 1;
// Call GetNativeSystemInfo if supported or GetSystemInfo otherwise.
pGNSI = (PGNSI) GetProcAddress(
GetModuleHandle(TEXT("kernel32.dll")),
"GetNativeSystemInfo");
if(NULL != pGNSI)
pGNSI(&si);
else GetSystemInfo(&si);
if ( VER_PLATFORM_WIN32_NT==osvi.dwPlatformId &&
osvi.dwMajorVersion > 4 )
{
StringCchCopy(pszOS, bufferSize, TEXT("Microsoft "));
// Test for the specific product.
if ( osvi.dwMajorVersion == 6 )
{
if( osvi.dwMinorVersion == 0 )
{
if( osvi.wProductType == VER_NT_WORKSTATION )
StringCchCat(pszOS, bufferSize, TEXT("Windows Vista "));
else StringCchCat(pszOS, bufferSize, TEXT("Windows Server 2008 " ));
}
if ( osvi.dwMinorVersion == 1 )
{
if( osvi.wProductType == VER_NT_WORKSTATION )
StringCchCat(pszOS, bufferSize, TEXT("Windows 7 "));
else StringCchCat(pszOS, bufferSize, TEXT("Windows Server 2008 R2 " ));
}
pGPI = (PGPI) GetProcAddress(
GetModuleHandle(TEXT("kernel32.dll")),
"GetProductInfo");
pGPI( osvi.dwMajorVersion, osvi.dwMinorVersion, 0, 0, &dwType);
switch( dwType )
{
case PRODUCT_ULTIMATE:
StringCchCat(pszOS, bufferSize, TEXT("Ultimate Edition" ));
break;
// case PRODUCT_PROFESSIONAL:
// StringCchCat(pszOS, bufferSize, TEXT("Professional" ));
break;
case PRODUCT_HOME_PREMIUM:
StringCchCat(pszOS, bufferSize, TEXT("Home Premium Edition" ));
break;
case PRODUCT_HOME_BASIC:
StringCchCat(pszOS, bufferSize, TEXT("Home Basic Edition" ));
break;
case PRODUCT_ENTERPRISE:
StringCchCat(pszOS, bufferSize, TEXT("Enterprise Edition" ));
break;
case PRODUCT_BUSINESS:
StringCchCat(pszOS, bufferSize, TEXT("Business Edition" ));
break;
case PRODUCT_STARTER:
StringCchCat(pszOS, bufferSize, TEXT("Starter Edition" ));
break;
case PRODUCT_CLUSTER_SERVER:
StringCchCat(pszOS, bufferSize, TEXT("Cluster Server Edition" ));
break;
case PRODUCT_DATACENTER_SERVER:
StringCchCat(pszOS, bufferSize, TEXT("Datacenter Edition" ));
break;
case PRODUCT_DATACENTER_SERVER_CORE:
StringCchCat(pszOS, bufferSize, TEXT("Datacenter Edition (core installation)" ));
break;
case PRODUCT_ENTERPRISE_SERVER:
StringCchCat(pszOS, bufferSize, TEXT("Enterprise Edition" ));
break;
case PRODUCT_ENTERPRISE_SERVER_CORE:
StringCchCat(pszOS, bufferSize, TEXT("Enterprise Edition (core installation)" ));
break;
case PRODUCT_ENTERPRISE_SERVER_IA64:
StringCchCat(pszOS, bufferSize, TEXT("Enterprise Edition for Itanium-based Systems" ));
break;
case PRODUCT_SMALLBUSINESS_SERVER:
StringCchCat(pszOS, bufferSize, TEXT("Small Business Server" ));
break;
case PRODUCT_SMALLBUSINESS_SERVER_PREMIUM:
StringCchCat(pszOS, bufferSize, TEXT("Small Business Server Premium Edition" ));
break;
case PRODUCT_STANDARD_SERVER:
StringCchCat(pszOS, bufferSize, TEXT("Standard Edition" ));
break;
case PRODUCT_STANDARD_SERVER_CORE:
StringCchCat(pszOS, bufferSize, TEXT("Standard Edition (core installation)" ));
break;
case PRODUCT_WEB_SERVER:
StringCchCat(pszOS, bufferSize, TEXT("Web Server Edition" ));
break;
}
}
if ( osvi.dwMajorVersion == 5 && osvi.dwMinorVersion == 2 )
{
if( GetSystemMetrics(SM_SERVERR2) )
StringCchCat(pszOS, bufferSize, TEXT( "Windows Server 2003 R2, "));
else if ( osvi.wSuiteMask & VER_SUITE_STORAGE_SERVER )
StringCchCat(pszOS, bufferSize, TEXT( "Windows Storage Server 2003"));
//else if ( osvi.wSuiteMask & VER_SUITE_WH_SERVER )
// StringCchCat(pszOS, bufferSize, TEXT( "Windows Home Server"));
else if( osvi.wProductType == VER_NT_WORKSTATION &&
si.wProcessorArchitecture==PROCESSOR_ARCHITECTURE_AMD64)
{
StringCchCat(pszOS, bufferSize, TEXT( "Windows XP Professional x64 Edition"));
}
else StringCchCat(pszOS, bufferSize, TEXT("Windows Server 2003, "));
// Test for the server type.
if ( osvi.wProductType != VER_NT_WORKSTATION )
{
if ( si.wProcessorArchitecture==PROCESSOR_ARCHITECTURE_IA64 )
{
if( osvi.wSuiteMask & VER_SUITE_DATACENTER )
StringCchCat(pszOS, bufferSize, TEXT( "Datacenter Edition for Itanium-based Systems" ));
else if( osvi.wSuiteMask & VER_SUITE_ENTERPRISE )
StringCchCat(pszOS, bufferSize, TEXT( "Enterprise Edition for Itanium-based Systems" ));
}
else if ( si.wProcessorArchitecture==PROCESSOR_ARCHITECTURE_AMD64 )
{
if( osvi.wSuiteMask & VER_SUITE_DATACENTER )
StringCchCat(pszOS, bufferSize, TEXT( "Datacenter x64 Edition" ));
else if( osvi.wSuiteMask & VER_SUITE_ENTERPRISE )
StringCchCat(pszOS, bufferSize, TEXT( "Enterprise x64 Edition" ));
else StringCchCat(pszOS, bufferSize, TEXT( "Standard x64 Edition" ));
}
else
{
if ( osvi.wSuiteMask & VER_SUITE_COMPUTE_SERVER )
StringCchCat(pszOS, bufferSize, TEXT( "Compute Cluster Edition" ));
else if( osvi.wSuiteMask & VER_SUITE_DATACENTER )
StringCchCat(pszOS, bufferSize, TEXT( "Datacenter Edition" ));
else if( osvi.wSuiteMask & VER_SUITE_ENTERPRISE )
StringCchCat(pszOS, bufferSize, TEXT( "Enterprise Edition" ));
else if ( osvi.wSuiteMask & VER_SUITE_BLADE )
StringCchCat(pszOS, bufferSize, TEXT( "Web Edition" ));
else StringCchCat(pszOS, bufferSize, TEXT( "Standard Edition" ));
}
}
}
if ( osvi.dwMajorVersion == 5 && osvi.dwMinorVersion == 1 )
{
StringCchCat(pszOS, bufferSize, TEXT("Windows XP "));
if( osvi.wSuiteMask & VER_SUITE_PERSONAL )
StringCchCat(pszOS, bufferSize, TEXT( "Home Edition" ));
else StringCchCat(pszOS, bufferSize, TEXT( "Professional" ));
}
if ( osvi.dwMajorVersion == 5 && osvi.dwMinorVersion == 0 )
{
StringCchCat(pszOS, bufferSize, TEXT("Windows 2000 "));
if ( osvi.wProductType == VER_NT_WORKSTATION )
{
StringCchCat(pszOS, bufferSize, TEXT( "Professional" ));
}
else
{
if( osvi.wSuiteMask & VER_SUITE_DATACENTER )
StringCchCat(pszOS, bufferSize, TEXT( "Datacenter Server" ));
else if( osvi.wSuiteMask & VER_SUITE_ENTERPRISE )
StringCchCat(pszOS, bufferSize, TEXT( "Advanced Server" ));
else StringCchCat(pszOS, bufferSize, TEXT( "Server" ));
}
}
// Include service pack (if any) and build number.
if( _tcslen(osvi.szCSDVersion) > 0 )
{
StringCchCat(pszOS, bufferSize, TEXT(" ") );
StringCchCat(pszOS, bufferSize, osvi.szCSDVersion);
}
TCHAR buf[80];
StringCchPrintf( buf, 80, TEXT(" (build %d)"), osvi.dwBuildNumber);
StringCchCat(pszOS, bufferSize, buf);
if ( osvi.dwMajorVersion >= 6 )
{
if ( si.wProcessorArchitecture==PROCESSOR_ARCHITECTURE_AMD64 )
StringCchCat(pszOS, bufferSize, TEXT( ", 64-bit" ));
else if (si.wProcessorArchitecture==PROCESSOR_ARCHITECTURE_INTEL )
StringCchCat(pszOS, bufferSize, TEXT(", 32-bit"));
}
StringCchPrintf( buf, 80, TEXT(" (number of processors %d)"), si.dwNumberOfProcessors);
StringCchCat(pszOS, bufferSize, buf);
return TRUE;
}
else
{
printf( "This sample does not support this version of Windows.\n");
return FALSE;
}
}
#elif defined(WEBRTC_MAC_INTEL)
bool Logger::GetOSDisplayString(void* psz)
{}
#elif defined(WEBRTC_LINUX)
bool Logger::GetOSDisplayString(void* /*psz*/)
{ return true;}
#endif
} // namespace webrtc


@@ -1,29 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#pragma once
#include <stdio.h>
namespace webrtc
{
class FileWrapper;
class Logger
{
public:
Logger(void);
~Logger(void);
void SetFileName(const char* fileName);
void Print(char* msg);
private:
static bool GetOSDisplayString(void* psz);
FileWrapper& _logFile;
};
} //namespace webrtc


@@ -1,182 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "Renderer.h"
#include <stdio.h>
#include "thread_wrapper.h"
#include "tick_util.h"
#if defined _WIN32
#include <tchar.h>
#endif
namespace webrtc
{
Renderer::Renderer(bool preview) :
_renderModule(NULL), _quiting(false), _renderWindow(NULL)
{
#ifndef _WIN32
if (-1 == WebRtcCreateWindow((void**)(&_renderWindow), 0, 352, 288))
{
printf("ERROR** INVALID SCREEN\n");
}
#endif // In Windows the thread running the message loop needs to create the window.
_messageThread = ThreadWrapper::CreateThread(RenderThread, this,
kLowPriority, "RenderThread");
unsigned int threadId;
_messageThread->Start(threadId);
while (!_renderWindow)
{
SLEEP(10);
}// Wait until messageThread has created the window
_renderModule = VideoRender::CreateVideoRender(0, (void*) _renderWindow,
false);
_renderProvider = _renderModule->AddIncomingRenderStream(0, 0, 0.0f, 0.0f,
1.0f, 1.0f);
WebRtc_UWord32 width;
WebRtc_UWord32 height;
_renderModule->GetScreenResolution(width, height);
#ifdef _WIN32
// GetScreenResolution is currently not implemented
RECT screenRect;
GetWindowRect(GetDesktopWindow(), &screenRect);
width=screenRect.right;
height=screenRect.bottom;
#endif
if (!preview)
{
#if defined(_WIN32)
SetWindowPos(_renderWindow,0,height/2,width,height/2,true);
#elif defined(WEBRTC_MAC_INTEL)
SetWindowPos(_renderWindow, 0, height, width, height, true);
#elif defined(WEBRTC_LINUX)
#endif
_videoFrame.VerifyAndAllocate(_frameWidth * _frameHeight * 3 / 2);
_videoFrame.SetHeight(_frameHeight);
_videoFrame.SetWidth(_frameWidth);
_videoFrame.SetLength(_videoFrame.Size());
memset(_videoFrame.Buffer(), 0, _videoFrame.Size());
}
else // Preview window
{
#if defined(_WIN32)
SetWindowPos(_renderWindow,width/2,0,width/2,height/2,false);
#elif defined(WEBRTC_MAC_INTEL)
SetWindowPos(_renderWindow, 0, height, width, height, false);
#elif defined(WEBRTC_LINUX)
#endif
}
_renderModule->StartRender(0);
}
Renderer::~Renderer(void)
{
VideoRender::DestroyVideoRender(_renderModule);
_quiting = true;
while (_renderWindow)
{
SLEEP(20);
}
_messageThread->Stop();
delete _messageThread;
}
bool Renderer::RenderThread(ThreadObj obj)
{
return static_cast<Renderer*> (obj)->RenderThreadProcess();
}
bool Renderer::RenderThreadProcess()
{
if (_quiting == false && _renderWindow == NULL) // Create the render window
{
WebRtcCreateWindow((void**)&_renderWindow, 0, 352, 288);
}
#ifdef _WIN32
MSG msg;
if(PeekMessage(&msg, NULL, 0, 0,PM_REMOVE))
{
TranslateMessage(&msg);
DispatchMessage(&msg);
}
#endif
if (_quiting == true)
{
#if defined _WIN32
::DestroyWindow(_renderWindow);
#endif
_renderWindow = NULL;
}
SLEEP(50);
return true;
}
void Renderer::PaintGreen()
{
_videoFrame.VerifyAndAllocate(_frameWidth * _frameHeight * 3 / 2);
_videoFrame.SetHeight(_frameHeight);
_videoFrame.SetWidth(_frameWidth);
_videoFrame.SetLength(_videoFrame.Size());
memset(_videoFrame.Buffer(), 127, _videoFrame.Size());
memset(_videoFrame.Buffer() + _videoFrame.Width() * _videoFrame.Height(),
0, _videoFrame.Width() * _videoFrame.Height() / 2);
_videoFrame.SetRenderTime(TickTime::MillisecondTimestamp());
_renderProvider->RenderFrame(0,_videoFrame);
}
void Renderer::RenderFrame(VideoFrame& videoFrame)
{
_renderProvider->RenderFrame(0, videoFrame);
}
void Renderer::PaintBlue()
{
_videoFrame.VerifyAndAllocate(_frameWidth * _frameHeight * 3 / 2);
_videoFrame.SetHeight(_frameHeight);
_videoFrame.SetWidth(_frameWidth);
_videoFrame.SetLength(_videoFrame.Size());
memset(_videoFrame.Buffer(), 127, _videoFrame.Size());
memset(_videoFrame.Buffer() + _videoFrame.Width() * _videoFrame.Height(),
255, _videoFrame.Width() * _videoFrame.Height() / 2);
_videoFrame.SetRenderTime(TickTime::MillisecondTimestamp());
_renderProvider->RenderFrame(0, _videoFrame);
}
void* Renderer::GetWindow()
{
return (void*) _renderWindow;
}
} // namespace webrtc


@@ -1,83 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#pragma once
#include "video_render.h"
#include "testDefines.h"
#ifdef _WIN32
#include <windows.h>
#elif defined (WEBRTC_ANDROID)
#include <JNI.h>
#elif defined(WEBRTC_LINUX)
typedef void* HWND;
#endif
#include "thread_wrapper.h"
namespace webrtc
{
// Creates a window and fills in the os-specific handle type or pointer
// into os_window_handle. Returns 0 on success.
int WebRtcCreateWindow(void** os_window_handle,
int window_number,
int width, int height);
// Sets the window position in an OS-specific manner.
void SetWindowPos(void* os_window_handle, int x, int y,
int width, int height, bool onTop);
class Renderer
{
public:
Renderer(bool preview = false);
~Renderer(void);
void RenderFrame(VideoFrame& videoFrame);
void PaintGreen();
void PaintBlue();
void* GetWindow();
#if defined (WEBRTC_ANDROID)
static void SetRenderWindow(jobject renderWindow);
#endif
private:
static bool RenderThread(ThreadObj);
bool RenderThreadProcess();
VideoRender* _renderModule;
VideoRenderCallback* _renderProvider;
VideoFrame _videoFrame;
bool _quiting;
ThreadWrapper* _messageThread;
static int _screen;
static const WebRtc_UWord32 _frameWidth = 352;
static const WebRtc_UWord32 _frameHeight = 288;
#if defined(_WIN32)
HWND _renderWindow;
#elif defined(WEBRTC_MAC_INTEL)
void* _renderWindow;
#elif defined (WEBRTC_ANDROID)
jobject _renderWindow; //this is a glsurface.
public:
static jobject g_renderWindow;
#elif defined(WEBRTC_LINUX)
typedef void* HWND;
HWND _renderWindow;
#endif
};
} // namespace webrtc


@@ -1,31 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_TEST_TESTAPI_COCOA_RENDERER_H_
#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_TEST_TESTAPI_COCOA_RENDERER_H_
#import <Cocoa/Cocoa.h>
#import <OpenGL/gl.h>
#import <OpenGL/glu.h>
#import <OpenGL/OpenGL.h>
@interface CocoaRenderer : NSOpenGLView {
NSOpenGLContext* _nsOpenGLContext;
int _screen;
}
@property (nonatomic, retain)NSOpenGLContext* _nsOpenGLContext;
@property int screen;
- (void)initCocoaRenderer:(NSOpenGLPixelFormat*)fmt;
@end
#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_TEST_TESTAPI_COCOA_RENDERER_H_


@@ -1,19 +0,0 @@
//
// CocoaRenderer.mm
// testCocoaCommandLine
#import <Cocoa/Cocoa.h>
#import <AppKit/AppKit.h>
#import "cocoa_renderer.h"
@implementation CocoaRenderer
@synthesize _nsOpenGLContext;
@synthesize screen = _screen;
- (void)initCocoaRenderer:(NSOpenGLPixelFormat*)fmt{
self = [super initWithFrame:[self frame] pixelFormat:[fmt autorelease]];
if (self != nil)
{
_nsOpenGLContext = [self openGLContext];
}
}
@end


@@ -1,41 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "Renderer.h"
#define nil NULL
#define NO false
namespace webrtc {
jobject Renderer::g_renderWindow = NULL;
int WebRtcCreateWindow(void** os_specific_handle, int /*winNum*/,
int /*width*/, int /*height*/) {
// jobject is a pointer type, hence a pointer to it is a
// pointer-to-pointer, which makes it castable from void**.
jobject* window = (jobject*)os_specific_handle;
*window = Renderer::g_renderWindow;
return 0;
}
void SetWindowPos(void ** /*os_specific_handle*/, int /*x*/, int /*y*/,
int /*width*/, int /*height*/, bool /*onTop*/) {
// Do nothing.
}
void Renderer::SetRenderWindow(jobject renderWindow) {
__android_log_print(ANDROID_LOG_DEBUG,
"VideoCaptureModule -testAPI",
"Renderer::SetRenderWindow");
g_renderWindow=renderWindow;
}
} // namespace webrtc


@@ -1,25 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "Renderer.h"
namespace webrtc {
int WebRtcCreateWindow(void** os_specific_handle, int winNum,
int width, int height) {
return 0;
}
void SetWindowPos(void** os_specific_handle, int x, int y,
int width, int height, bool onTop) {
// Do nothing.
}
} // namespace webrtc


@@ -1,80 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "Renderer.h"
#include <stdio.h>
#import <AppKit/AppKit.h>
#import "cocoa_renderer.h"
#include "thread_wrapper.h"
#include "tick_util.h"
static int _screen = 0;
namespace webrtc {
int WebRtcCreateWindow(void** os_specific_handle, int winNum, int width, int height)
{
CocoaRenderer** cocoaRenderer = reinterpret_cast<CocoaRenderer**> (os_specific_handle);
NSAutoreleasePool* pool = [[NSAutoreleasePool alloc]init];
_screen = winNum = 0;
// In Cocoa, rendering is not done directly to a window like in Windows and Linux.
// It is rendererd to a Subclass of NSOpenGLView
// create cocoa container window
NSRect outWindowFrame = NSMakeRect(200, 800, width + 20, height + 20);
NSArray* screens = [NSScreen screens];
if(_screen >= [screens count])
{
// requesting screen
return -1;
}
NSScreen* screen = (NSScreen*)[screens objectAtIndex:_screen];
NSWindow* outWindow = [[NSWindow alloc] initWithContentRect:outWindowFrame
styleMask:NSTitledWindowMask
backing:NSBackingStoreBuffered
defer:NO screen:screen];
[outWindow orderOut:nil];
[outWindow setTitle:@"Cocoa Renderer"];
[outWindow setBackgroundColor:[NSColor blueColor]];
[[outWindow contentView] setAutoresizesSubviews:YES];
// create renderer and attach to window
NSRect cocoaRendererFrame = NSMakeRect(10, 10, width, height);
*cocoaRenderer = [[CocoaRenderer alloc] initWithFrame:cocoaRendererFrame];
[[outWindow contentView] addSubview:*cocoaRenderer];
// must tell Cocoa to draw the window, but any GUI work must be done on the main thread.
[outWindow performSelector:@selector(display)
onThread:[NSThread mainThread]
withObject:nil
waitUntilDone:YES];
[outWindow makeKeyAndOrderFront:NSApp];
[pool release];
return 0;
}
void SetWindowPos(void* os_specific_handle, int x, int y, int width, int height, bool onTop)
{
CocoaRenderer* cocoaRenderer = (CocoaRenderer*)os_specific_handle;
NSWindow* ownerWindow = [cocoaRenderer window];
NSRect ownerNewRect = NSMakeRect(x, y, width, height);
[ownerWindow setFrame:ownerNewRect display:YES];
}
} // namespace webrtc


@@ -1,95 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "Renderer.h"
#include <stdio.h>
#include <tchar.h>
#include <windows.h>
#include "thread_wrapper.h"
#include "tick_util.h"
#define SLEEP_10_SEC ::Sleep(10000)
#define GET_TIME_IN_MS timeGetTime
LRESULT CALLBACK WinProc( HWND hWnd,UINT uMsg,WPARAM wParam,LPARAM lParam)
{
switch(uMsg)
{
case WM_DESTROY:
break;
case WM_COMMAND:
break;
}
return DefWindowProc(hWnd,uMsg,wParam,lParam);
}
namespace webrtc {
int WebRtcCreateWindow(void** os_specific_handle, int winNum, int width, int height)
{
HWND* hwndMain = reinterpret_cast<HWND*> (os_specific_handle); // HWND is a pointer type
HINSTANCE hinst = GetModuleHandle(0);
WNDCLASSEX wcx;
wcx.hInstance = hinst;
wcx.lpszClassName = _T(" test camera delay");
wcx.lpfnWndProc = (WNDPROC)WinProc;
wcx.style = CS_DBLCLKS;
wcx.hIcon = LoadIcon (NULL, IDI_APPLICATION);
wcx.hIconSm = LoadIcon (NULL, IDI_APPLICATION);
wcx.hCursor = LoadCursor (NULL, IDC_ARROW);
wcx.lpszMenuName = NULL;
wcx.cbSize = sizeof (WNDCLASSEX);
wcx.cbClsExtra = 0;
wcx.cbWndExtra = 0;
wcx.hbrBackground = GetSysColorBrush(COLOR_3DFACE);
// Register our window class with the operating system.
RegisterClassEx (&wcx);
// Create the main window.
*hwndMain = CreateWindowEx(0, // no extended styles
wcx.lpszClassName, // class name
_T("Test Camera Delay"), // window name
WS_OVERLAPPED |WS_THICKFRAME, // overlapped window
0, // horizontal position
0, // vertical position
width, // width
height, // height
(HWND) NULL, // no parent or owner window
(HMENU) NULL, // class menu used
hinst, // instance handle
NULL); // no window creation data
if (!*hwndMain)
{
int error = GetLastError();
return -1;
}
// Show the window using the flag specified by the program
// that started the application, and send the application
// a WM_PAINT message.
ShowWindow(*hwndMain, SW_SHOWDEFAULT);
UpdateWindow(*hwndMain);
return 0;
}
void SetWindowPos(void* os_specific_handle, int x, int y, int width, int height, bool onTop)
{
HWND hwndMain = (HWND)os_specific_handle;
// Call the Windows API
SetWindowPos(hwndMain, HWND_TOP, x, y, width, height, 0);
}
} // namespace webrtc


@@ -1,64 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "testAPI.h"
#if defined(_WIN32)
#include <tchar.h>
#include <windows.h>
#include <cassert>
#include <fstream>
#include <iostream>
#include <string>
#elif defined(WEBRTC_LINUX)
#include <stdio.h>
#include <X11/Xlib.h>
#include <X11/Xutil.h>
#include <sys/time.h>
#endif
using namespace std;
#include <stdio.h>
#include "testExternalCapture.h"
#include "testPlatformDependent.h"
#include "testCameraEncoder.h"
void RunApiTest() {
int test_result = 0;
webrtc::testExternalCapture test;
test_result = test.DoTest();
printf("\nExternal capture test result %d\n", test_result);
webrtc::testPlatformDependent platform_dependent;
test_result = platform_dependent.DoTest();
printf("\nPlatform dependent test result %d\n", test_result);
webrtc::testCameraEncoder camera_encoder;
test_result = camera_encoder.DoTest();
printf("\nCamera encoder test result %d\n", test_result);
getchar();
}
// Note: The Mac main is implemented in testApi.mm.
#if defined(_WIN32)
int _tmain(int argc, _TCHAR* argv[])
#elif defined(WEBRTC_LINUX)
int main(int argc, char* argv[])
#endif // WEBRTC LINUX
#if !defined(WEBRTC_MAC)
{
RunApiTest();
return 0;
}
#endif // !WEBRTC_MAC


@@ -1,18 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_TEST_TESTAPI_TESTAPI_H
#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_TEST_TESTAPI_TESTAPI_H
// Runs the API test.
void RunApiTest();
#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_TEST_TESTAPI_TESTAPI_H


@@ -1,29 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "testAPI.h"
#import <Foundation/Foundation.h>
#import <Cocoa/Cocoa.h>
#import <AppKit/AppKit.h>
#import <QTKit/QTKit.h>
#import "cocoa_renderer.h"
#include <sys/time.h>
#include <iostream>
int main (int argc, const char * argv[]) {
NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];
[NSApplication sharedApplication];
RunApiTest();
[pool release];
return 0;
}


@@ -1,337 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "testCameraEncoder.h"
#include "trace.h"
#include "tick_util.h"
namespace webrtc
{
#ifndef _DEBUG
#undef assert
#define assert(_a) { \
if(!(_a)) \
{ \
LOG("Failed %s",#_a); \
} \
}
#endif
testCameraEncoder::testCameraEncoder(void)
{
Trace::CreateTrace();
Trace::SetLevelFilter(webrtc::kTraceAll);
Trace::SetTraceFile("testCameraEncoder.txt");
_captureInfo=VideoCaptureFactory::CreateDeviceInfo(5);
#ifdef RENDER_PREVIEW
_renderer=NULL;
_videoCoding=webrtc::VideoCodingModule::Createwebrtc::VideoCodingModule(5);
#endif
}
testCameraEncoder::~testCameraEncoder(void)
{
delete _captureInfo;
#ifdef RENDER_PREVIEW
if(_renderer)
delete _renderer;
if(_videoCoding)
{
webrtc::VideoCodingModule::Destroywebrtc::VideoCodingModule(_videoCoding);
}
#endif
Trace::ReturnTrace();
}
int testCameraEncoder::DoTest()
{
#ifdef RENDER_PREVIEW
if(!_renderer)
{
_renderer=new Renderer(true);
}
if(_videoCoding)
{
webrtc::VideoCodec inst;
memset(&inst,0,sizeof(inst));
inst.plType=122;
inst.width=640;
inst.height=480;
inst.codecType=webrtc::kVideoCodecH264;
strcpy(inst.plName,"H264");
_videoCoding->InitializeReceiver();
_videoCoding->RegisterReceiveCallback(this);
assert(_videoCoding->RegisterReceiveCodec(&inst,1,false)==0);
}
#endif
// Test one camera at the time
LOG("\n\nTesting Camera encoder\n");
for (WebRtc_UWord32 i=0;i<_captureInfo->NumberOfDevices();++i)
{
WebRtc_UWord8 name[256];
WebRtc_UWord8 uniqueID[256];
WebRtc_UWord8 productId[256];
_captureInfo->GetDeviceName(i,name,256,uniqueID,256,productId,256);
_captureModule= VideoCaptureFactory::Create(0,uniqueID);
_captureModule->AddRef();
_captureModule->RegisterCaptureDataCallback(*this);
VideoCaptureCapability capability;
LOG("Encoder support for device %s",uniqueID);
for (int capIndex=0;capIndex<
_captureInfo->NumberOfCapabilities(uniqueID);++capIndex)
{
assert(_captureInfo->GetCapability(uniqueID,capIndex,capability)==0);
if(capability.codecType==webrtc::kVideoCodecH264)
{
testCapability(capability);
}
else if(capability.codecType!=webrtc::kVideoCodecUnknown)
{
LOG("type %d width %d, height %d, framerate %d\n",
capability.codecType,capability.width,capability.height,capability.maxFPS);
testCapability(capability);
}
}
_captureModule->Release();
}
return 0;
}
int testCameraEncoder::testCapability(VideoCaptureCapability& capability)
{
webrtc::VideoCodec codec;
codec.height=(unsigned short)capability.height;
codec.width=(unsigned short) capability.width;
float bitrate=(float)(capability.height*capability.width*3)/1000; //3bits per pixel
codec.startBitrate=(unsigned int)bitrate;
codec.maxBitrate=codec.startBitrate*10;
codec.codecType=webrtc::kVideoCodecH264;
codec.codecSpecific.H264.profile=webrtc::kProfileBase;
_encodeInterface=NULL;
_encodeInterface=_captureModule->GetEncodeInterface(codec);
if(_encodeInterface)
assert(_encodeInterface);
_captureSettings.ResetAll();
_captureSettings.capability=capability;
assert(capability.width);
assert(capability.height);
assert(capability.maxFPS);
assert(capability.expectedCaptureDelay);
_captureSettings.lastRenderTimeMS=0;
_captureSettings.captureDelay=50;
WebRtc_UWord32 maxPayloadSize=1460;
LOG("\n\nTesting H264 width %d, height %d, framerate %d bitrate %d\n",
capability.width,capability.height,capability.maxFPS,codec.startBitrate);
_captureSettings.initStartTime=TickTime::MillisecondTimestamp();
assert(_captureModule->StartCapture(capability)==0);
_captureSettings.startTime=TickTime::MillisecondTimestamp();
_captureSettings.initStopTime=TickTime::MillisecondTimestamp();
if(_encodeInterface)
assert(_encodeInterface->ConfigureEncoder(codec,maxPayloadSize)==0);
WebRtc_Word32 testTime=10000;
while(TickTime::MillisecondTimestamp()-_captureSettings.startTime<testTime
&& _captureSettings.incomingFrames<200)
{
SLEEP(200);
}
int noIncomingFrames=_captureSettings.incomingFrames;
_captureSettings.bitrateMeasureTime=TickTime::MillisecondTimestamp();
WebRtc_UWord32 actualbitrate=(_captureSettings.noOfBytes*8)/
(WebRtc_UWord32)(_captureSettings.bitrateMeasureTime-_captureSettings.firstCapturedFrameTime);
_captureSettings.noOfBytes=0;
LOG("Current set bitrate %d, actual bitrate %d\n", codec.startBitrate,actualbitrate);
for(int bitRateChange=1;bitRateChange< 11;bitRateChange=bitRateChange*2)
{
float bitrate=(float)(capability.height*
capability.width*
(bitRateChange))/1000; //3bits per pixel
codec.startBitrate=(WebRtc_Word32) bitrate;
LOG("Changing bitrate to %d (%d bits per pixel/s)\n",
codec.startBitrate,bitRateChange);
assert(_encodeInterface->SetRates(codec.startBitrate,codec.maxFramerate)==0);
testTime=2000;
while(TickTime::MillisecondTimestamp()-
_captureSettings.bitrateMeasureTime<testTime)
{
SLEEP(200);
}
noIncomingFrames=_captureSettings.incomingFrames;
WebRtc_UWord32 actualbitrate=(_captureSettings.noOfBytes*8)/
(WebRtc_UWord32)(TickTime::MillisecondTimestamp()-
_captureSettings.bitrateMeasureTime);
_captureSettings.bitrateMeasureTime=TickTime::MillisecondTimestamp();
_captureSettings.noOfBytes=0;
LOG("Current set bitrate %d, actual bitrate %d\n",
codec.startBitrate,actualbitrate);
assert((actualbitrate<(1.2* codec.startBitrate))
&& (actualbitrate>0.8*codec.startBitrate));
}
_captureSettings.stopTime=TickTime::MillisecondTimestamp();
_captureSettings.stopStartTime=TickTime::MillisecondTimestamp();
assert(_captureModule->StopCapture()==0);
_captureSettings.stopStopTime=TickTime::MillisecondTimestamp();
EvaluateTestResult();
return 0;
}
void testCameraEncoder::OnIncomingCapturedFrame(const WebRtc_Word32 id,
VideoFrame& videoFrame,
webrtc::VideoCodecType codecType)
{
_captureSettings.incomingFrames++;
_captureSettings.noOfBytes+=videoFrame.Length();
int height = static_cast<int>(videoFrame.Height());
int width = static_cast<int>(videoFrame.Width());
assert(height==_captureSettings.capability.height);
assert(width==_captureSettings.capability.width);
assert(videoFrame.RenderTimeMs()>=(TickTime::MillisecondTimestamp()-30)); // RenderTimstamp should be the time now
if((videoFrame.RenderTimeMs()>_captureSettings.lastRenderTimeMS
+(1000*1.2)/_captureSettings.capability.maxFPS
&& _captureSettings.lastRenderTimeMS>0)
||
(videoFrame.RenderTimeMs()<_captureSettings.lastRenderTimeMS+(1000*0.8)
/_captureSettings.capability.maxFPS && _captureSettings.lastRenderTimeMS>0))
{
_captureSettings.timingWarnings++;
}
if(_captureSettings.lastRenderTimeMS==0)
{
_captureSettings.firstCapturedFrameTime=TickTime::MillisecondTimestamp();
}
_captureSettings.lastRenderTimeMS=videoFrame.RenderTimeMs();
if(codecType==webrtc::kVideoCodecH264)
{
WebRtc_UWord8* ptrBuffer=videoFrame.Buffer();
if(ptrBuffer[0]!=0 || ptrBuffer[1]!=0 || ptrBuffer[2]!=0 || ptrBuffer[3]!=1)
{
assert(!"frame does not start with NALU header");
}
if(ptrBuffer[4]==0x67)
{
_captureSettings.idrFrames++;
LOG("Got IDR frame frame no %d. total number of IDR frames %d \n",
_captureSettings.incomingFrames,_captureSettings.idrFrames);
}
}
#ifdef RENDER_PREVIEW
if(codecType==webrtc::kVideoCodecH264)
{
VideoEncodedData encodedFrame;
memset(&encodedFrame,0,sizeof(encodedFrame));
encodedFrame.codec=webrtc::kVideoCodecH264;
encodedFrame.encodedHeight=videoFrame.Height();
encodedFrame.encodedWidth=videoFrame.Width();
encodedFrame.renderTimeMs=videoFrame.RenderTimeMs();
encodedFrame.timeStamp=90* (WebRtc_UWord32) videoFrame.RenderTimeMs();
encodedFrame.payloadData=(WebRtc_UWord8*) malloc(videoFrame.Length());
memcpy(encodedFrame.payloadData,videoFrame.Buffer(),videoFrame.Length());
encodedFrame.payloadSize=videoFrame.Length();
encodedFrame.bufferSize=videoFrame.Length();
encodedFrame.payloadType=122;
_videoCoding->DecodeFromStorage(encodedFrame);
}
if(codecType==webrtc::kVideoCodecUnknown)
{
_renderer->RenderFrame(videoFrame);
}
#endif
}
void testCameraEncoder::OnCaptureDelayChanged(const WebRtc_Word32 id,
const WebRtc_Word32 delay)
{
_captureSettings.captureDelay=delay;
}
#ifdef RENDER_PREVIEW
WebRtc_Word32 testCameraEncoder::FrameToRender(VideoFrame& videoFrame)
{
_renderer->RenderFrame(videoFrame);
return 0;
}
#endif
void testCameraEncoder::EvaluateTestResult()
{
CaptureSetting& captureResult=_captureSettings;
WebRtc_UWord64 timeToFirstFrame=captureResult.firstCapturedFrameTime-captureResult.startTime;
WebRtc_UWord64 timeToStart=captureResult.initStopTime-captureResult.initStartTime;
WebRtc_UWord64 timeToStop=captureResult.stopStopTime-captureResult.stopStartTime;
assert(timeToStart<4000);
assert(timeToStop<3000);
assert((timeToFirstFrame<3500) && (timeToFirstFrame>0)); // Assert if it takes more than 3500ms to start.
WebRtc_Word64 expectedNumberOfFrames=((captureResult.stopTime
-captureResult.startTime
-timeToFirstFrame)
*captureResult.capability.maxFPS)/1000;
assert(captureResult.incomingFrames>0.6*expectedNumberOfFrames); // Make sure at least 60% of the expected frames have been received from the camera
LOG(" No Captured %d,expected %d, \n timingWarnings %d, time to first %lu\n"
" time to start %lu, time to stop %lu\n idr frames %u\n",
captureResult.incomingFrames,(int)(expectedNumberOfFrames),
captureResult.timingWarnings,
(long) timeToFirstFrame,
(long) timeToStart,
(long) timeToStop,
_captureSettings.idrFrames);
captureResult.ResetSettings();
}
} // namespace webrtc


@@ -1,135 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#pragma once
#include "video_capture_factory.h"
//#define RENDER_PREVIEW
#ifdef RENDER_PREVIEW
#include "Renderer.h"
#include "video_coding.h"
#include "module_common_types.h"
#endif
#if defined (WEBRTC_MAC_INTEL) || defined (WEBRTC_LINUX)
#include "Logger.h"
#else
#include "Logger.h"
#endif
#include "testDefines.h"
namespace webrtc
{
class testCameraEncoder: private VideoCaptureDataCallback
#ifdef RENDER_PREVIEW
,VCMReceiveCallback
#endif
{
public:
testCameraEncoder(void);
~testCameraEncoder(void);
int DoTest();
private:
int testCapability(VideoCaptureCapability& capability);
// Implement VideoCaptureDataCallback
virtual void OnIncomingCapturedFrame(const WebRtc_Word32 id,
VideoFrame& videoFrame,
webrtc::VideoCodecType codecType);
virtual void OnCaptureDelayChanged(const WebRtc_Word32 id,
const WebRtc_Word32 delay);
void EvaluateTestResult();
#ifdef RENDER_PREVIEW
//Implements webrtc::VCMReceiveCallback
virtual WebRtc_Word32 FrameToRender(VideoFrame& videoFrame);
#endif
VideoCaptureModule* _captureModule;
VideoCaptureModule::DeviceInfo* _captureInfo;
VideoCaptureModule::VideoCaptureEncodeInterface* _encodeInterface;
#ifdef RENDER_PREVIEW
Renderer*_renderer;
webrtc::VideoCodingModule* _videoCoding;
#endif
struct CaptureSetting
{
VideoCaptureCapability capability;
WebRtc_Word32 captureDelay;
WebRtc_Word64 lastRenderTimeMS;
WebRtc_Word32 incomingFrames;
WebRtc_Word32 timingWarnings;
WebRtc_Word64 startTime;
WebRtc_Word64 stopTime;
WebRtc_Word64 initStartTime;
WebRtc_Word64 initStopTime;
WebRtc_Word64 stopStartTime;
WebRtc_Word64 stopStopTime;
WebRtc_Word64 bitrateMeasureTime;
WebRtc_Word32 noOfBytes;
WebRtc_Word32 idrFrames;
WebRtc_Word64 firstCapturedFrameTime;
CaptureSetting()
{
ResetAll();
}
void ResetSettings()
{
capability.width=0;
capability.height=0;
capability.maxFPS=0;
captureDelay=0;
lastRenderTimeMS=0;
incomingFrames=0;
timingWarnings=0;
startTime=0;
stopTime=0;
firstCapturedFrameTime=0;
noOfBytes=0;
idrFrames=0;
bitrateMeasureTime=0;
}
void ResetAll()
{
ResetSettings();
initStartTime=0;
initStopTime=0;
stopStartTime=0;
stopStopTime=0;
}
};
Logger _logger;
CaptureSetting _captureSettings;
};
} // namespace webrtc


@@ -1,54 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* testDefines.h
*/
#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_TEST_TESTAPI_TESTDEFINES_H_
#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_TEST_TESTAPI_TESTDEFINES_H_
#if defined (WEBRTC_ANDROID)
#include <android/log.h>
#include <unistd.h>
#endif
#if defined(_WIN32)
#define SLEEP(x) Sleep(x)
#define SPRINTF(x, y, z, ...) sprintf_s(x, y, z, __VA_ARGS__)
#define LOG(...) { \
char msg[512]; \
sprintf_s(msg,512,__VA_ARGS__); \
_logger.Print(msg); \
}
#elif defined (WEBRTC_ANDROID)
#define LOG(...) { \
char msg[512]; \
sprintf(msg,__VA_ARGS__); \
__android_log_print(ANDROID_LOG_DEBUG, \
"VideoCaptureModule -testAPI", __VA_ARGS__); \
_logger.Print(msg); \
}
#define SLEEP(x) usleep(x*1000)
#define SPRINTF(x, y, z, ...) sprintf(x, z, __VA_ARGS__)
#else
#include <unistd.h>
#define SLEEP(x) usleep(x * 1000)
#define SPRINTF(x, y, z, ...) sprintf(x, z, __VA_ARGS__)
#define LOG(...) { \
char msg[512]; \
sprintf(msg, __VA_ARGS__); \
printf("%s\n", msg); \
}
#endif
#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_TEST_TESTAPI_TESTDEFINES_H_


@@ -1,184 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "testExternalCapture.h"
#include "tick_util.h"
#include "process_thread.h"
#include "stdio.h"
namespace webrtc
{
static int testExternalCaptureResult = 0;
#ifdef NDEBUG
#if defined(WEBRTC_MAC_INTEL)
#else
#undef assert
#define assert(p) if(!(p)){printf("Error line %d\n",__LINE__);testExternalCaptureResult=-1;}
#endif
#endif
void testExternalCapture::CreateInterface()
{
_captureModule = VideoCaptureFactory::Create(1, _captureInteface);
_captureModule->AddRef();
}
testExternalCapture::testExternalCapture(void)
: _captureInteface(NULL), _captureModule(NULL)
{
}
int testExternalCapture::CompareFrames(const VideoFrame& frame1,
const VideoFrame& frame2)
{
assert(frame1.Length()==frame2.Length());
assert(frame1.Width()==frame2.Width());
assert(frame1.Height()==frame2.Height());
//assert(frame1.RenderTimeMs()==frame2.RenderTimeMs());
for (unsigned int i = 0; i < frame1.Length(); ++i)
assert(*(frame1.Buffer()+i)==*(frame2.Buffer()+i));
return 0;
}
testExternalCapture::~testExternalCapture(void)
{
_captureModule->Release();
}
void testExternalCapture::OnIncomingCapturedFrame(
const WebRtc_Word32 ID,
VideoFrame& videoFrame,
webrtc::VideoCodecType codecType)
{
_resultFrame.CopyFrame(videoFrame);
_frameCount++;
}
void testExternalCapture::OnCaptureDelayChanged(const WebRtc_Word32 ID,
const WebRtc_Word32 delay)
{
}
void testExternalCapture::OnCaptureFrameRate(const WebRtc_Word32 id,
const WebRtc_UWord32 frameRate)
{
printf("OnCaptureFrameRate %d, frameRate %d\n", id, frameRate);
_reportedFrameRate = frameRate;
}
void testExternalCapture::OnNoPictureAlarm(const WebRtc_Word32 id,
const VideoCaptureAlarm alarm)
{
printf("OnNoPictureAlarm %d, alarm %d\n", id, alarm);
_captureAlarm = alarm;
}
int testExternalCapture::DoTest()
{
int height = 288;
int width = 352;
printf("Platform independent test\n");
CreateInterface();
ProcessThread* processModule = ProcessThread::CreateProcessThread();
processModule->Start();
processModule->RegisterModule(_captureModule);
_testFrame.VerifyAndAllocate(height * width * 3 / 2);
_testFrame.SetLength(height * width * 3 / 2);
_testFrame.SetHeight(height);
_testFrame.SetWidth(width);
memset(_testFrame.Buffer(), 0, 1);
assert(_captureModule->RegisterCaptureDataCallback(*this)==0);
assert(_captureModule->RegisterCaptureCallback(*this)==0);
assert(_captureModule->EnableFrameRateCallback(true)==0);
assert(_captureModule->EnableNoPictureAlarm(true)==0);
VideoCaptureCapability frameInfo;
frameInfo.width = width;
frameInfo.height = height;
frameInfo.rawType = webrtc::kVideoYV12;
assert(_captureInteface->IncomingFrame(_testFrame.Buffer(),
_testFrame.Length(),
frameInfo,0)==0);
CompareFrames(_testFrame, _resultFrame);
printf(" testing the IncomingFrameI420 interface.\n");
VideoFrameI420 frame_i420;
frame_i420.width = width;
frame_i420.height = height;
frame_i420.y_plane = _testFrame.Buffer();
frame_i420.u_plane = frame_i420.y_plane + (width * height);
frame_i420.v_plane = frame_i420.u_plane + ((width * height) >> 2);
frame_i420.y_pitch = width;
frame_i420.u_pitch = width / 2;
frame_i420.v_pitch = width / 2;
assert(_captureInteface->IncomingFrameI420(frame_i420, 0) == 0);
CompareFrames(_testFrame, _resultFrame);
printf(" testing local frame rate callback and no picture alarm.\n");
WebRtc_Word64 testTime = 3;
_reportedFrameRate = 0;
_captureAlarm = Cleared;
TickTime startTime = TickTime::Now();
while ((TickTime::Now() - startTime).Milliseconds() < testTime * 1000)
{
assert(_captureInteface->IncomingFrame(_testFrame.Buffer(),
_testFrame.Length(),
frameInfo,0)==0);
SLEEP(100);
}
assert(_reportedFrameRate==10);
SLEEP(500); // Make sure the no picture alarm is triggered
assert(_captureAlarm==Raised);
testTime = 3;
startTime = TickTime::Now();
while ((TickTime::Now() - startTime).Milliseconds() < testTime * 1000)
{
assert(_captureInteface->IncomingFrame(_testFrame.Buffer(),
_testFrame.Length(),
frameInfo,0)==0);
SLEEP(33);
}
assert(_captureAlarm==Cleared);
assert(_reportedFrameRate==30);
//Test start image
printf(" testing start send image.\n");
testTime = 3;
startTime = TickTime::Now();
_frameCount = 0;
assert(_captureModule->StartSendImage(_testFrame,15)==0);
while ((TickTime::Now() - startTime).Milliseconds() < testTime * 1000)
{
SLEEP(33);
}
assert(_captureModule->StopSendImage()==0);
assert(_frameCount>=testTime*15-1 && _frameCount<=testTime*15+1);
assert(_captureAlarm==Raised);
CompareFrames(_testFrame, _resultFrame);
SLEEP(1000);
assert(_frameCount>=testTime*15-1 && _frameCount<=testTime*15+1);
processModule->Stop();
ProcessThread::DestroyProcessThread(processModule);
return testExternalCaptureResult;
}
} // namespace webrtc


@@ -1,61 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_TEST_TESTAPI_TESTEXTERNALCAPTURE_H_
#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_TEST_TESTAPI_TESTEXTERNALCAPTURE_H_
#include "testDefines.h"
#include "video_capture_factory.h"
namespace webrtc
{
class testExternalCapture
: public VideoCaptureDataCallback, public VideoCaptureFeedBack
{
public:
testExternalCapture(void);
~testExternalCapture(void);
void CreateInterface();
int DoTest();
// from VideoCaptureDataCallback
virtual void OnIncomingCapturedFrame(const WebRtc_Word32 id,
VideoFrame& videoFrame,
VideoCodecType = kVideoCodecUnknown);
virtual void OnCaptureDelayChanged(const WebRtc_Word32 id,
const WebRtc_Word32 delay);
//VideoCaptureFeedBack
virtual void OnCaptureFrameRate(const WebRtc_Word32 id,
const WebRtc_UWord32 frameRate);
//VideoCaptureFeedBack
virtual void OnNoPictureAlarm(const WebRtc_Word32 id,
const VideoCaptureAlarm alarm);
private:
int CompareFrames(const VideoFrame& frame1, const VideoFrame& frame2);
VideoCaptureExternal* _captureInteface;
VideoCaptureModule* _captureModule;
VideoFrame _testFrame;
VideoFrame _resultFrame;
WebRtc_Word32 _reportedFrameRate;
VideoCaptureAlarm _captureAlarm;
WebRtc_Word32 _frameCount;
};
} //namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_TEST_TESTAPI_TESTEXTERNALCAPTURE_H_


@@ -1,494 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "testPlatformDependent.h"
#include <stdio.h>
#include "trace.h"
#include "tick_util.h"
namespace webrtc
{
static int testPlatformDependentResult = 0;
#ifdef _WIN32
#include <Windows.h>
#endif
#if defined( _DEBUG) && defined (_WIN32)
//#include "vld.h"
#endif
#ifdef NDEBUG
#if defined(WEBRTC_MAC_INTEL)
#else
#undef assert
#define assert(p) if(!(p)){LOG("Error line %d\n",__LINE__);testPlatformDependentResult=-1;}
#endif
#endif
testPlatformDependent::testPlatformDependent(void) :
_captureModule(NULL), _noOfCameras(0)
{
Trace::CreateTrace();
Trace::SetLevelFilter(webrtc::kTraceAll);
Trace::SetTraceFile("testPlatformDependent.txt");
_captureInfo = VideoCaptureFactory::CreateDeviceInfo(5);
#ifdef RENDER_PREVIEW
memset(_renderer, 0, sizeof(_renderer));
#endif
}
testPlatformDependent::~testPlatformDependent(void)
{
delete _captureInfo;
#ifdef RENDER_PREVIEW
if (_renderer[0])
delete _renderer[0];
if (_renderer[1])
delete _renderer[1];
if (_renderer[2])
delete _renderer[2];
if (_renderer[3])
delete _renderer[3];
#endif
Trace::ReturnTrace();
}
//FILE* file=NULL;
void testPlatformDependent::OnIncomingCapturedFrame(const WebRtc_Word32 id,
VideoFrame& videoFrame,
VideoCodecType /*codecType*/)
{
VerifyResultFrame(id, videoFrame);
//LOG("OnIncomingCapturedFrame, width %d height %d id %d length %d\n",
// videoFrame.Width(), videoFrame.Height(),id,videoFrame.Length());
/* if(file==NULL)
{
file = fopen("/sdcard/testPlatform.yuv","wb");
LOG("\nOnIncomingCapturedFrame, open file\n");
}
if(file)
{
fwrite(videoFrame.Buffer(),videoFrame.Length(),1,file);
fflush(file);
}*/
#ifdef RENDER_PREVIEW
if (id < 4 && _renderer[id])
{
_renderer[id]->RenderFrame(videoFrame);
}
#endif
}
void testPlatformDependent::OnCaptureDelayChanged(
const WebRtc_Word32 settingID,
const WebRtc_Word32 delay)
{
bool found = false;
for (WebRtc_UWord32 i = 0; i < _noOfCameras; ++i)
{
if (settingID == _captureSettings[i].settingID)
{
found = true;
_captureSettings[0].captureDelay = delay;
}
}
assert(found);
}
void testPlatformDependent::VerifyResultFrame(const WebRtc_Word32 settingID,
const VideoFrame& videoFrame)
{
bool found = false;
for (WebRtc_UWord32 i = 0; i < _noOfCameras; ++i)
{
if (settingID == _captureSettings[i].settingID)
{
found = true;
int height = static_cast<int>(videoFrame.Height());
int width = static_cast<int>(videoFrame.Width());
assert(height==_captureSettings[i].capability.height);
assert(width==_captureSettings[i].capability.width);
assert(videoFrame.RenderTimeMs()>=TickTime::MillisecondTimestamp()-30); // RenderTimstamp should be the time now
if ((videoFrame.RenderTimeMs()
> _captureSettings[i].lastRenderTimeMS + (1000 * 1.1)
/ _captureSettings[i].capability.maxFPS
&& _captureSettings[i].lastRenderTimeMS > 0)
|| (videoFrame.RenderTimeMs()
< _captureSettings[i].lastRenderTimeMS + (1000 * 0.9)
/ _captureSettings[i].capability.maxFPS
&& _captureSettings[i].lastRenderTimeMS > 0))
{
_captureSettings[i].timingWarnings++;
}
if (_captureSettings[i].lastRenderTimeMS == 0)
{
_captureSettings[i].firstCapturedFrameTime
= TickTime::MillisecondTimestamp();
}
_captureSettings[i].incomingFrames++;
_captureSettings[i].lastRenderTimeMS = videoFrame.RenderTimeMs();
}
}
assert(found);
}
WebRtc_Word32 testPlatformDependent::testCreateDelete(
const WebRtc_UWord8* uniqueID)
{
WebRtc_Word32 testTime = 8000;
WebRtc_Word32 numOfLoops = 7;
LOG("\n\nTesting create /delete - start stop of camera %s\n",(char*)uniqueID);
for (WebRtc_Word32 i = 0; i < numOfLoops; ++i)
{
LOG("Loop %d of %d\n",(int) i, (int) numOfLoops);
_captureSettings[0].settingID = 0;
#ifndef WEBRTC_MAC
_captureInfo->GetCapability(uniqueID, 0, _captureSettings[0].capability);
#else
_captureSettings[0].capability.width = 352;
_captureSettings[0].capability.height = 288;
_captureSettings[0].capability.maxFPS = 30;
_captureSettings[0].capability.rawType = kVideoUnknown;
#endif
_captureSettings[0].startTime = TickTime::MillisecondTimestamp();
_captureSettings[0].initStartTime = TickTime::MillisecondTimestamp();
_captureSettings[0].captureModule =
VideoCaptureFactory::Create(0, uniqueID);
_captureSettings[0].captureModule->AddRef();
assert(!_captureSettings[0].captureModule->CaptureStarted());
assert(_captureSettings[0].captureModule); // Test that it is created
assert(!_captureSettings[0].captureModule->RegisterCaptureDataCallback(*this));
VideoCaptureCapability capability;
assert(_captureSettings[0].captureModule->CaptureSettings(capability)==0);
assert(_captureSettings[0].captureModule->StartCapture(
_captureSettings[0].capability) ==0);
assert(_captureSettings[0].captureModule->CaptureStarted());
assert(_captureSettings[0].captureModule->CaptureSettings(
capability) ==0);
_captureSettings[0].initStopTime = TickTime::MillisecondTimestamp();
assert(capability==_captureSettings[0].capability);
WebRtc_Word64 timeNow = TickTime::MillisecondTimestamp();
while (_captureSettings[0].incomingFrames <= 5
&& testTime > timeNow - _captureSettings[0].startTime)
{
SLEEP(100);
timeNow = TickTime::MillisecondTimestamp();
}
_captureSettings[0].stopTime = TickTime::MillisecondTimestamp();
_captureSettings[0].stopStartTime = TickTime::MillisecondTimestamp();
assert(_captureSettings[0].captureModule->StopCapture()==0);
assert(!_captureSettings[0].captureModule->CaptureStarted());
_captureSettings[0].captureModule->Release();
_captureSettings[0].stopStopTime = TickTime::MillisecondTimestamp();
assert((_captureSettings[0].incomingFrames >= 5)); // Make sure at least 5 frames has been captured
EvaluateTestResult(_captureSettings[0]);
_captureSettings[0].ResetAll();
}
LOG("Test Done\n");
return testPlatformDependentResult;
}
WebRtc_Word32 testPlatformDependent::testCapabilities(
const WebRtc_UWord8* uniqueID)
{
#ifndef WEBRTC_MAC
LOG("\n\nTesting capture capabilities\n");
_captureSettings[0].captureModule = VideoCaptureFactory::Create(0, uniqueID);
assert(_captureSettings[0].captureModule); // Test that it is created
_captureSettings[0].captureModule->AddRef();
assert(!_captureSettings[0].captureModule->RegisterCaptureDataCallback(*this));
WebRtc_Word32 numOfCapabilities =
_captureInfo->NumberOfCapabilities(uniqueID);
assert(numOfCapabilities);
bool oneValidCap = false;
WebRtc_Word32 testTime = 4000;
for (WebRtc_Word32 j = 0; j < numOfCapabilities; ++j)
{
VideoCaptureCapability capability;
int b = (_captureInfo->GetCapability(uniqueID, j, capability) == 0);
assert(b);
assert(capability.width);
assert(capability.height);
assert(capability.maxFPS);
assert(capability.expectedCaptureDelay);
oneValidCap = true;
_captureSettings[0].lastRenderTimeMS = 0;
_captureSettings[0].settingID = 0;
_captureSettings[0].captureDelay = 50;
_captureSettings[0].capability = capability;
LOG("\n\n Starting camera: capability %d, width %u, height %u,"
" framerate %u, color %d.\n",
(int) j,(unsigned int)_captureSettings[0].capability.width,
(unsigned int)_captureSettings[0].capability.height,
(unsigned int) _captureSettings[0].capability.maxFPS,(int) capability.rawType);
_captureSettings[0].initStartTime = TickTime::MillisecondTimestamp();
assert(_captureSettings[0].captureModule->StartCapture(_captureSettings[0].capability)==0);
_captureSettings[0].startTime = TickTime::MillisecondTimestamp();
_captureSettings[0].initStopTime = TickTime::MillisecondTimestamp();
while (TickTime::MillisecondTimestamp() - _captureSettings[0].startTime
< testTime && _captureSettings[0].incomingFrames < 600)
{
SLEEP(200);
}
_captureSettings[0].stopTime = TickTime::MillisecondTimestamp();
_captureSettings[0].stopStartTime = TickTime::MillisecondTimestamp();
assert(_captureSettings[0].captureModule->StopCapture()==0);
_captureSettings[0].stopStopTime = TickTime::MillisecondTimestamp();
EvaluateTestResult(_captureSettings[0]);
}
assert(oneValidCap); // Make sure the camera supports at least one capability.
_captureSettings[0].captureModule->Release();
_captureSettings[0].ResetAll();
return testPlatformDependentResult;
#else
// GetCapability() is not supported on Mac.
return 0;
#endif
}
WebRtc_Word32 testPlatformDependent::testMultipleCameras()
{
// Test multiple cameras
LOG("\n\nTesting all cameras simultanously\n");
_noOfCameras = _captureInfo->NumberOfDevices();
WebRtc_Word32 testTime = 20000;
for (WebRtc_UWord32 i = 0; i < _noOfCameras; ++i)
{
#ifdef RENDER_PREVIEW
if (!_renderer[i])
{
_renderer[i] = new Renderer(true);
}
#endif
WebRtc_UWord8 id[256];
_captureInfo->GetDeviceName(i, _captureSettings[i].captureName, 256,
id, 256);
WebRtc_UWord8* name = _captureSettings[i].captureName;
LOG("\n\n Found capture device %u\n name %s\n unique name %s\n"
,(unsigned int) i,(char*) name, (char*)id);
_captureSettings[i].captureModule = VideoCaptureFactory::Create(i, id);
assert(_captureSettings[i].captureModule); // Test that it is created.
_captureSettings[i].captureModule->AddRef();
assert(!_captureSettings[i].captureModule->RegisterCaptureDataCallback(*this));
_captureSettings[i].lastRenderTimeMS = 0;
_captureSettings[i].settingID = i;
_captureSettings[i].captureDelay = 0;
_captureSettings[i].capability.maxFPS = 30;
_captureSettings[i].capability.width = 640;
_captureSettings[i].capability.height = 480;
LOG("\n\n Starting camera %s.\n",name);
_captureSettings[i].captureModule->StartCapture(
_captureSettings[i].capability);
_captureSettings[i].startTime = TickTime::MillisecondTimestamp();
}
SLEEP(testTime);
for (WebRtc_UWord32 i = 0; i < _noOfCameras; ++i)
{
_captureSettings[i].stopTime = TickTime::MillisecondTimestamp();
_captureSettings[i].captureModule->StopCapture();
EvaluateTestResult(_captureSettings[i]);
_captureSettings[i].captureModule->Release();
_captureSettings[i].ResetAll();
}
return testPlatformDependentResult;
}
void testPlatformDependent::SetRenderer(Renderer* renderer)
{
LOG("\ntestPlatformDependent::SetRenderer()\n");
#ifdef RENDER_PREVIEW
_renderer[0] = renderer;
#endif
}
WebRtc_Word32 testPlatformDependent::testRotation(const WebRtc_UWord8* uniqueID)
{
LOG("\n\nTesting capture Rotation\n");
_captureSettings[0].captureModule =
VideoCaptureFactory::Create(0, uniqueID);
assert(_captureSettings[0].captureModule); // Test that it is created.
_captureSettings[0].captureModule->AddRef();
assert(!_captureSettings[0].captureModule->RegisterCaptureDataCallback(*this));
#ifndef WEBRTC_MAC
assert(_captureInfo->GetCapability(uniqueID,0,_captureSettings[0].capability)==0);
#else
// GetCapability not supported on Mac
_captureSettings[0].capability.width = 352;
_captureSettings[0].capability.height = 288;
_captureSettings[0].capability.maxFPS = 30;
_captureSettings[0].capability.rawType = kVideoUnknown;
#endif
WebRtc_Word32 testTime = 4000;
_captureSettings[0].lastRenderTimeMS = 0;
_captureSettings[0].settingID = 0;
_captureSettings[0].captureDelay = 50;
LOG("\n\n Starting camera: width %u, height %u, framerate %u, color %d.\n",
(unsigned int)_captureSettings[0].capability.width,
(unsigned int)_captureSettings[0].capability.height,
(unsigned int) _captureSettings[0].capability.maxFPS,
(int) _captureSettings[0].capability.rawType);
_captureSettings[0].initStartTime = TickTime::MillisecondTimestamp();
assert(_captureSettings[0].captureModule->StartCapture(_captureSettings[0].capability)==0);
_captureSettings[0].startTime = TickTime::MillisecondTimestamp();
_captureSettings[0].initStopTime = TickTime::MillisecondTimestamp();
LOG("\nSetting capture rotation 0\n");
assert(_captureSettings[0].captureModule->SetCaptureRotation(kCameraRotate0)==0);
while (TickTime::MillisecondTimestamp() - _captureSettings[0].startTime
< testTime)
{
SLEEP(200);
}
LOG("\nSetting capture rotation 90\n");
assert(_captureSettings[0].captureModule->SetCaptureRotation(kCameraRotate90)==0);
while (TickTime::MillisecondTimestamp() - _captureSettings[0].startTime
< testTime * 2)
{
SLEEP(200);
}
LOG("\nSetting capture rotation 180\n");
assert(_captureSettings[0].captureModule->SetCaptureRotation(kCameraRotate180)==0);
while (TickTime::MillisecondTimestamp() - _captureSettings[0].startTime
< testTime * 3)
{
SLEEP(200);
}
LOG("\nSetting capture rotation 270\n");
assert(_captureSettings[0].captureModule->SetCaptureRotation(kCameraRotate270)==0);
while (TickTime::MillisecondTimestamp() - _captureSettings[0].startTime
< testTime * 4)
{
SLEEP(200);
}
_captureSettings[0].stopTime = TickTime::MillisecondTimestamp();
_captureSettings[0].stopStartTime = TickTime::MillisecondTimestamp();
assert(_captureSettings[0].captureModule->StopCapture()==0);
_captureSettings[0].stopStopTime = TickTime::MillisecondTimestamp();
EvaluateTestResult(_captureSettings[0]);
_captureSettings[0].captureModule->Release();
_captureSettings[0].ResetAll();
return testPlatformDependentResult;
}
int testPlatformDependent::DoTest()
{
LOG("\ntestPlatformDependent::DoTest()\n");
#ifdef RENDER_PREVIEW
if (!_renderer[0])
{
_renderer[0] = new Renderer(true);
}
#endif
// Test one camera at a time.
LOG("\n\nTesting one camera at a time\n");
_noOfCameras = _captureInfo->NumberOfDevices();
for (WebRtc_UWord32 i = 0; i < _noOfCameras; ++i)
{
WebRtc_UWord8 name[256];
WebRtc_UWord8 uniqueID[256];
WebRtc_UWord8 productId[256];
memset(productId, 0, sizeof(productId));
_captureInfo->GetDeviceName(i, name, 256, uniqueID, 256, productId, 256);
char logFileName[512];
SPRINTF(logFileName,512,"testPlatformDependent%s_%s.txt",(char*)name,(char*)productId);
_logger.SetFileName(logFileName);
WebRtc_Word32 cap = _captureInfo->NumberOfCapabilities(uniqueID);
LOG("\n\n Found capture device %u\n "
" name %s\n Capabilities %d, unique name %s \n",
(unsigned int) i,name,(int) cap,(char*) uniqueID);
testCreateDelete(uniqueID);
testCapabilities(uniqueID);
testRotation(uniqueID);
}
#ifndef WEBRTC_ANDROID
_logger.SetFileName("testPlatformDependent_multipleCameras.txt");
testMultipleCameras();
#endif
LOG("\n\ntestPlatformDependent done\n");
return 0;
}
void testPlatformDependent::EvaluateTestResult(CaptureSetting& captureResult)
{
WebRtc_UWord64 timeToFirstFrame = captureResult.firstCapturedFrameTime
- captureResult.startTime;
WebRtc_UWord64 timeToStart = captureResult.initStopTime
- captureResult.initStartTime;
WebRtc_UWord64 timeToStop = captureResult.stopStopTime
- captureResult.stopStartTime;
assert(timeToStart<4000);
assert(timeToStop<3000);
assert((timeToFirstFrame<3500) && (timeToFirstFrame>0)); // Fail if it takes more than 3500 ms to deliver the first frame.
WebRtc_Word64 expectedNumberOfFrames = ((captureResult.stopTime
- captureResult.startTime - timeToFirstFrame)
* captureResult.capability.maxFPS) / 1000;
assert(captureResult.incomingFrames>0.50*expectedNumberOfFrames); // Make sure at least 50% of the expected frames have been received from the camera
LOG(" Test result.\n No Captured %d,expected %d, \n timingWarnings %d,"
" time to first %lu\n time to start %lu, time to stop %lu\n",
(int) captureResult.incomingFrames,(int)(expectedNumberOfFrames),
(int) captureResult.timingWarnings,(long) timeToFirstFrame,
(long) timeToStart,(long) timeToStop);
captureResult.ResetSettings();
}
} // namespace webrtc

View File

@ -1,127 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#pragma once
#include "testDefines.h"
#include "video_capture_factory.h"
#include "Logger.h"
//#define RENDER_PREVIEW //Does not work properly on Linux
#ifdef RENDER_PREVIEW
#include "Renderer.h"
#else
typedef void* Renderer;
#endif
namespace webrtc
{
struct CaptureSetting
{
WebRtc_Word32 settingID;
WebRtc_UWord8 captureName[256];
VideoCaptureCapability capability;
WebRtc_Word32 captureDelay;
WebRtc_Word64 lastRenderTimeMS;
WebRtc_Word32 incomingFrames;
WebRtc_Word32 timingWarnings;
WebRtc_Word64 startTime;
WebRtc_Word64 stopTime;
WebRtc_Word64 initStartTime;
WebRtc_Word64 initStopTime;
WebRtc_Word64 stopStartTime;
WebRtc_Word64 stopStopTime;
WebRtc_Word64 firstCapturedFrameTime;
VideoCaptureModule* captureModule;
CaptureSetting()
{
ResetAll();
}
void ResetSettings()
{
capability.width=0;
capability.height=0;
capability.maxFPS=0;
captureDelay=0;
lastRenderTimeMS=0;
incomingFrames=0;
timingWarnings=0;
startTime=0;
stopTime=0;
firstCapturedFrameTime=0;
}
void ResetAll()
{
ResetSettings();
settingID = -1;
captureModule=0;
initStartTime=0;
initStopTime=0;
stopStartTime=0;
stopStopTime=0;
}
};
class testPlatformDependent: public VideoCaptureDataCallback
{
public:
testPlatformDependent(void);
~testPlatformDependent(void);
int DoTest();
void SetRenderer(Renderer* renderer);
// from VideoCaptureDataCallback
virtual void OnIncomingCapturedFrame(const WebRtc_Word32 id,
VideoFrame& videoFrame,
webrtc::VideoCodecType codecType);
virtual void OnCaptureDelayChanged(const WebRtc_Word32 id,
const WebRtc_Word32 delay);
private:
// Test multiple create delete start stop of one module
WebRtc_Word32 testCreateDelete(const WebRtc_UWord8* uniqueID);
WebRtc_Word32 testCapabilities(const WebRtc_UWord8* uniqueID);
WebRtc_Word32 testMultipleCameras();
WebRtc_Word32 testRotation(const WebRtc_UWord8* uniqueID);
void VerifyResultFrame(const WebRtc_Word32 id,const VideoFrame& videoFrame);
void EvaluateTestResult(CaptureSetting& captureResult);
VideoCaptureModule* _captureModule;
VideoCaptureModule::DeviceInfo* _captureInfo;
CaptureSetting _captureSettings[4];
WebRtc_UWord32 _noOfCameras;
#ifdef RENDER_PREVIEW
Renderer*_renderer[4];
#endif
Logger _logger;
};
} // namespace webrtc

View File

@ -0,0 +1,455 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <stdio.h>
#include "gtest/gtest.h"
#include "process_thread.h"
#include "scoped_ptr.h"
#include "scoped_refptr.h"
#include "tick_util.h"
#include "video_capture.h"
#include "video_capture_factory.h"
using webrtc::TickTime;
using webrtc::VideoCaptureAlarm;
using webrtc::VideoCaptureCapability;
using webrtc::VideoCaptureDataCallback;
using webrtc::VideoCaptureFactory;
using webrtc::VideoCaptureFeedBack;
using webrtc::VideoCaptureModule;
#if defined(_WIN32)
#define SLEEP(x) Sleep(x)
#elif defined(WEBRTC_ANDROID)
#define SLEEP(x) usleep(x*1000)
#else
#include <unistd.h>
#define SLEEP(x) usleep(x * 1000)
#endif
#define WAIT_(ex, timeout, res) \
do { \
res = (ex); \
WebRtc_Word64 start = TickTime::MillisecondTimestamp(); \
while (!res && TickTime::MillisecondTimestamp() < start + timeout) { \
SLEEP(5); \
res = (ex); \
} \
} while (0)
#define EXPECT_TRUE_WAIT(ex, timeout) \
do { \
bool res; \
WAIT_(ex, timeout, res); \
if (!res) EXPECT_TRUE(ex); \
} while (0)
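// Example usage: EXPECT_TRUE_WAIT(observer.incoming_frames >= 5, kTimeOut)
// re-evaluates the condition every 5 ms and reports a failure if it still
// does not hold after |timeout| ms.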
static const int kTimeOut = 5000;
static const int kTestHeight = 288;
static const int kTestWidth = 352;
static const int kTestFramerate = 30;
// Compares the content of two video frames.
static bool CompareFrames(const webrtc::VideoFrame& frame1,
const webrtc::VideoFrame& frame2) {
bool result =
(frame1.Length() == frame2.Length()) &&
(frame1.Width() == frame2.Width()) &&
(frame1.Height() == frame2.Height());
for (unsigned int i = 0; i < frame1.Length() && result; ++i)
result = (*(frame1.Buffer()+i) == *(frame2.Buffer()+i));
return result;
}
// Compares the content of an I420 frame in planar form with a packed video frame.
static bool CompareFrames(const webrtc::VideoFrameI420& frame1,
const webrtc::VideoFrame& frame2) {
if (frame1.width != frame2.Width() ||
frame1.height != frame2.Height()) {
return false;
}
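// frame2 is expected to hold packed I420 data: the Y plane at offset 0,
// the U plane at Width() * Height() and the V plane at
// Width() * Height() * 5 / 4, matching the offsets used below.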
// Compare Y
unsigned char* y_plane = frame1.y_plane;
for (unsigned int i = 0; i < frame2.Height(); ++i) {
for (unsigned int j = 0; j < frame2.Width(); ++j) {
if (*y_plane != *(frame2.Buffer()+i*frame2.Width() +j))
return false;
++y_plane;
}
y_plane += frame1.y_pitch - frame1.width;
}
// Compare U
unsigned char* u_plane = frame1.u_plane;
for (unsigned int i = 0; i < frame2.Height() /2; ++i) {
for (unsigned int j = 0; j < frame2.Width() /2; ++j) {
if (*u_plane !=*(
frame2.Buffer()+frame2.Width() * frame2.Height() +
i*frame2.Width() / 2 + j)) {
return false;
}
++u_plane;
}
u_plane += frame1.u_pitch - frame1.width / 2;
}
// Compare V
unsigned char* v_plane = frame1.v_plane;
for (unsigned int i = 0; i < frame2.Height() /2; ++i) {
for (unsigned int j = 0; j < frame2.Width() /2; ++j) {
if (*v_plane != *(
frame2.Buffer()+frame2.Width() * frame2.Height()* 5 / 4 +
i*frame2.Width() / 2 + j)) {
return false;
}
++v_plane;
}
v_plane += frame1.v_pitch - frame1.width / 2;
}
return true;
}
class TestVideoCaptureCallback : public VideoCaptureDataCallback {
public:
TestVideoCaptureCallback()
: capture_delay(0),
last_render_time_ms(0),
incoming_frames(0),
timing_warnings(0) {
}
~TestVideoCaptureCallback() {
if (timing_warnings > 0)
printf("No of timing warnings %d\n", timing_warnings);
}
virtual void OnIncomingCapturedFrame(const WebRtc_Word32 id,
webrtc::VideoFrame& videoFrame,
webrtc::VideoCodecType codecType) {
int height = static_cast<int>(videoFrame.Height());
int width = static_cast<int>(videoFrame.Width());
EXPECT_EQ(height, capability.height);
EXPECT_EQ(width, capability.width);
// The render timestamp should be close to the current time.
EXPECT_TRUE(
videoFrame.RenderTimeMs() >= TickTime::MillisecondTimestamp()-30 &&
videoFrame.RenderTimeMs() <= TickTime::MillisecondTimestamp());
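// Count a timing warning if the gap to the previous frame deviates by more
// than +/- 10% from the expected 1000 / maxFPS ms.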
if ((videoFrame.RenderTimeMs() >
last_render_time_ms + (1000 * 1.1) / capability.maxFPS &&
last_render_time_ms > 0) ||
(videoFrame.RenderTimeMs() <
last_render_time_ms + (1000 * 0.9) / capability.maxFPS &&
last_render_time_ms > 0)) {
timing_warnings++;
}
incoming_frames++;
last_render_time_ms = videoFrame.RenderTimeMs();
last_frame.CopyFrame(videoFrame);
}
virtual void OnCaptureDelayChanged(const WebRtc_Word32 id,
const WebRtc_Word32 delay) {
capture_delay = delay;
}
VideoCaptureCapability capability;
int capture_delay;
WebRtc_Word64 last_render_time_ms;
int incoming_frames;
int timing_warnings;
webrtc::VideoFrame last_frame;
};
class TestVideoCaptureFeedBack : public VideoCaptureFeedBack {
public:
TestVideoCaptureFeedBack() : frame_rate(0), alarm(webrtc::Cleared) {}
virtual void OnCaptureFrameRate(const WebRtc_Word32 id,
const WebRtc_UWord32 frameRate) {
frame_rate = frameRate;
}
virtual void OnNoPictureAlarm(const WebRtc_Word32 id,
const VideoCaptureAlarm reported_alarm) {
alarm = reported_alarm;
}
unsigned int frame_rate;
VideoCaptureAlarm alarm;
};
class VideoCaptureTest : public testing::Test {
public:
VideoCaptureTest() : number_of_devices_(0) {}
void SetUp() {
device_info_.reset(VideoCaptureFactory::CreateDeviceInfo(5));
number_of_devices_ = device_info_->NumberOfDevices();
ASSERT_GT(number_of_devices_, 0u);
}
webrtc::scoped_refptr<VideoCaptureModule> OpenVideoCaptureDevice(
unsigned int device,
VideoCaptureDataCallback* callback) {
WebRtc_UWord8 device_name[256];
WebRtc_UWord8 unique_name[256];
EXPECT_EQ(0, device_info_->GetDeviceName(
device, device_name, 256, unique_name, 256));
webrtc::scoped_refptr<VideoCaptureModule> module(
VideoCaptureFactory::Create(device, unique_name));
if (module.get() == NULL)
return NULL;
EXPECT_FALSE(module->CaptureStarted());
EXPECT_EQ(0, module->RegisterCaptureDataCallback(*callback));
return module;
}
void StartCapture(VideoCaptureModule* capture_module,
VideoCaptureCapability capability) {
EXPECT_EQ(0, capture_module->StartCapture(capability));
EXPECT_TRUE(capture_module->CaptureStarted());
VideoCaptureCapability resulting_capability;
EXPECT_EQ(0, capture_module->CaptureSettings(resulting_capability));
EXPECT_EQ(capability, resulting_capability);
}
webrtc::scoped_ptr<VideoCaptureModule::DeviceInfo> device_info_;
unsigned int number_of_devices_;
};
TEST_F(VideoCaptureTest, CreateDelete) {
for (int i = 0; i < 5; ++i) {
WebRtc_Word64 start_time = TickTime::MillisecondTimestamp();
TestVideoCaptureCallback capture_observer;
webrtc::scoped_refptr<VideoCaptureModule> module(OpenVideoCaptureDevice(
0, &capture_observer));
ASSERT_TRUE(module.get() != NULL);
#ifndef WEBRTC_MAC
device_info_->GetCapability(module->CurrentDeviceName(), 0,
capture_observer.capability);
#else
capture_observer.capability.width = kTestWidth;
capture_observer.capability.height = kTestHeight;
capture_observer.capability.maxFPS = kTestFramerate;
capture_observer.capability.rawType = kVideoUnknown;
#endif
StartCapture(module.get(), capture_observer.capability);
// Less than 4s to start the camera.
EXPECT_LE(TickTime::MillisecondTimestamp() - start_time, 4000);
// Make sure 5 frames are captured.
EXPECT_TRUE_WAIT(capture_observer.incoming_frames >= 5, kTimeOut);
EXPECT_GT(capture_observer.capture_delay, 0);
WebRtc_Word64 stop_time = TickTime::MillisecondTimestamp();
EXPECT_EQ(0, module->StopCapture());
EXPECT_FALSE(module->CaptureStarted());
// Less than 3s to stop the camera.
EXPECT_LE(TickTime::MillisecondTimestamp() - stop_time, 3000);
}
}
TEST_F(VideoCaptureTest, Capabilities) {
#ifdef WEBRTC_MAC
printf("Video capture capabilities are not supported on Mac.\n");
return;
#endif
TestVideoCaptureCallback capture_observer;
webrtc::scoped_refptr<VideoCaptureModule> module(OpenVideoCaptureDevice(
0, &capture_observer));
ASSERT_TRUE(module.get() != NULL);
int number_of_capabilities = device_info_->NumberOfCapabilities(
module->CurrentDeviceName());
EXPECT_GT(number_of_capabilities, 0);
for (int i = 0; i < number_of_capabilities; ++i) {
device_info_->GetCapability(module->CurrentDeviceName(), i,
capture_observer.capability);
StartCapture(module.get(), capture_observer.capability);
// Make sure 5 frames are captured.
EXPECT_TRUE_WAIT(capture_observer.incoming_frames >= 5, kTimeOut);
capture_observer.incoming_frames = 0;
EXPECT_EQ(0, module->StopCapture());
}
}
TEST_F(VideoCaptureTest, TestTwoCameras) {
if (number_of_devices_ < 2) {
printf("There are not two cameras available. Aborting test. \n");
return;
}
TestVideoCaptureCallback capture_observer1;
webrtc::scoped_refptr<VideoCaptureModule> module1(OpenVideoCaptureDevice(
0, &capture_observer1));
ASSERT_TRUE(module1.get() != NULL);
#ifndef WEBRTC_MAC
device_info_->GetCapability(module1->CurrentDeviceName(), 0,
capture_observer1.capability);
#else
capture_observer1.capability.width = kTestWidth;
capture_observer1.capability.height = kTestHeight;
capture_observer1.capability.maxFPS = kTestFramerate;
capture_observer1.capability.rawType = kVideoUnknown;
#endif
TestVideoCaptureCallback capture_observer2;
webrtc::scoped_refptr<VideoCaptureModule> module2(OpenVideoCaptureDevice(
1, &capture_observer2));
ASSERT_TRUE(module2.get() != NULL);
#ifndef WEBRTC_MAC
device_info_->GetCapability(module2->CurrentDeviceName(), 0,
capture_observer2.capability);
#else
capture_observer2.capability.width = kTestWidth;
capture_observer2.capability.height = kTestHeight;
capture_observer2.capability.maxFPS = kTestFramerate;
capture_observer2.capability.rawType = kVideoUnknown;
#endif
StartCapture(module1.get(), capture_observer1.capability);
StartCapture(module2.get(), capture_observer2.capability);
EXPECT_TRUE_WAIT(capture_observer1.incoming_frames >= 5, kTimeOut);
EXPECT_TRUE_WAIT(capture_observer2.incoming_frames >= 5, kTimeOut);
}
// Test fixture for external capture and for capture feedback information
// such as frame rate and picture alarm.
class VideoCaptureExternalTest : public testing::Test {
public:
void SetUp() {
capture_module_ = VideoCaptureFactory::Create(0, capture_input_interface_);
process_module_ = webrtc::ProcessThread::CreateProcessThread();
process_module_->Start();
process_module_->RegisterModule(capture_module_);
capture_callback_.capability.width = kTestWidth;
capture_callback_.capability.height = kTestHeight;
capture_callback_.capability.rawType = webrtc::kVideoYV12;
capture_callback_.capability.maxFPS = kTestFramerate;
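// An I420 frame of kTestWidth x kTestHeight needs width * height luma bytes
// plus two width * height / 4 chroma planes, i.e. width * height * 3 / 2
// bytes in total.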
test_frame_.VerifyAndAllocate(kTestWidth * kTestHeight * 3 / 2);
test_frame_.SetLength(kTestWidth * kTestHeight * 3 / 2);
test_frame_.SetHeight(kTestHeight);
test_frame_.SetWidth(kTestWidth);
SLEEP(1); // Wait 1ms so that two tests can't have the same timestamp.
memset(test_frame_.Buffer(), 127, test_frame_.Length());
EXPECT_EQ(0, capture_module_->RegisterCaptureDataCallback(
capture_callback_));
EXPECT_EQ(0, capture_module_->RegisterCaptureCallback(capture_feedback_));
EXPECT_EQ(0, capture_module_->EnableFrameRateCallback(true));
EXPECT_EQ(0, capture_module_->EnableNoPictureAlarm(true));
}
void TearDown() {
process_module_->Stop();
webrtc::ProcessThread::DestroyProcessThread(process_module_);
}
webrtc::VideoCaptureExternal* capture_input_interface_;
webrtc::scoped_refptr<VideoCaptureModule> capture_module_;
webrtc::ProcessThread* process_module_;
webrtc::VideoFrame test_frame_;
TestVideoCaptureCallback capture_callback_;
TestVideoCaptureFeedBack capture_feedback_;
};
// Test input of external video frames.
TEST_F(VideoCaptureExternalTest, TestExternalCapture) {
EXPECT_EQ(0, capture_input_interface_->IncomingFrame(
test_frame_.Buffer(), test_frame_.Length(), capture_callback_.capability,
0));
EXPECT_TRUE(CompareFrames(test_frame_, capture_callback_.last_frame));
}
// Test input of planar I420 frames.
TEST_F(VideoCaptureExternalTest, TestExternalCaptureI420) {
webrtc::VideoFrameI420 frame_i420;
frame_i420.width = kTestWidth;
frame_i420.height = kTestHeight;
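// Point the planes into the packed test frame: U starts after the
// width * height Y bytes and V after a further width * height / 4 U bytes.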
frame_i420.y_plane = test_frame_.Buffer();
frame_i420.u_plane = frame_i420.y_plane + (kTestWidth * kTestHeight);
frame_i420.v_plane = frame_i420.u_plane + ((kTestWidth * kTestHeight) >> 2);
frame_i420.y_pitch = kTestWidth;
frame_i420.u_pitch = kTestWidth / 2;
frame_i420.v_pitch = kTestWidth / 2;
EXPECT_EQ(0, capture_input_interface_->IncomingFrameI420(frame_i420, 0));
EXPECT_TRUE(CompareFrames(frame_i420, capture_callback_.last_frame));
}
// Test frame rate and no picture alarm.
TEST_F(VideoCaptureExternalTest, FrameRate) {
WebRtc_Word64 testTime = 3;
TickTime startTime = TickTime::Now();
capture_callback_.capability.maxFPS = 10;
while ((TickTime::Now() - startTime).Milliseconds() < testTime * 1000) {
EXPECT_EQ(0, capture_input_interface_->IncomingFrame(
test_frame_.Buffer(), test_frame_.Length(),
capture_callback_.capability, 0));
SLEEP(1000 / capture_callback_.capability.maxFPS);
}
EXPECT_TRUE(capture_feedback_.frame_rate >= 8 &&
capture_feedback_.frame_rate <= 10);
SLEEP(500);
EXPECT_EQ(webrtc::Raised, capture_feedback_.alarm);
startTime = TickTime::Now();
capture_callback_.capability.maxFPS = 30;
while ((TickTime::Now() - startTime).Milliseconds() < testTime * 1000) {
EXPECT_EQ(0, capture_input_interface_->IncomingFrame(
test_frame_.Buffer(), test_frame_.Length(),
capture_callback_.capability, 0));
SLEEP(1000 / capture_callback_.capability.maxFPS);
}
EXPECT_EQ(webrtc::Cleared, capture_feedback_.alarm);
// Frame rate might be less than 30 since we have paused providing
// frames for a while.
EXPECT_TRUE(capture_feedback_.frame_rate >= 25 &&
capture_feedback_.frame_rate <= 33);
}
// Test the start image feature.
TEST_F(VideoCaptureExternalTest, StartImage) {
capture_callback_.capability.maxFPS = 10;
EXPECT_EQ(0, capture_module_->StartSendImage(
test_frame_, capture_callback_.capability.maxFPS));
EXPECT_TRUE_WAIT(capture_callback_.incoming_frames == 5, kTimeOut);
EXPECT_EQ(0, capture_module_->StopSendImage());
SLEEP(200);
// Test that no more start images have arrived.
EXPECT_TRUE(capture_callback_.incoming_frames >= 4 &&
capture_callback_.incoming_frames <= 5);
EXPECT_TRUE(CompareFrames(test_frame_, capture_callback_.last_frame));
}