Add IncomingFrameI420 to the ViEExternalCapture interface so that a captured video frame can be delivered as three separate I420 planes (with per-plane pitch) rather than a single contiguous buffer.

Review URL: http://webrtc-codereview.appspot.com/219004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@753 4adac7df-926f-26a2-2b94-8c16560cd09d
Author: wu@webrtc.org
Date:   2011-10-14 17:16:04 +00:00
Commit: f10ea31211
Parent: 14aaaf116a
14 changed files with 237 additions and 54 deletions
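For orientation, here is a minimal sketch (not part of the commit) of how an application might push one tightly packed I420 buffer through the new ViE-level call. The surrounding setup is assumed: an external capture device has already been allocated through the ViE capture sub-API's AllocateExternalCaptureDevice, yielding a ViEExternalCapture pointer; the helper name and include path are made up for illustration.

    #include "vie_capture.h"  // assumed include path for ViEExternalCapture / ViEVideoFrameI420

    // Hypothetical helper: deliver one tightly packed I420 frame (Y, then U, then V).
    void DeliverPackedI420(webrtc::ViEExternalCapture* external_capture,
                           unsigned char* buffer,
                           unsigned short width, unsigned short height) {
      webrtc::ViEVideoFrameI420 frame;
      frame.width = width;
      frame.height = height;
      frame.y_plane = buffer;                               // width x height luma samples
      frame.u_plane = frame.y_plane + width * height;       // (width/2) x (height/2) chroma
      frame.v_plane = frame.u_plane + (width * height) / 4;
      frame.y_pitch = width;                                // tightly packed rows
      frame.u_pitch = width / 2;
      frame.v_pitch = width / 2;
      // A captureTime of 0 lets the capture module stamp the frame with the
      // current time (see VideoCaptureImpl::DeliverCapturedFrame below).
      external_capture->IncomingFrameI420(frame, 0);
    }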

View File

@@ -84,6 +84,33 @@ enum VideoCaptureAlarm
Cleared = 1
};
+// VideoFrameI420 doesn't take the ownership of the buffer.
+// It's mostly used to group the parameters for external capture.
+struct VideoFrameI420
+{
+VideoFrameI420() {
+y_plane = NULL;
+u_plane = NULL;
+v_plane = NULL;
+y_pitch = 0;
+u_pitch = 0;
+v_pitch = 0;
+width = 0;
+height = 0;
+}
+unsigned char* y_plane;
+unsigned char* u_plane;
+unsigned char* v_plane;
+int y_pitch;
+int u_pitch;
+int v_pitch;
+unsigned short width;
+unsigned short height;
+};
/* External Capture interface. Returned by Create
and implemented by the capture module.
*/
@@ -94,6 +121,8 @@ public:
WebRtc_Word32 videoFrameLength,
const VideoCaptureCapability& frameInfo,
WebRtc_Word64 captureTime = 0) = 0;
+virtual WebRtc_Word32 IncomingFrameI420(const VideoFrameI420& video_frame,
+WebRtc_Word64 captureTime = 0) = 0;
protected:
~VideoCaptureExternal() {}
};
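As a quick reference (not part of the diff), the tightly packed frame size implied by the plane fields above is width * height * 3 / 2 bytes; a worked example:

    // Worked example for a tightly packed 640x480 I420 frame:
    //   Y plane : 640 * 480          = 307200 bytes  (pitch = width)
    //   U plane : (640/2) * (480/2)  =  76800 bytes  (pitch = width/2)
    //   V plane : (640/2) * (480/2)  =  76800 bytes  (pitch = width/2)
    //   total   : 307200 + 76800 + 76800 = 460800 = 640 * 480 * 3 / 2
    // This is the size the capture module allocates for its internal copy
    // (CalcBufferSize(kI420, width, height) in the implementation below).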

View File

@@ -253,6 +253,40 @@ WebRtc_Word32 VideoCaptureImpl::CaptureDelay()
CriticalSectionScoped cs(_apiCs);
return _setCaptureDelay;
}
+WebRtc_Word32 VideoCaptureImpl::DeliverCapturedFrame(VideoFrame& captureFrame,
+WebRtc_Word32 width, WebRtc_Word32 height, WebRtc_Word64 capture_time,
+VideoCodecType codec_type) {
+UpdateFrameCount();// frame count used for local frame rate callback.
+_startImageFrameIntervall = 0; // prevent the start image to be displayed.
+const bool callOnCaptureDelayChanged = _setCaptureDelay != _captureDelay;
+// Capture delay changed
+if (_setCaptureDelay != _captureDelay) {
+_setCaptureDelay = _captureDelay;
+}
+// Set the capture time
+if (capture_time != 0) {
+captureFrame.SetRenderTime(capture_time);
+}
+else {
+captureFrame.SetRenderTime(TickTime::MillisecondTimestamp());
+}
+captureFrame.SetHeight(height);
+captureFrame.SetWidth(width);
+if (_dataCallBack) {
+if (callOnCaptureDelayChanged) {
+_dataCallBack->OnCaptureDelayChanged(_id, _captureDelay);
+}
+_dataCallBack->OnIncomingCapturedFrame(_id, captureFrame, codec_type);
+}
+return 0;
+}
WebRtc_Word32 VideoCaptureImpl::IncomingFrame(WebRtc_UWord8* videoFrame,
WebRtc_Word32 videoFrameLength,
const VideoCaptureCapability& frameInfo,
@@ -269,10 +303,6 @@ WebRtc_Word32 VideoCaptureImpl::IncomingFrame(WebRtc_UWord8* videoFrame,
const WebRtc_Word32 width = frameInfo.width;
const WebRtc_Word32 height = frameInfo.height;
-UpdateFrameCount();// frame count used for local frame rate callback.
-_startImageFrameIntervall = 0; // prevent the start image to be displayed.
if (frameInfo.codecType == kVideoCodecUnknown) // None encoded. Convert to I420.
{
const VideoType vpLibType = videocapturemodule::
@@ -318,33 +348,8 @@ WebRtc_Word32 VideoCaptureImpl::IncomingFrame(WebRtc_UWord8* videoFrame,
}
}
-const bool callOnCaptureDelayChanged = _setCaptureDelay != _captureDelay;
-if (_setCaptureDelay != _captureDelay) // Capture delay changed
-{
-_setCaptureDelay = _captureDelay;
-}
-// Set the capture time
-if (captureTime != 0)
-{
-_captureFrame.SetRenderTime(captureTime);
-}
-else
-{
-_captureFrame.SetRenderTime(TickTime::MillisecondTimestamp());
-}
-_captureFrame.SetHeight(height);
-_captureFrame.SetWidth(width);
-if (_dataCallBack)
-{
-if (callOnCaptureDelayChanged)
-{
-_dataCallBack->OnCaptureDelayChanged(_id, _captureDelay);
-}
-_dataCallBack->OnIncomingCapturedFrame(_id, _captureFrame, frameInfo.codecType);
-}
+DeliverCapturedFrame(_captureFrame, width, height, captureTime, frameInfo.codecType);
const WebRtc_UWord32 processTime =
(WebRtc_UWord32)(TickTime::Now() - startProcessTime).Milliseconds();
@@ -356,7 +361,60 @@ WebRtc_Word32 VideoCaptureImpl::IncomingFrame(WebRtc_UWord8* videoFrame,
}
return 0;
+}
+WebRtc_Word32 VideoCaptureImpl::IncomingFrameI420(
+const VideoFrameI420& video_frame, WebRtc_Word64 captureTime) {
+CriticalSectionScoped cs(_callBackCs);
+// Allocate I420 buffer
+int frame_size = CalcBufferSize(kI420,
+video_frame.width,
+video_frame.height);
+_captureFrame.VerifyAndAllocate(frame_size);
+if (!_captureFrame.Buffer()) {
+WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+"Failed to allocate frame buffer.");
+return -1;
+}
+// Copy planes to the _captureFrame
+int y_width = video_frame.width;
+int uv_width = video_frame.width / 2;
+int y_rows = video_frame.height;
+int uv_rows = video_frame.height / 2; // I420
+unsigned char* current_pointer = _captureFrame.Buffer();
+unsigned char* y_plane = video_frame.y_plane;
+unsigned char* u_plane = video_frame.u_plane;
+unsigned char* v_plane = video_frame.v_plane;
+// Copy Y
+for (int i = 0; i < y_rows; ++i) {
+memcpy(current_pointer, y_plane, y_width);
+current_pointer += video_frame.y_pitch;
+y_plane += video_frame.y_pitch;
+}
+// Copy U
+for (int i = 0; i < uv_rows; ++i) {
+memcpy(current_pointer, u_plane, uv_width);
+current_pointer += video_frame.u_pitch;
+u_plane += video_frame.u_pitch;
+}
+// Copy V
+for (int i = 0; i < uv_rows; ++i) {
+memcpy(current_pointer, v_plane, uv_width);
+current_pointer += video_frame.v_pitch;
+v_plane += video_frame.v_pitch;
+}
+_captureFrame.SetLength(frame_size);
+DeliverCapturedFrame(_captureFrame,
+video_frame.width,
+video_frame.height,
+captureTime,
+kVideoCodecUnknown);
+return 0;
}
WebRtc_Word32 VideoCaptureImpl::SetCaptureRotation(VideoCaptureRotation rotation)
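A side note, not part of the commit: in the copy loops above both the source and the destination pointer advance by the source pitch, so the packed destination buffer sized by CalcBufferSize lines up exactly when each pitch equals its plane width (the tightly packed case shown in the sketches above). For comparison, a per-plane copy written with an explicitly packed destination might look like the following; the helper name is made up.

    #include <string.h>  // memcpy

    // Illustrative only: copy one plane from a pitched source into a tightly
    // packed destination. The destination advances by the visible width, the
    // source by its pitch, so any row padding in the source is skipped.
    static unsigned char* CopyPlanePacked(unsigned char* dst,
                                          const unsigned char* src,
                                          int width, int rows, int src_pitch) {
      for (int i = 0; i < rows; ++i) {
        memcpy(dst, src, width);
        dst += width;       // packed destination rows
        src += src_pitch;   // pitched source rows
      }
      return dst;  // first byte after this plane, i.e. where the next plane starts
    }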

View File

@@ -95,6 +95,10 @@ public:
WebRtc_Word32 videoFrameLength,
const VideoCaptureCapability& frameInfo,
WebRtc_Word64 captureTime = 0);
+virtual WebRtc_Word32 IncomingFrameI420(
+const VideoFrameI420& video_frame,
+WebRtc_Word64 captureTime = 0);
// Platform dependent
virtual WebRtc_Word32 StartCapture(const VideoCaptureCapability& capability)
{
@@ -120,6 +124,9 @@ protected:
private:
void UpdateFrameCount();
WebRtc_UWord32 CalculateFrameRate(const TickTime& now);
+WebRtc_Word32 DeliverCapturedFrame(
+VideoFrame& captureFrame, WebRtc_Word32 width, WebRtc_Word32 height,
+WebRtc_Word64 capture_time, VideoCodecType codec_type);
CriticalSectionWrapper& _callBackCs;

View File

@@ -30,7 +30,7 @@ Logger::~Logger(void)
}
void Logger::Print(char* msg)
{
-printf(msg);
+printf("%s\n",msg);
if (_logFile.Open())
{
_logFile.WriteText(msg);

View File

@@ -227,6 +227,7 @@ void Renderer::SetRenderWindow(jobject renderWindow)
int WebRtcCreateWindow(HWND &hwndMain,int winNum, int width, int height)
{
+return 0;
}
void SetWindowPos(HWND &hwndMain, int x, int y, int width, int height, bool onTop)
{

View File

@@ -32,7 +32,7 @@ testCameraEncoder::testCameraEncoder(void)
Trace::CreateTrace();
Trace::SetLevelFilter(webrtc::kTraceAll);
Trace::SetTraceFile("testCameraEncoder.txt");
-_captureInfo=VideoCaptureModule::CreateDeviceInfo(5);
+_captureInfo=VideoCaptureFactory::CreateDeviceInfo(5);
#ifdef RENDER_PREVIEW
_renderer=NULL;
_videoCoding=webrtc::VideoCodingModule::Create(5);
@@ -91,7 +91,8 @@ int testCameraEncoder::DoTest()
WebRtc_UWord8 productId[256];
_captureInfo->GetDeviceName(i,name,256,uniqueID,256,productId,256);
-_captureModule= VideoCaptureModule::Create(0,uniqueID);
+_captureModule= VideoCaptureFactory::Create(0,uniqueID);
+_captureModule->AddRef();
_captureModule->RegisterCaptureDataCallback(*this);
VideoCaptureCapability capability;
@@ -113,7 +114,7 @@ int testCameraEncoder::DoTest()
}
}
-VideoCaptureModule::Destroy(_captureModule);
+_captureModule->Release();
}
return 0;
}
@@ -226,8 +227,10 @@ void testCameraEncoder::OnIncomingCapturedFrame(const WebRtc_Word32 id,
{
_captureSettings.incomingFrames++;
_captureSettings.noOfBytes+=videoFrame.Length();
-assert(videoFrame.Height()==_captureSettings.capability.height);
-assert(videoFrame.Width()==_captureSettings.capability.width);
+int height = static_cast<int>(videoFrame.Height());
+int width = static_cast<int>(videoFrame.Width());
+assert(height==_captureSettings.capability.height);
+assert(width==_captureSettings.capability.width);
assert(videoFrame.RenderTimeMs()>=(TickTime::MillisecondTimestamp()-30)); // RenderTimstamp should be the time now
if((videoFrame.RenderTimeMs()>_captureSettings.lastRenderTimeMS
+(1000*1.2)/_captureSettings.capability.maxFPS

View File

@@ -10,7 +10,7 @@
#pragma once
-#include "video_capture.h"
+#include "video_capture_factory.h"
//#define RENDER_PREVIEW

View File

@@ -29,7 +29,8 @@ static int testExternalCaptureResult = 0;
void testExternalCapture::CreateInterface()
{
-_captureModule = VideoCaptureModule::Create(1, _captureInteface);
+_captureModule = VideoCaptureFactory::Create(1, _captureInteface);
+_captureModule->AddRef();
}
testExternalCapture::testExternalCapture(void)
: _captureInteface(NULL), _captureModule(NULL)
@@ -51,7 +52,7 @@ int testExternalCapture::CompareFrames(const VideoFrame& frame1,
testExternalCapture::~testExternalCapture(void)
{
-VideoCaptureModule::Destroy(_captureModule);
+_captureModule->Release();
}
void testExternalCapture::OnIncomingCapturedFrame(
@@ -114,6 +115,19 @@ int testExternalCapture::DoTest()
frameInfo,0)==0);
CompareFrames(_testFrame, _resultFrame);
+printf(" testing the IncomingFrameI420 interface.\n");
+VideoFrameI420 frame_i420;
+frame_i420.width = width;
+frame_i420.height = height;
+frame_i420.y_plane = _testFrame.Buffer();
+frame_i420.u_plane = frame_i420.y_plane + (width * height);
+frame_i420.v_plane = frame_i420.u_plane + ((width * height) >> 2);
+frame_i420.y_pitch = width;
+frame_i420.u_pitch = width / 2;
+frame_i420.v_pitch = width / 2;
+assert(_captureInteface->IncomingFrameI420(frame_i420, 0) == 0);
+CompareFrames(_testFrame, _resultFrame);
printf(" testing local frame rate callback and no picture alarm.\n");
WebRtc_Word64 testTime = 3;

View File

@@ -12,7 +12,7 @@
#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_TEST_TESTAPI_TESTEXTERNALCAPTURE_H_
#include "testDefines.h"
-#include "video_capture.h"
+#include "video_capture_factory.h"
namespace webrtc
{

View File

@@ -41,7 +41,7 @@ testPlatformDependent::testPlatformDependent(void) :
Trace::CreateTrace();
Trace::SetLevelFilter(webrtc::kTraceAll);
Trace::SetTraceFile("testPlatformDependent.txt");
-_captureInfo = VideoCaptureModule::CreateDeviceInfo(5);
+_captureInfo = VideoCaptureFactory::CreateDeviceInfo(5);
#ifdef RENDER_PREVIEW
memset(_renderer, 0, sizeof(_renderer));
#endif
@@ -125,8 +125,10 @@ void testPlatformDependent::VerifyResultFrame(const WebRtc_Word32 settingID,
{
found = true;
-assert(videoFrame.Height()==_captureSettings[i].capability.height);
-assert(videoFrame.Width()==_captureSettings[i].capability.width);
+int height = static_cast<int>(videoFrame.Height());
+int width = static_cast<int>(videoFrame.Width());
+assert(height==_captureSettings[i].capability.height);
+assert(width==_captureSettings[i].capability.width);
assert(videoFrame.RenderTimeMs()>=TickTime::MillisecondTimestamp()-30); // RenderTimstamp should be the time now
if ((videoFrame.RenderTimeMs()
> _captureSettings[i].lastRenderTimeMS + (1000 * 1.1)
@@ -172,8 +174,9 @@ WebRtc_Word32 testPlatformDependent::testCreateDelete(
#endif
_captureSettings[0].startTime = TickTime::MillisecondTimestamp();
_captureSettings[0].initStartTime = TickTime::MillisecondTimestamp();
-_captureSettings[0].captureModule = VideoCaptureModule::Create(0,
-uniqueID);
+_captureSettings[0].captureModule =
+VideoCaptureFactory::Create(0, uniqueID);
+_captureSettings[0].captureModule->AddRef();
assert(!_captureSettings[0].captureModule->CaptureStarted());
assert(_captureSettings[0].captureModule); // Test that it is created
assert(!_captureSettings[0].captureModule->RegisterCaptureDataCallback(*this));
@@ -202,7 +205,7 @@ WebRtc_Word32 testPlatformDependent::testCreateDelete(
assert(_captureSettings[0].captureModule->StopCapture()==0);
assert(!_captureSettings[0].captureModule->CaptureStarted());
-VideoCaptureModule::Destroy(_captureSettings[0].captureModule);
+_captureSettings[0].captureModule->Release();
_captureSettings[0].stopStopTime = TickTime::MillisecondTimestamp();
assert((_captureSettings[0].incomingFrames >= 5)); // Make sure at least 5 frames has been captured
@@ -219,8 +222,9 @@ WebRtc_Word32 testPlatformDependent::testCapabilities(
#ifndef WEBRTC_MAC
LOG("\n\nTesting capture capabilities\n");
-_captureSettings[0].captureModule = VideoCaptureModule::Create(0, uniqueID);
+_captureSettings[0].captureModule = VideoCaptureFactory::Create(0, uniqueID);
assert(_captureSettings[0].captureModule); // Test that it is created
+_captureSettings[0].captureModule->AddRef();
assert(!_captureSettings[0].captureModule->RegisterCaptureDataCallback(*this));
@@ -271,7 +275,7 @@ WebRtc_Word32 testPlatformDependent::testCapabilities(
EvaluateTestResult(_captureSettings[0]);
}
assert(oneValidCap); // Make sure the camera support at least one capability
-VideoCaptureModule::Destroy(_captureSettings[0].captureModule);
+_captureSettings[0].captureModule->Release();
_captureSettings[0].ResetAll();
return testPlatformDependentResult;
#else
@@ -301,7 +305,8 @@ WebRtc_Word32 testPlatformDependent::testMultipleCameras()
WebRtc_UWord8* name = _captureSettings[i].captureName;
LOG("\n\n Found capture device %u\n name %s\n unique name %s\n"
,(unsigned int) i,(char*) name, (char*)id);
-_captureSettings[i].captureModule = VideoCaptureModule::Create(i, id);
+_captureSettings[i].captureModule = VideoCaptureFactory::Create(i, id);
+_captureSettings[i].captureModule->AddRef();
assert(_captureSettings[i].captureModule); // Test that it is created
assert(!_captureSettings[i].captureModule->RegisterCaptureDataCallback(*this));
@@ -326,7 +331,7 @@ WebRtc_Word32 testPlatformDependent::testMultipleCameras()
_captureSettings[i].captureModule->StopCapture();
EvaluateTestResult(_captureSettings[i]);
-VideoCaptureModule::Destroy(_captureSettings[i].captureModule);
+_captureSettings[i].captureModule->Release();
_captureSettings[i].ResetAll();
}
return testPlatformDependentResult;
@@ -344,7 +349,9 @@ WebRtc_Word32 testPlatformDependent::testRotation(const WebRtc_UWord8* uniqueID)
{
LOG("\n\nTesting capture Rotation\n");
-_captureSettings[0].captureModule = VideoCaptureModule::Create(0, uniqueID);
+_captureSettings[0].captureModule =
+VideoCaptureFactory::Create(0, uniqueID);
+_captureSettings[0].captureModule->AddRef();
assert(_captureSettings[0].captureModule); // Test that it is created
assert(!_captureSettings[0].captureModule->RegisterCaptureDataCallback(*this));
@@ -411,7 +418,7 @@ WebRtc_Word32 testPlatformDependent::testRotation(const WebRtc_UWord8* uniqueID)
EvaluateTestResult(_captureSettings[0]);
-VideoCaptureModule::Destroy(_captureSettings[0].captureModule);
+_captureSettings[0].captureModule->Release();
_captureSettings[0].ResetAll();
return testPlatformDependentResult;

View File

@@ -12,7 +12,7 @@
#include "testDefines.h"
-#include "video_capture.h"
+#include "video_capture_factory.h"
#include "Logger.h"
//#define RENDER_PREVIEW //Does not work properly on Linux

View File

@@ -73,6 +73,31 @@ enum RotateCapturedFrame
RotateCapturedFrame_270 = 270
};
+struct ViEVideoFrameI420
+{
+ViEVideoFrameI420() {
+y_plane = NULL;
+u_plane = NULL;
+v_plane = NULL;
+y_pitch = 0;
+u_pitch = 0;
+v_pitch = 0;
+width = 0;
+height = 0;
+}
+unsigned char* y_plane;
+unsigned char* u_plane;
+unsigned char* v_plane;
+int y_pitch;
+int u_pitch;
+int v_pitch;
+unsigned short width;
+unsigned short height;
+};
// This class declares an abstract interface to be used when using an external
// capture device. The user implemented derived class is registered using
// AllocateExternalCaptureDevice and is released using ReleaseCaptureDevice.
@@ -89,6 +114,12 @@ public:
unsigned short width, unsigned short height,
RawVideoType videoType,
unsigned long long captureTime = 0) = 0;
+// This method is specifically for delivering a new captured I420 frame to
+// VideoEngine.
+virtual int IncomingFrameI420(
+const ViEVideoFrameI420& video_frame,
+unsigned long long captureTime = 0) = 0;
};
// ----------------------------------------------------------------------------

View File

@@ -409,6 +409,34 @@ int ViECapturer::IncomingFrame(unsigned char* videoFrame,
return _externalCaptureModule->IncomingFrame(videoFrame, videoFrameLength,
capability, captureTime);
}
+// This method is specifically for delivering a new captured I420 frame to
+// VideoEngine.
+int ViECapturer::IncomingFrameI420(
+const ViEVideoFrameI420& video_frame,
+unsigned long long captureTime) {
+WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+ViEId(_engineId, _captureId),
+"%ExternalCapture::IncomingFrame width %d, height %d, captureTime %u",
+video_frame.width, video_frame.height, captureTime);
+if (!_externalCaptureModule) {
+return -1;
+}
+VideoFrameI420 frame;
+frame.width = video_frame.width;
+frame.height = video_frame.height;
+frame.y_plane = video_frame.y_plane;
+frame.u_plane = video_frame.u_plane;
+frame.v_plane = video_frame.v_plane;
+frame.y_pitch = video_frame.y_pitch;
+frame.u_pitch = video_frame.u_pitch;
+frame.v_pitch = video_frame.v_pitch;
+return _externalCaptureModule->IncomingFrameI420(frame, captureTime);
+}
// ----------------------------------------------------------------------------
// OnIncomingCapturedFrame
//

View File

@@ -69,7 +69,12 @@ public:
RawVideoType videoType,
unsigned long long captureTime = 0);
-// Use this capture device as encoder. Returns 0 if the codec is supported by this capture device.
+virtual int IncomingFrameI420(
+const ViEVideoFrameI420& video_frame,
+unsigned long long captureTime = 0);
+// Use this capture device as encoder.
+// Returns 0 if the codec is supported by this capture device.
virtual WebRtc_Word32 PreEncodeToViEEncoder(const VideoCodec& codec,
ViEEncoder& vieEncoder,
WebRtc_Word32 vieEncoderId);