Switching WebRTC to libyuv.

General Notes:
1. The overall API structure was not modified and is still based on VPLIB.
2. API modification: return values follow the libyuv convention, i.e. 0 on success and a negative value on error (instead of returning a length).
3. All scaling (interpolation) is now done via the Scaler interface. Crop/pad is no longer used.
4. VPLIB was completely removed. All tests are now part of the libyuv unit tests (significantly more comprehensive and based on gtest).
5. JPEG is not yet implemented in libyuv, so the existing JPEG implementation remains.
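
For illustration, a minimal sketch of the new convention, using the Scaler and CalcBufferSize APIs introduced in the diffs below. The 640x480 -> 320x240 sizes and the srcFrame/scaledFrame VideoFrame objects are hypothetical:

  // Hypothetical example: downscale an I420 frame with the new interface.
  Scaler scaler;
  int ret = scaler.Set(640, 480, 320, 240, kI420, kI420, kScaleBox);
  if (ret < 0)
    return ret;  // 0 on success, negative on error -- never a length.
  int dstSize = CalcBufferSize(kI420, 320, 240);  // 320 * 240 * 3 / 2 for I420.
  scaledFrame.VerifyAndAllocate(dstSize);         // scaledFrame: a VideoFrame.
  ret = scaler.Scale(srcFrame.Buffer(), scaledFrame.Buffer(), dstSize);
  if (ret < 0)
    return ret;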
Review URL: http://webrtc-codereview.appspot.com/258001

git-svn-id: http://webrtc.googlecode.com/svn/trunk@1140 4adac7df-926f-26a2-2b94-8c16560cd09d
Author: mikhal@webrtc.org
Date:   2011-12-09 02:46:22 +00:00
Parent: ffa0a9e9c9
Commit: 2ab104e6be
46 changed files with 201 additions and 653 deletions

View File

@ -9,7 +9,7 @@
{
'includes': [
'../common_settings.gypi', # Common settings
'vplib/main/source/vplib.gypi',
'libyuv/libyuv.gypi',
'jpeg/main/source/jpeg.gypi',
],
}

View File

@ -20,7 +20,6 @@
#include <stdio.h>
#include <string.h>
#include "vplib.h"
#include "jpeg.h"
#include "data_manager.h"
extern "C" {

View File

@ -15,7 +15,7 @@
'target_name': 'webrtc_jpeg',
'type': '<(library)',
'dependencies': [
'webrtc_vplib',
'webrtc_libyuv',
],
'include_dirs': [
'../../../interface',
@ -72,8 +72,7 @@
],
'include_dirs': [
'../interface',
'../../../vplib/main/interface',
'../source',
'../source',
],
'sources': [

View File

@ -13,7 +13,7 @@
#include <string.h> // memcpy
#include "test_buffer.h"
#include "vplib.h"
#include "common_video/libyuv/include/libyuv.h"
TestBuffer::TestBuffer():
_buffer(0),

View File

@ -9,211 +9,49 @@
*/
#ifdef WEBRTC_MODULE_UTILITY_VIDEO
#include "frame_scaler.h"
#include "common_video/libyuv/include/scaler.h"
#include "trace.h"
#include "vplib.h"
#ifndef NO_INTERPOLATOR
#include "InterpolatorInterface.h"
#endif
namespace webrtc {
FrameScaler::FrameScaler()
: _ptrVideoInterpolator(0),
: _scaler(new Scaler()),
_scalerBuffer(),
_outWidth(0),
_outHeight(0),
_inWidth(0),
_inHeight(0)
{
}
_inHeight(0) {}
FrameScaler::~FrameScaler( )
{
#ifndef NO_INTERPOLATOR
if( _ptrVideoInterpolator != 0)
{
deleteInterpolator(_ptrVideoInterpolator);
}
#endif
}
FrameScaler::~FrameScaler( ) {}
WebRtc_Word32 FrameScaler::ResizeFrameIfNeeded(VideoFrame& videoFrame,
WebRtc_UWord32 outWidth,
WebRtc_UWord32 outHeight)
{
if( videoFrame.Length( ) == 0)
{
return -1;
}
WebRtc_UWord32 outHeight) {
if ( videoFrame.Length( ) == 0) {
return -1;
}
if((videoFrame.Width() != outWidth) || ( videoFrame.Height() != outHeight))
{
// Scale down by factor 2-4.
if(videoFrame.Width() % outWidth == 0 &&
videoFrame.Height() % outHeight == 0 &&
(videoFrame.Width() / outWidth) == (videoFrame.Height() / outHeight))
{
const WebRtc_Word32 multiple = videoFrame.Width() / outWidth;
WebRtc_UWord32 scaledWidth;
WebRtc_UWord32 scaledHeight;
switch(multiple)
{
case 2:
ScaleI420FrameQuarter(videoFrame.Width(), videoFrame.Height(),
videoFrame.Buffer());
if ((videoFrame.Width() != outWidth) || (videoFrame.Height() != outHeight)) {
_scaler->Set(videoFrame.Width(), videoFrame.Height(),
outWidth, outHeight,
kI420, kI420, kScaleBox);
videoFrame.SetLength(outWidth * outHeight * 3 / 2);
videoFrame.SetWidth( outWidth);
videoFrame.SetHeight(outHeight);
return 0;
case 3:
ScaleI420Down1_3(videoFrame.Width(), videoFrame.Height(),
videoFrame.Buffer(), videoFrame.Size(),
scaledWidth, scaledHeight);
videoFrame.SetLength((outWidth * outHeight * 3) / 2);
videoFrame.SetWidth(outWidth);
videoFrame.SetHeight(outHeight);
return 0;
case 4:
ScaleI420FrameQuarter(videoFrame.Width(), videoFrame.Height(),
videoFrame.Buffer());
ScaleI420FrameQuarter(videoFrame.Width() >> 1,
videoFrame.Height() >> 1,
videoFrame.Buffer());
videoFrame.SetLength((outWidth * outHeight * 3)/ 2);
videoFrame.SetWidth(outWidth);
videoFrame.SetHeight(outHeight);
return 0;
default:
break;
}
}
// Scale up by factor 2-4.
if(outWidth % videoFrame.Width() == 0 &&
outHeight % videoFrame.Height() == 0 &&
(outWidth / videoFrame.Width()) == (outHeight / videoFrame.Height()))
{
const WebRtc_Word32 multiple = outWidth / videoFrame.Width();
WebRtc_UWord32 scaledWidth = 0;
WebRtc_UWord32 scaledHeight = 0;
switch(multiple)
{
case 2:
videoFrame.VerifyAndAllocate((outHeight * outWidth * 3) / 2);
ScaleI420Up2(videoFrame.Width(), videoFrame.Height(),
videoFrame.Buffer(), videoFrame.Size(),
scaledWidth, scaledHeight);
videoFrame.SetLength((outWidth * outHeight * 3) / 2);
videoFrame.SetWidth(outWidth);
videoFrame.SetHeight(outHeight);
return 0;
case 3:
videoFrame.VerifyAndAllocate((outWidth * outHeight * 3) / 2);
ScaleI420Up2(videoFrame.Width(), videoFrame.Height(),
videoFrame.Buffer(), videoFrame.Size(),
scaledWidth, scaledHeight);
ScaleI420Up3_2(scaledWidth, scaledHeight, videoFrame.Buffer(),
videoFrame.Size(), scaledWidth, scaledHeight);
videoFrame.SetLength((outWidth * outHeight * 3) / 2);
videoFrame.SetWidth(outWidth);
videoFrame.SetHeight(outHeight);
return 0;
case 4:
videoFrame.VerifyAndAllocate((outWidth * outHeight * 3) / 2);
ScaleI420Up2(videoFrame.Width(), videoFrame.Height(),
videoFrame.Buffer(), videoFrame.Size(),
scaledWidth, scaledHeight);
ScaleI420Up2(scaledWidth, scaledHeight, videoFrame.Buffer(),
videoFrame.Size(), scaledWidth, scaledHeight);
videoFrame.SetLength((outWidth * outHeight * 3) / 2);
videoFrame.SetWidth(outWidth);
videoFrame.SetHeight(outHeight);
return 0;
default:
break;
}
}
// Use interpolator
#ifdef NO_INTERPOLATOR
assert(!"Interpolation not available");
#else
// Create new interpolator if the scaling changed.
if((_outWidth != outWidth) || (_outHeight != outHeight) ||
(_inWidth != videoFrame.Width()) ||
(_inHeight != videoFrame.Height()))
{
if(_ptrVideoInterpolator != 0)
{
deleteInterpolator(_ptrVideoInterpolator);
_ptrVideoInterpolator = 0;
}
_outWidth = outWidth;
_outHeight = outHeight;
_inWidth = videoFrame.Width();
_inHeight = videoFrame.Height();
}
if (!_ptrVideoInterpolator)
{
InterpolatorType interpolator = BiCubicBSpline;
if((_inWidth > ( _outWidth * 2)) ||
(_inWidth < ( _outWidth / 2)) ||
(_inHeight > ( _outHeight * 2)) ||
(_inHeight < ( _outHeight / 2)))
{
interpolator = BiCubicSine;
}
VideoFrameFormat inputFormat;
VideoFrameFormat outputFormat;
inputFormat.videoType = YUV420P;
inputFormat.xChannels = static_cast<short>(_inWidth);
inputFormat.yChannels = static_cast<short>(_inHeight);
outputFormat.videoType = YUV420P;
outputFormat.xChannels = static_cast<short>(_outWidth);
outputFormat.yChannels = static_cast<short>(_outHeight);
_interpolatorBuffer.VerifyAndAllocate(_outWidth * _outHeight *
3 / 2);
_ptrVideoInterpolator = createInterpolator(
interpolator,
&inputFormat,
&outputFormat);
if (_ptrVideoInterpolator == NULL)
{
WEBRTC_TRACE(
kTraceError,
kTraceVideo,
-1,
"FrameScaler::ResizeFrame(): Could not create\
interpolator");
return -1;
}
}
interpolateFrame(_ptrVideoInterpolator, videoFrame.Buffer(),
_interpolatorBuffer.Buffer());
videoFrame.VerifyAndAllocate(_interpolatorBuffer.Size());
videoFrame.SetLength(_outWidth * _outHeight * 3 / 2);
videoFrame.CopyFrame(videoFrame.Length(), _interpolatorBuffer.Buffer());
videoFrame.SetWidth(_outWidth);
videoFrame.SetHeight(_outHeight);
#endif // NO_INTERPOLATOR
}
return 0;
int reqSize = CalcBufferSize(kI420, _outWidth, _outHeight);
_scalerBuffer.VerifyAndAllocate(reqSize);
int ret = _scaler->Scale(videoFrame.Buffer(),
_scalerBuffer.Buffer(),
reqSize);
if (ret < 0)
return ret;
videoFrame.VerifyAndAllocate(reqSize);
videoFrame.CopyFrame(videoFrame.Length(), _scalerBuffer.Buffer());
videoFrame.SetWidth(_outWidth);
videoFrame.SetHeight(_outHeight);
}
return 0;
}
} // namespace webrtc
} // namespace webrtc
#endif // WEBRTC_MODULE_UTILITY_VIDEO
#endif // WEBRTC_MODULE_UTILITY_VIDEO
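
A usage sketch for the rewritten FrameScaler (the frame object and the 352x288 target are hypothetical; error handling elided):

  FrameScaler frameScaler;
  // frame is a VideoFrame assumed to already hold a valid I420 image.
  // Scales in place only when the dimensions differ; returns 0 or a
  // negative error code.
  if (frameScaler.ResizeFrameIfNeeded(frame, 352, 288) < 0) {
    // Scaling failed.
  }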

View File

@ -16,18 +16,12 @@
#include "engine_configurations.h"
#include "module_common_types.h"
#include "system_wrappers/interface/scoped_ptr.h"
#include "typedefs.h"
namespace webrtc
{
// TODO (perkj): add interpolator. Current implementation only support scaling
// (up or down) where the width and height are scaled by a constant factor 2-4.
// Also remove NO_INTERPOLATOR.
// Disable usage of the old intertpolator implementation.
#define NO_INTERPOLATOR 1
class Scaler;
class VideoFrame;
class FrameScaler
{
@ -41,10 +35,8 @@ public:
WebRtc_UWord32 outWidth,
WebRtc_UWord32 outHeight);
private:
typedef WebRtc_Word8* VideoInterpolator;
VideoInterpolator*_ptrVideoInterpolator;
VideoFrame _interpolatorBuffer;
scoped_ptr<Scaler> _scaler;
VideoFrame _scalerBuffer;
WebRtc_UWord32 _outWidth;
WebRtc_UWord32 _outHeight;
WebRtc_UWord32 _inWidth;

View File

@ -55,7 +55,6 @@
],
'include_dirs': [
'../../video_coding/main/interface',
'../../../common_video/vplib/main/interface',
],
'sources': [
'frame_scaler.cc',

View File

@ -13,19 +13,19 @@
'type': '<(library)',
'dependencies': [
'webrtc_utility',
'<(webrtc_root)/common_video/common_video.gyp:webrtc_vplib',
'<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
],
'include_dirs': [
'../interface',
'../../../interface',
'../../../../common_video/vplib/main/interface',
'<(webrtc_root)/common_video/libyuv/include',
],
'direct_dependent_settings': {
'include_dirs': [
'../interface',
'../../../interface',
'../../../../common_video/vplib/main/interface',
'<(webrtc_root)/common_video/libyuv/include',
],
},
'sources': [

View File

@ -304,9 +304,9 @@ WebRtc_Word32 VideoCaptureImpl::IncomingFrame(WebRtc_UWord8* videoFrame,
if (frameInfo.codecType == kVideoCodecUnknown) // None encoded. Convert to I420.
{
const VideoType vpLibType = videocapturemodule::
RawVideoTypeToVplibVideoType(frameInfo.rawType);
int size = CalcBufferSize(vpLibType, width, height);
const VideoType commonVideoType = videocapturemodule::
RawVideoTypeToCommonVideoVideoType(frameInfo.rawType);
int size = CalcBufferSize(commonVideoType, width, height);
if (size != videoFrameLength)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
@ -315,7 +315,8 @@ WebRtc_Word32 VideoCaptureImpl::IncomingFrame(WebRtc_UWord8* videoFrame,
}
// Allocate I420 buffer
_captureFrame.VerifyAndAllocate(CalcBufferSize(kI420, width, height));
int requiredLength = CalcBufferSize(kI420, width, height);
_captureFrame.VerifyAndAllocate(requiredLength);
if (!_captureFrame.Buffer())
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
@ -324,19 +325,19 @@ WebRtc_Word32 VideoCaptureImpl::IncomingFrame(WebRtc_UWord8* videoFrame,
}
memset(_captureFrame.Buffer(), 0, _captureFrame.Size());
const WebRtc_Word32 conversionResult = ConvertToI420(vpLibType, videoFrame,
const WebRtc_Word32 conversionResult = ConvertToI420(commonVideoType, videoFrame,
width, height,
_captureFrame.Buffer(),
_requestedCapability.interlaced,
_rotateFrame);
if (conversionResult <= 0)
if (conversionResult < 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"Failed to convert capture frame from type %d to I420",
frameInfo.rawType);
return -1;
}
_captureFrame.SetLength(conversionResult);
_captureFrame.SetLength(requiredLength);
}
else // Encoded format
{
@ -426,13 +427,13 @@ WebRtc_Word32 VideoCaptureImpl::SetCaptureRotation(VideoCaptureRotation rotation
_rotateFrame = kRotateNone;
break;
case kCameraRotate90:
_rotateFrame = kRotateClockwise;
_rotateFrame = kRotate90;
break;
case kCameraRotate180:
_rotateFrame = kRotate180;
break;
case kCameraRotate270:
_rotateFrame = kRotateAntiClockwise;
_rotateFrame = kRotate270;
break;
}
return 0;
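
In outline, the capture path above shows the changed return-value convention (a sketch; variable names as in the surrounding code):

  // Old: ConvertToI420 returned the frame length; <= 0 signalled failure.
  // New: it returns 0 on success and a negative libyuv error code, so the
  // length is computed up front with CalcBufferSize instead.
  int requiredLength = CalcBufferSize(kI420, width, height);
  _captureFrame.VerifyAndAllocate(requiredLength);
  if (ConvertToI420(commonVideoType, videoFrame, width, height,
                    _captureFrame.Buffer(),
                    _requestedCapability.interlaced, _rotateFrame) < 0)
      return -1;
  _captureFrame.SetLength(requiredLength);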

View File

@ -18,7 +18,7 @@
#include "video_capture.h"
#include "video_capture_config.h"
#include "tick_util.h"
#include "vplib.h"
#include "common_video/libyuv/include/libyuv.h"
namespace webrtc
{

View File

@ -14,7 +14,7 @@ namespace webrtc
{
namespace videocapturemodule
{
VideoType RawVideoTypeToVplibVideoType(RawVideoType type)
VideoType RawVideoTypeToCommonVideoVideoType(RawVideoType type)
{
switch (type)
{
@ -43,7 +43,7 @@ VideoType RawVideoTypeToVplibVideoType(RawVideoType type)
case kVideoNV12:
return kNV12;
default:
assert(!"RawVideoTypeToVplibVideoType unknown type");
assert(!"RawVideoTypeToCommonVideoVideoType unknown type");
}
return kUnknown;
}

View File

@ -12,13 +12,14 @@
#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VPLIB_CONVERSIONS_H_
#include "video_capture.h"
#include "vplib.h"
#include "common_video/libyuv/include/libyuv.h"
// TODO (mikhal): Update file name
namespace webrtc
{
namespace videocapturemodule
{
VideoType RawVideoTypeToVplibVideoType(RawVideoType type);
VideoType RawVideoTypeToCommonVideoVideoType(RawVideoType type);
} // namespace videocapturemodule
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VPLIB_CONVERSIONS_H_

View File

@ -51,4 +51,4 @@
# tab-width:2
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=2 shiftwidth=2:
# vim: set expandtab tabstop=2 shiftwidth=2:

View File

@ -21,9 +21,9 @@
#include "event_wrapper.h"
#include "testsupport/fileutils.h"
#include "common_video/libyuv/include/libyuv.h"
#include "video_codec_interface.h"
#include "video_source.h"
#include "vplib.h"
#define SSIM_CALC 0 // by default, don't compute SSIM

View File

@ -10,7 +10,7 @@
#include "test.h"
#include "video_source.h"
#include "vplib.h"
#include "common_video/libyuv/include/libyuv.h"
#include "event_wrapper.h"
#include "thread_wrapper.h"
#include <iostream>

View File

@ -19,7 +19,7 @@
'<(webrtc_root)/../test/test.gyp:test_support',
'<(webrtc_root)/../testing/gtest.gyp:gtest',
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
'<(webrtc_root)/common_video/common_video.gyp:webrtc_vplib',
'<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
],
'include_dirs': [

View File

@ -14,7 +14,6 @@
#include <cassert>
#include "testsupport/fileutils.h"
#include "vplib.h"
VideoSource::VideoSource()
:

View File

@ -12,8 +12,7 @@
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_VIDEO_SOURCE_H_
#include <string>
#include "vplib.h"
#include "common_video/libyuv/include/libyuv.h"
enum VideoSize
{

View File

@ -32,4 +32,4 @@
# tab-width:2
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=2 shiftwidth=2:
# vim: set expandtab tabstop=2 shiftwidth=2:

View File

@ -20,7 +20,8 @@
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_H_
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_H_
#include "interpolator.h"
#include "common_video/libyuv/include/scaler.h"
#include "video_codec_interface.h"
#include "vp8.h"
@ -138,7 +139,7 @@ private:
VP8Encoder* encoder_[kMaxSimulcastStreams];
bool encode_stream_[kMaxSimulcastStreams];
VideoFrameType frame_type_[kMaxSimulcastStreams];
interpolator* interpolator_[kMaxSimulcastStreams];
Scaler* scaler_[kMaxSimulcastStreams];
RawImage video_frame_[kMaxSimulcastStreams];
VideoCodec video_codec_;
};// end of VP8SimulcastEncoder class

View File

@ -13,11 +13,11 @@
'type': '<(library)',
'dependencies': [
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
'<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
],
'include_dirs': [
'../interface',
'<(webrtc_root)/common_video/interface',
'<(webrtc_root)/common_video/vplib/main/interface',
'<(webrtc_root)/modules/video_coding/codecs/interface',
'<(webrtc_root)/modules/interface',
],
@ -69,7 +69,7 @@
'dependencies': [
'test_framework',
'webrtc_vp8',
'<(webrtc_root)/common_video/common_video.gyp:webrtc_vplib',
'<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
'<(webrtc_root)/../test/test.gyp:test_support',
'<(webrtc_root)/../testing/gtest.gyp:gtest',

View File

@ -27,14 +27,14 @@ VP8SimulcastEncoder::VP8SimulcastEncoder() {
encoder_[i] = NULL;
encode_stream_[i] = false;
frame_type_[i] = kKeyFrame;
interpolator_[i] = NULL;
scaler_[i] = NULL;
}
}
VP8SimulcastEncoder::~VP8SimulcastEncoder() {
for (int i = 0; i < kMaxSimulcastStreams; i++) {
delete encoder_[i];
delete interpolator_[i];
delete scaler_[i];
delete [] video_frame_[i]._buffer;
}
}
@ -43,8 +43,8 @@ WebRtc_Word32 VP8SimulcastEncoder::Release() {
for (int i = 0; i < kMaxSimulcastStreams; i++) {
delete encoder_[i];
encoder_[i] = NULL;
delete interpolator_[i];
interpolator_[i] = NULL;
delete scaler_[i];
scaler_[i] = NULL;
delete [] video_frame_[i]._buffer;
video_frame_[i]._buffer = NULL;
video_frame_[i]._size = 0;
@ -121,17 +121,12 @@ WebRtc_Word32 VP8SimulcastEncoder::InitEncode(const VideoCodec* codecSettings,
}
if (codecSettings->width != video_codec.width ||
codecSettings->height != video_codec.height) {
if (interpolator_[i] == NULL) {
interpolator_[i] = new interpolator();
if (scaler_[i] == NULL) {
scaler_[i] = new Scaler();
}
interpolator_[i]->Set(
codecSettings->width,
codecSettings->height,
video_codec.width,
video_codec.height,
kI420,
kI420,
kBilinear);
scaler_[i]->Set(codecSettings->width, codecSettings->height,
video_codec.width, video_codec.height,
kI420, kI420, kScaleBox);
if (video_frame_[i]._size <
(3u * video_codec.width * video_codec.height / 2u)) {
@ -183,18 +178,15 @@ WebRtc_Word32 VP8SimulcastEncoder::Encode(
for (int i = 0; i < numberOfStreams; i++) {
if (encoder_[i] && encode_stream_[i]) {
// need the simulcastIdx to keep track of which encoder encoded the frame
// Need the simulcastIdx to keep track of which encoder encoded the frame.
info.codecSpecific.VP8.simulcastIdx = i;
VideoFrameType requested_frame_type = frame_type_[i];
if (interpolator_[i]) {
interpolator_[i]->Interpolate(inputImage._buffer,
video_frame_[i]._buffer,
video_frame_[i]._size);
video_frame_[i]._length =
3 *
video_codec_.simulcastStream[i].width *
video_codec_.simulcastStream[i].height /
2;
if (scaler_[i]) {
int video_frame_size = static_cast<int>(video_frame_[i]._size);
scaler_[i]->Scale(inputImage._buffer,
video_frame_[i]._buffer,
video_frame_size);
video_frame_[i]._length = video_frame_[i]._size = video_frame_size;
ret_val = encoder_[i]->Encode(video_frame_[i],
&info,
&requested_frame_type);
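
Each simulcast stream whose resolution differs from the input now gets its own Scaler, configured once in InitEncode and reused per frame, with kScaleBox replacing the old kBilinear default. Reduced to a sketch (inputWidth/inputHeight and streamWidth/streamHeight stand in for the codec settings and per-stream sizes):

  // One Scaler per stream that needs a different resolution.
  if (scaler_[i] == NULL)
    scaler_[i] = new Scaler();
  scaler_[i]->Set(inputWidth, inputHeight,    // e.g. the capture size
                  streamWidth, streamHeight,  // e.g. a smaller stream size
                  kI420, kI420, kScaleBox);
  int size = static_cast<int>(video_frame_[i]._size);
  scaler_[i]->Scale(inputImage._buffer, video_frame_[i]._buffer, size);
  video_frame_[i]._length = video_frame_[i]._size = size;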

View File

@ -14,7 +14,7 @@
'dependencies': [
'webrtc_i420',
'webrtc_vp8',
'<(webrtc_root)/common_video/common_video.gyp:webrtc_vplib',
'<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
],
'include_dirs': [

View File

@ -17,7 +17,7 @@
'rtp_rtcp',
'webrtc_utility',
'video_processing',
'<(webrtc_root)/common_video/common_video.gyp:webrtc_vplib',
'<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
],
'include_dirs': [
'../../../interface',

View File

@ -18,7 +18,7 @@
#include "test_callbacks.h"
#include "test_macros.h"
#include "testsupport/metrics/video_metrics.h"
#include "vplib.h"
#include "common_video/libyuv/include/libyuv.h"
using namespace webrtc;

View File

@ -13,7 +13,6 @@
#include <cassert>
#include "testsupport/fileutils.h"
#include "vplib.h"
VideoSource::VideoSource()
:

View File

@ -11,7 +11,7 @@
#ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_VIDEO_SOURCE_H_
#define WEBRTC_MODULES_VIDEO_CODING_TEST_VIDEO_SOURCE_H_
#include "vplib.h"
#include "common_video/libyuv/include/libyuv.h"
#include "typedefs.h"
#include <string>

View File

@ -25,14 +25,17 @@ namespace webrtc {
#define VPM_GENERAL_ERROR -1
#define VPM_MEMORY -2
#define VPM_PARAMETER_ERROR -3
#define VPM_UNINITIALIZED -4
#define VPM_UNIMPLEMENTED -5
#define VPM_SCALE_ERROR -4
#define VPM_UNINITIALIZED -5
#define VPM_UNIMPLEMENTED -6
enum VideoFrameResampling
{
// TODO: Do we still need crop/pad?
kNoRescaling, // disables rescaling
kFastRescaling, // fast up/down scaling; crop/pad when needed.
kFastRescaling, // point sampling
kBiLinear, // bi-linear interpolation
kBox, // box interpolation
};
} //namespace

View File

@ -18,369 +18,106 @@ VPMSimpleSpatialResampler::VPMSimpleSpatialResampler()
_resamplingMode(kFastRescaling),
_targetWidth(0),
_targetHeight(0),
_interpolatorPtr(NULL)
_scaler()
{
}
VPMSimpleSpatialResampler::~VPMSimpleSpatialResampler()
{
Release();
//
}
WebRtc_Word32
VPMSimpleSpatialResampler::Release()
{
if (_interpolatorPtr != NULL)
{
delete _interpolatorPtr;
_interpolatorPtr = NULL;
}
return VPM_OK;
}
WebRtc_Word32
VPMSimpleSpatialResampler::SetTargetFrameSize(WebRtc_UWord32 width,
WebRtc_UWord32 height)
VPMSimpleSpatialResampler::SetTargetFrameSize(WebRtc_Word32 width,
WebRtc_Word32 height)
{
if (_resamplingMode == kNoRescaling)
{
return VPM_OK;
}
if (width < 1 || height < 1)
{
return VPM_PARAMETER_ERROR;
}
_targetWidth = width;
_targetHeight = height;
if (_resamplingMode == kNoRescaling) {
return VPM_OK;
}
if (width < 1 || height < 1) {
return VPM_PARAMETER_ERROR;
}
_targetWidth = width;
_targetHeight = height;
return VPM_OK;
}
void
VPMSimpleSpatialResampler::SetInputFrameResampleMode(VideoFrameResampling
resamplingMode)
{
_resamplingMode = resamplingMode;
_resamplingMode = resamplingMode;
}
void
VPMSimpleSpatialResampler::Reset()
{
_resamplingMode = kFastRescaling;
_targetWidth = 0;
_targetHeight = 0;
_resamplingMode = kFastRescaling;
_targetWidth = 0;
_targetHeight = 0;
}
WebRtc_Word32
VPMSimpleSpatialResampler::ResampleFrame(const VideoFrame& inFrame,
VideoFrame& outFrame)
{
WebRtc_Word32 ret;
if (_resamplingMode == kNoRescaling)
return outFrame.CopyFrame(inFrame);
// Check if re-sampling is needed
if ((inFrame.Width() == (WebRtc_UWord32)_targetWidth) &&
(inFrame.Height() == (WebRtc_UWord32)_targetHeight)) {
return outFrame.CopyFrame(inFrame);
}
if (_resamplingMode == kNoRescaling)
{
return outFrame.CopyFrame(inFrame);
}
else if (_targetWidth < 1 || _targetHeight < 1)
{
return VPM_PARAMETER_ERROR;
}
// Setting scaler
//TODO: Modify scaler types
int retVal = 0;
retVal = _scaler.Set(inFrame.Width(), inFrame.Height(),
_targetWidth, _targetHeight, kI420, kI420, kScaleBox);
if (retVal < 0)
return retVal;
// Check if re-sampling is needed
if ((inFrame.Width() == _targetWidth) &&
(inFrame.Height() == _targetHeight))
{
return outFrame.CopyFrame(inFrame);
}
if (_resamplingMode == kBiLinear)
{
return BiLinearInterpolation(inFrame, outFrame);
}
outFrame.SetTimeStamp(inFrame.TimeStamp());
if (_targetWidth > inFrame.Width() &&
( ExactMultiplier(inFrame.Width(), inFrame.Height())))
{
// The codec might want to pad this later... adding 8 pixels
const WebRtc_UWord32 requiredSize = (_targetWidth + 8) *
(_targetHeight + 8) * 3 / 2;
outFrame.VerifyAndAllocate(requiredSize);
return UpsampleFrame(inFrame, outFrame);
}
else
{
// 1 cut/pad
// 2 scale factor 2X (in both cases if required)
WebRtc_UWord32 croppedWidth = inFrame.Width();
WebRtc_UWord32 croppedHeight = inFrame.Height();
//Calculates cropped dimensions
CropSize(inFrame.Width(), inFrame.Height(),
croppedWidth, croppedHeight);
VideoFrame* targetFrame;
outFrame.VerifyAndAllocate(croppedWidth * croppedHeight * 3 / 2);
targetFrame = &outFrame;
ConvertI420ToI420(inFrame.Buffer(), inFrame.Width(), inFrame.Height(),
targetFrame->Buffer(), croppedWidth, croppedHeight);
targetFrame->SetWidth(croppedWidth);
targetFrame->SetHeight(croppedHeight);
//We have correct aspect ratio, sub-sample with a multiple of two to get
//close to the target size
ret = SubsampleMultipleOf2(*targetFrame);
if (ret != VPM_OK)
{
return ret;
}
}
// Disabling cut/pad for now - only scaling.
int requiredSize = (WebRtc_UWord32)(_targetWidth * _targetHeight * 3 >> 1);
outFrame.VerifyAndAllocate(requiredSize);
outFrame.SetTimeStamp(inFrame.TimeStamp());
outFrame.SetWidth(_targetWidth);
outFrame.SetHeight(_targetHeight);
retVal = _scaler.Scale(inFrame.Buffer(), outFrame.Buffer(), requiredSize);
outFrame.SetLength(requiredSize);
if (retVal == 0)
return VPM_OK;
else
return VPM_SCALE_ERROR;
}
WebRtc_Word32
VPMSimpleSpatialResampler::UpsampleFrame(const VideoFrame& inFrame,
VideoFrame& outFrame)
{
outFrame.CopyFrame(inFrame);
float ratioWidth = _targetWidth / (float)inFrame.Width();
float ratioHeight = _targetHeight / (float)inFrame.Height();
WebRtc_UWord32 scaledWidth = 0;
WebRtc_UWord32 scaledHeight = 0;
if(ratioWidth > 1 || ratioHeight > 1)
{
// scale up
if(ratioWidth <= 1.5 && ratioHeight <= 1.5)
{
// scale up 1.5
WebRtc_Word32 ret = ScaleI420Up3_2(inFrame.Width(), inFrame.Height(),
outFrame.Buffer(), outFrame.Size(),
scaledWidth, scaledHeight);
if (ret < 0)
return VPM_GENERAL_ERROR;
}
else if(ratioWidth <= 2 && ratioHeight <= 2)
{
// scale up 2
WebRtc_Word32 ret = ScaleI420Up2(inFrame.Width(), inFrame.Height(),
outFrame.Buffer(), outFrame.Size(),
scaledWidth, scaledHeight);
if (ret < 0)
return VPM_GENERAL_ERROR;
}
else if(ratioWidth <= 2.25 && ratioHeight <= 2.25)
{
// scale up 2.25
WebRtc_Word32 ret = ScaleI420Up3_2(inFrame.Width(), inFrame.Height(),
outFrame.Buffer(), outFrame.Size(),
scaledWidth, scaledHeight);
if (ret < 0)
return VPM_GENERAL_ERROR;
ret = ScaleI420Up3_2(scaledWidth, scaledHeight,
outFrame.Buffer(), outFrame.Size(),
scaledWidth, scaledHeight);
if (ret < 0)
return VPM_GENERAL_ERROR;
}
else if(ratioWidth <= 3 && ratioHeight <= 3)
{
// scale up 3
WebRtc_Word32 ret = ScaleI420Up2(inFrame.Width(), inFrame.Height(),
outFrame.Buffer(), outFrame.Size(),
scaledWidth, scaledHeight);
if (ret < 0)
return VPM_GENERAL_ERROR;
ret = ScaleI420Up3_2(scaledWidth, scaledHeight,
outFrame.Buffer(), outFrame.Size(),
scaledWidth, scaledHeight);
if (ret < 0)
return VPM_GENERAL_ERROR;
}
else if(ratioWidth <= 4 && ratioHeight <= 4)
{
// scale up 4
WebRtc_Word32 ret = ScaleI420Up2(inFrame.Width(), inFrame.Height(),
outFrame.Buffer(), outFrame.Size(),
scaledWidth, scaledHeight);
if (ret < 0)
return VPM_GENERAL_ERROR;
ret = ScaleI420Up2(scaledWidth, scaledHeight,
outFrame.Buffer(), outFrame.Size(),
scaledWidth, scaledHeight);
if (ret < 0)
return VPM_GENERAL_ERROR;
}
//TODO: what if ratioWidth/Height >= 8 ?
if (scaledWidth <= 0 || scaledHeight <= 0)
{
return VPM_GENERAL_ERROR;
}
if ((static_cast<WebRtc_UWord32>(scaledWidth) > _targetWidth) ||
(static_cast<WebRtc_UWord32>(scaledHeight) > _targetHeight))
{
WebRtc_Word32 ret = CutI420Frame(outFrame.Buffer(),
scaledWidth, scaledHeight,
_targetWidth, _targetHeight);
if (ret < 0)
return VPM_GENERAL_ERROR;
}
}
else
{
return VPM_GENERAL_ERROR;
}
outFrame.SetWidth(_targetWidth);
outFrame.SetHeight(_targetHeight);
outFrame.SetLength(_targetWidth * _targetHeight * 3 / 2);
return VPM_OK;
}
WebRtc_Word32
VPMSimpleSpatialResampler::CropSize(WebRtc_UWord32 width, WebRtc_UWord32 height,
WebRtc_UWord32& croppedWidth,
WebRtc_UWord32& croppedHeight) const
{
// Crop the image to a width and height which is a
// multiple of two, so that we can do a simpler scaling.
croppedWidth = _targetWidth;
croppedHeight = _targetHeight;
if (width >= 8 * _targetWidth && height >= 8 * _targetHeight)
{
croppedWidth = 8 * _targetWidth;
croppedHeight = 8 * _targetHeight;
}
else if (width >= 4 * _targetWidth && height >= 4 * _targetHeight)
{
croppedWidth = 4 * _targetWidth;
croppedHeight = 4 * _targetHeight;
}
else if (width >= 2 * _targetWidth && height >= 2 * _targetHeight)
{
croppedWidth = 2 * _targetWidth;
croppedHeight = 2 * _targetHeight;
}
return VPM_OK;
}
WebRtc_Word32
VPMSimpleSpatialResampler::SubsampleMultipleOf2(VideoFrame& frame)
{
WebRtc_UWord32 tempWidth = frame.Width();
WebRtc_UWord32 tempHeight = frame.Height();
while (tempWidth / _targetWidth >= 2 && tempHeight / _targetHeight >= 2)
{
ScaleI420FrameQuarter(tempWidth, tempHeight, frame.Buffer());
tempWidth /= 2;
tempHeight /= 2;
}
frame.SetWidth(tempWidth);
frame.SetHeight(tempHeight);
frame.SetLength(frame.Width() * frame.Height() * 3 / 2);
return VPM_OK;
}
bool
VPMSimpleSpatialResampler::ExactMultiplier(WebRtc_UWord32 width,
WebRtc_UWord32 height) const
{
bool exactMultiplier = false;
if (_targetWidth % width == 0 && _targetHeight % height == 0)
{
// we have a multiple, is it an even multiple?
WebRtc_Word32 widthMultiple = _targetWidth / width;
WebRtc_Word32 heightMultiple = _targetHeight / height;
if ((widthMultiple == 2 && heightMultiple == 2) ||
(widthMultiple == 4 && heightMultiple == 4) ||
(widthMultiple == 8 && heightMultiple == 8) ||
(widthMultiple == 1 && heightMultiple == 1))
{
exactMultiplier = true;
}
}
return exactMultiplier;
}
WebRtc_Word32
VPMSimpleSpatialResampler::BiLinearInterpolation(const VideoFrame& inFrame,
VideoFrame& outFrame)
{
WebRtc_Word32 retVal;
if (_interpolatorPtr == NULL)
{
_interpolatorPtr = new interpolator();
}
// set bi-linear interpolator
retVal = _interpolatorPtr->Set(inFrame.Width(), inFrame.Height(),
_targetWidth, _targetHeight,
kI420, kI420, kBilinear );
if (retVal < 0 )
{
return retVal;
}
// Verify size of output buffer
outFrame.VerifyAndAllocate(_targetHeight * _targetWidth * 3 >> 1);
WebRtc_UWord32 outSz = outFrame.Size();
// interpolate frame
retVal = _interpolatorPtr->Interpolate(inFrame.Buffer(),
outFrame.Buffer(), outSz);
assert(outSz <= outFrame.Size());
// returns height
if (retVal < 0)
{
return retVal;
}
// Set output frame parameters
outFrame.SetHeight(_targetHeight);
outFrame.SetWidth(_targetWidth);
outFrame.SetLength(outSz);
outFrame.SetTimeStamp(inFrame.TimeStamp());
return VPM_OK;
}
WebRtc_UWord32
VPMSimpleSpatialResampler::TargetHeight()
{
return _targetHeight;
return _targetHeight;
}
WebRtc_UWord32
WebRtc_Word32
VPMSimpleSpatialResampler::TargetWidth()
{
return _targetWidth;
return _targetWidth;
}
bool
VPMSimpleSpatialResampler::ApplyResample(WebRtc_UWord32 width,
WebRtc_UWord32 height)
VPMSimpleSpatialResampler::ApplyResample(WebRtc_Word32 width,
WebRtc_Word32 height)
{
if ((width == _targetWidth && height == _targetHeight) ||
_resamplingMode == kNoRescaling)
return false;
else
return true;
if ((width == _targetWidth && height == _targetHeight) ||
_resamplingMode == kNoRescaling)
return false;
else
return true;
}
} //namespace
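
A usage sketch for the resampler after the rewrite (inFrame/outFrame are hypothetical VideoFrames; the QCIF target is chosen arbitrarily):

  VPMSimpleSpatialResampler resampler;
  resampler.SetInputFrameResampleMode(kBox);  // Any mode except kNoRescaling scales.
  resampler.SetTargetFrameSize(176, 144);     // QCIF.
  // Copies the frame when no rescaling is needed; otherwise scales via libyuv.
  WebRtc_Word32 ret = resampler.ResampleFrame(inFrame, outFrame);
  if (ret != VPM_OK) {
    // VPM_SCALE_ERROR (-4) now reports a failed scale.
  }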

View File

@ -20,59 +20,48 @@
#include "module_common_types.h"
#include "video_processing_defines.h"
#include "vplib.h"
#include "interpolator.h"
#include "common_video/libyuv/include/libyuv.h"
#include "common_video/libyuv/include/scaler.h"
namespace webrtc {
class VPMSpatialResampler
{
public:
virtual ~VPMSpatialResampler() {};
virtual WebRtc_Word32 SetTargetFrameSize(WebRtc_UWord32 width,
WebRtc_UWord32 height) = 0;
virtual void SetInputFrameResampleMode(VideoFrameResampling
resamplingMode) = 0;
virtual void Reset() = 0;
virtual WebRtc_Word32 ResampleFrame(const VideoFrame& inFrame,
VideoFrame& outFrame) = 0;
virtual WebRtc_UWord32 TargetWidth() = 0;
virtual WebRtc_UWord32 TargetHeight() = 0;
virtual WebRtc_Word32 Release() = 0;
virtual bool ApplyResample(WebRtc_UWord32 width, WebRtc_UWord32 height) = 0;
virtual ~VPMSpatialResampler() {};
virtual WebRtc_Word32 SetTargetFrameSize(WebRtc_Word32 width,
WebRtc_Word32 height) = 0;
virtual void SetInputFrameResampleMode(VideoFrameResampling
resamplingMode) = 0;
virtual void Reset() = 0;
virtual WebRtc_Word32 ResampleFrame(const VideoFrame& inFrame,
VideoFrame& outFrame) = 0;
virtual WebRtc_Word32 TargetWidth() = 0;
virtual WebRtc_Word32 TargetHeight() = 0;
virtual bool ApplyResample(WebRtc_Word32 width, WebRtc_Word32 height) = 0;
};
class VPMSimpleSpatialResampler : public VPMSpatialResampler
{
public:
VPMSimpleSpatialResampler();
~VPMSimpleSpatialResampler();
virtual WebRtc_Word32 SetTargetFrameSize(WebRtc_UWord32 width,
WebRtc_UWord32 height);
virtual void SetInputFrameResampleMode(VideoFrameResampling resamplingMode);
virtual void Reset();
virtual WebRtc_Word32 ResampleFrame(const VideoFrame& inFrame,
VideoFrame& outFrame);
virtual WebRtc_UWord32 TargetWidth();
virtual WebRtc_UWord32 TargetHeight();
virtual WebRtc_Word32 Release();
virtual bool ApplyResample(WebRtc_UWord32 width, WebRtc_UWord32 height);
VPMSimpleSpatialResampler();
~VPMSimpleSpatialResampler();
virtual WebRtc_Word32 SetTargetFrameSize(WebRtc_Word32 width,
WebRtc_Word32 height);
virtual void SetInputFrameResampleMode(VideoFrameResampling resamplingMode);
virtual void Reset();
virtual WebRtc_Word32 ResampleFrame(const VideoFrame& inFrame,
VideoFrame& outFrame);
virtual WebRtc_Word32 TargetWidth();
virtual WebRtc_Word32 TargetHeight();
virtual bool ApplyResample(WebRtc_Word32 width, WebRtc_Word32 height);
private:
WebRtc_Word32 UpsampleFrame(const VideoFrame& inFrame, VideoFrame& outFrame);
WebRtc_Word32 CropSize(WebRtc_UWord32 width, WebRtc_UWord32 height,
WebRtc_UWord32& croppedWidth,
WebRtc_UWord32& croppedHeight) const;
WebRtc_Word32 SubsampleMultipleOf2(VideoFrame& frame);
bool ExactMultiplier(WebRtc_UWord32 width, WebRtc_UWord32 height) const;
WebRtc_Word32 BiLinearInterpolation(const VideoFrame& inFrame,
VideoFrame& outFrame);
VideoFrameResampling _resamplingMode;
WebRtc_UWord32 _targetWidth;
WebRtc_UWord32 _targetHeight;
interpolator* _interpolatorPtr;
VideoFrameResampling _resamplingMode;
WebRtc_Word32 _targetWidth;
WebRtc_Word32 _targetHeight;
Scaler _scaler;
};
} //namespace

View File

@ -14,7 +14,7 @@
'dependencies': [
'webrtc_utility',
'<(webrtc_root)/common_audio/common_audio.gyp:signal_processing',
'<(webrtc_root)/common_video/common_video.gyp:webrtc_vplib',
'<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
],
'include_dirs': [

View File

@ -10,7 +10,6 @@
#include "unit_test.h"
#include "video_processing.h"
#include "vplib.h"
#include "tick_util.h"
#include <cstdio>

View File

@ -10,7 +10,6 @@
#include "unit_test.h"
#include "video_processing.h"
#include "vplib.h"
#include "content_analysis.h"
using namespace webrtc;

View File

@ -9,8 +9,11 @@
*/
#include "unit_test.h"
#include "trace.h"
#include "common_video/libyuv/include/libyuv.h"
#include "tick_util.h"
#include "trace.h"
using webrtc::Trace;

View File

@ -8,12 +8,11 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef UNIT_TEST_H
#define UNIT_TEST_H
#ifndef VPM_UNIT_TEST_H
#define VPM_UNIT_TEST_H
#include "video_processing.h"
#include "vplib.h"
#include <gtest/gtest.h>
@ -35,4 +34,4 @@ protected:
};
#endif // UNIT_TEST_H
#endif // VPM_UNIT_TEST_H

View File

@ -18,7 +18,7 @@
],
'include_dirs': [
'../../../../system_wrappers/interface',
'../../../../common_video/vplib/main/interface',
'<(webrtc_root)/common_video/libyuv/include',
'../../../../modules/video_processing/main/source',
],
'sources': [

View File

@ -10,7 +10,7 @@
#include "video_render_android_surface_view.h"
#include "critical_section_wrapper.h"
#include "vplib.h"
#include "common_video/libyuv/include/libyuv.h"
#include "tick_util.h"
#ifdef ANDROID_NDK_8_OR_ABOVE
#include <android/bitmap.h>
@ -402,7 +402,7 @@ void AndroidSurfaceViewChannel::DeliverFrame(JNIEnv* jniEnv)
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: Locked bitmap", __FUNCTION__);
// Convert I420 straight into the Java bitmap.
const int conversionResult=ConvertI420ToRGB565( (unsigned char* )_bufferToRender.Buffer(), (unsigned char* ) pixels, _bitmapWidth, _bitmapHeight);
if(conversionResult<=0)
if(conversionResult<0)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Color conversion failed.", __FUNCTION__);
}
@ -448,8 +448,8 @@ void AndroidSurfaceViewChannel::DeliverFrame(JNIEnv* jniEnv)
if(_javaByteBufferObj && _bitmapWidth && _bitmapHeight)
{
const int conversionResult=ConvertI420ToRGB565Android((unsigned char* )_bufferToRender.Buffer(), _directBuffer, _bitmapWidth, _bitmapHeight);
if(conversionResult<=0)
{
if(conversionResult<0)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Color conversion failed.", __FUNCTION__);
_renderCritSect.Leave();
return;

View File

@ -17,7 +17,7 @@
#include "video_render_frames.h"
#include "tick_util.h"
#include "map_wrapper.h"
#include "vplib.h"
#include "common_video/libyuv/include/libyuv.h"
#include <cassert>

View File

@ -12,7 +12,7 @@
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_CHANNEL_H_
#include "video_render_defines.h"
#include "vplib.h"
#include "common_video/libyuv/include/libyuv.h"
#include <sys/shm.h>
#include <X11/Xlib.h>

View File

@ -19,7 +19,7 @@
#include "event_wrapper.h"
#include "trace.h"
#include "thread_wrapper.h"
#include "vplib.h"
#include "common_video/libyuv/include/libyuv.h"
namespace webrtc {
@ -236,8 +236,8 @@ int VideoChannelAGL::DeliverFrame(unsigned char* buffer, int bufferSize, unsigne
return -1;
}
int rgbLength = ConvertI420ToRGBAMac((WebRtc_UWord8*)buffer, (WebRtc_UWord8*)_buffer, (WebRtc_Word32)_width, (WebRtc_Word32)_height, 0);
if (rgbLength == -1)
int rgbret = ConvertI420ToRGBAMac((WebRtc_UWord8*)buffer, (WebRtc_UWord8*)_buffer, (WebRtc_Word32)_width, (WebRtc_Word32)_height, 0);
if (rgbret < 0)
{
_owner->UnlockAGLCntx();
return -1;

View File

@ -16,7 +16,7 @@
#include "event_wrapper.h"
#include "trace.h"
#include "thread_wrapper.h"
#include "vplib.h"
#include "common_video/libyuv/include/libyuv.h"
namespace webrtc {
@ -232,7 +232,7 @@ int VideoChannelNSOpenGL::DeliverFrame(unsigned char* buffer, int bufferSize, un
return -1;
}
int rgbLength = ConvertFromI420(kRGBAMac, buffer, _width, _height, _buffer);
int rgbLength = ConvertI420ToRGBAMac(buffer, _buffer, _width, _height, 0);
if (rgbLength == -1)
{
_owner->UnlockAGLCntx();

View File

@ -13,7 +13,7 @@
'type': '<(library)',
'dependencies': [
'webrtc_utility',
'<(webrtc_root)/common_video/common_video.gyp:webrtc_vplib',
'<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
],
'include_dirs': [
@ -144,7 +144,7 @@
'video_render_module',
'webrtc_utility',
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
'<(webrtc_root)/common_video/common_video.gyp:webrtc_vplib',
'<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
],
'sources': [
'../test/testAPI/testAPI.cpp',

View File

@ -19,7 +19,7 @@
#include "event_wrapper.h"
#include "trace.h"
#include "thread_wrapper.h"
#include "vplib.h"
#include "common_video/libyuv/include/libyuv.h"
namespace webrtc {

View File

@ -13,7 +13,7 @@
#include "typedefs.h"
#include "i_video_render_win.h"
#include "vplib.h"
#include "common_video/libyuv/include/libyuv.h"
#include "ddraw.h"
#include <Map>

View File

@ -14,7 +14,7 @@
'dependencies': [
# common_video
'<(webrtc_root)/common_video/common_video.gyp:webrtc_vplib',
'<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
'<(webrtc_root)/common_video/common_video.gyp:webrtc_jpeg',
# ModulesShared

View File

@ -12,7 +12,7 @@
#include "video_render.h"
#include "video_render_defines.h"
#include "vie_render_manager.h"
#include "vplib.h"
#include "common_video/libyuv/include/libyuv.h"
namespace webrtc {