Switching WebRTC to LibYuv.

General notes:
1. The API structure was not modified; it is still based on VPLIB.
2. API modification: return values follow libyuv conventions, i.e. 0 on success and a negative value on error (instead of returning the length).
3. All scaling (interpolation) is now done via the Scale interface. Crop/pad is not used.
4. VPLIB was completely removed. All tests are now part of the libyuv unit test (significantly more comprehensive and based on gtest).
5. JPEG is not yet implemented in LibYuv, so the existing implementation remains.

Review URL: http://webrtc-codereview.appspot.com/258001

git-svn-id: http://webrtc.googlecode.com/svn/trunk@1140 4adac7df-926f-26a2-2b94-8c16560cd09d
Commit: 2ab104e6be
Parent: ffa0a9e9c9
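Notes 2 and 3 above describe the new calling convention: conversion and scaling calls return 0 on success and a negative value on error, and all resizing goes through a single Scale interface. A minimal sketch of that pattern, mirroring the Scaler::Set/Scale and CalcBufferSize calls that appear in the diff below; the helper name, buffer types and dimensions are illustrative and not part of the tree:

#include "common_video/libyuv/include/libyuv.h"
#include "common_video/libyuv/include/scaler.h"

// Illustrative helper (not in the tree): scale one I420 frame with the new
// interface. Every call returns 0 on success or a negative error code.
int ScaleI420Frame(webrtc::Scaler& scaler,
                   WebRtc_UWord8* in_buffer, int in_width, int in_height,
                   WebRtc_UWord8* out_buffer, int out_width, int out_height) {
  // Configure source/destination size, format and scaling method once;
  // kScaleBox replaces the old per-factor VPLIB helpers.
  if (scaler.Set(in_width, in_height, out_width, out_height,
                 webrtc::kI420, webrtc::kI420, webrtc::kScaleBox) < 0) {
    return -1;
  }
  // The caller is expected to have allocated
  // CalcBufferSize(kI420, out_width, out_height) bytes for out_buffer.
  int required_size =
      webrtc::CalcBufferSize(webrtc::kI420, out_width, out_height);
  return scaler.Scale(in_buffer, out_buffer, required_size);
}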
@@ -9,7 +9,7 @@
 {
   'includes': [
     '../common_settings.gypi', # Common settings
-    'vplib/main/source/vplib.gypi',
+    'libyuv/libyuv.gypi',
     'jpeg/main/source/jpeg.gypi',
   ],
 }
@@ -20,7 +20,6 @@
 #include <stdio.h>
 #include <string.h>

-#include "vplib.h"
 #include "jpeg.h"
 #include "data_manager.h"
 extern "C" {
@@ -15,7 +15,7 @@
   'target_name': 'webrtc_jpeg',
   'type': '<(library)',
   'dependencies': [
-    'webrtc_vplib',
+    'webrtc_libyuv',
   ],
   'include_dirs': [
     '../../../interface',
@@ -72,8 +72,7 @@
   ],
   'include_dirs': [
     '../interface',
-    '../../../vplib/main/interface',
     '../source',
   ],
   'sources': [

@@ -13,7 +13,7 @@
 #include <string.h> // memcpy

 #include "test_buffer.h"
-#include "vplib.h"
+#include "common_video/libyuv/include/libyuv.h"

 TestBuffer::TestBuffer():
 _buffer(0),
@ -9,211 +9,49 @@
|
|||||||
*/
|
*/
|
||||||
|
|
||||||
#ifdef WEBRTC_MODULE_UTILITY_VIDEO
|
#ifdef WEBRTC_MODULE_UTILITY_VIDEO
|
||||||
|
|
||||||
#include "frame_scaler.h"
|
#include "frame_scaler.h"
|
||||||
|
|
||||||
|
#include "common_video/libyuv/include/scaler.h"
|
||||||
#include "trace.h"
|
#include "trace.h"
|
||||||
#include "vplib.h"
|
|
||||||
|
|
||||||
#ifndef NO_INTERPOLATOR
|
|
||||||
#include "InterpolatorInterface.h"
|
|
||||||
#endif
|
|
||||||
|
|
||||||
namespace webrtc {
|
namespace webrtc {
|
||||||
FrameScaler::FrameScaler()
|
FrameScaler::FrameScaler()
|
||||||
: _ptrVideoInterpolator(0),
|
: _scaler(new Scaler()),
|
||||||
|
_scalerBuffer(),
|
||||||
_outWidth(0),
|
_outWidth(0),
|
||||||
_outHeight(0),
|
_outHeight(0),
|
||||||
_inWidth(0),
|
_inWidth(0),
|
||||||
_inHeight(0)
|
_inHeight(0) {}
|
||||||
{
|
|
||||||
}
|
|
||||||
|
|
||||||
FrameScaler::~FrameScaler( )
|
FrameScaler::~FrameScaler( ) {}
|
||||||
{
|
|
||||||
#ifndef NO_INTERPOLATOR
|
|
||||||
if( _ptrVideoInterpolator != 0)
|
|
||||||
{
|
|
||||||
deleteInterpolator(_ptrVideoInterpolator);
|
|
||||||
}
|
|
||||||
#endif
|
|
||||||
}
|
|
||||||
|
|
||||||
WebRtc_Word32 FrameScaler::ResizeFrameIfNeeded(VideoFrame& videoFrame,
|
WebRtc_Word32 FrameScaler::ResizeFrameIfNeeded(VideoFrame& videoFrame,
|
||||||
WebRtc_UWord32 outWidth,
|
WebRtc_UWord32 outWidth,
|
||||||
WebRtc_UWord32 outHeight)
|
WebRtc_UWord32 outHeight) {
|
||||||
{
|
if ( videoFrame.Length( ) == 0) {
|
||||||
if( videoFrame.Length( ) == 0)
|
return -1;
|
||||||
{
|
}
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
|
|
||||||
if((videoFrame.Width() != outWidth) || ( videoFrame.Height() != outHeight))
|
if ((videoFrame.Width() != outWidth) || (videoFrame.Height() != outHeight)) {
|
||||||
{
|
_scaler->Set(videoFrame.Width(), videoFrame.Height(),
|
||||||
// Scale down by factor 2-4.
|
outWidth, outHeight,
|
||||||
if(videoFrame.Width() % outWidth == 0 &&
|
kI420, kI420, kScaleBox);
|
||||||
videoFrame.Height() % outHeight == 0 &&
|
|
||||||
(videoFrame.Width() / outWidth) == (videoFrame.Height() / outHeight))
|
|
||||||
{
|
|
||||||
const WebRtc_Word32 multiple = videoFrame.Width() / outWidth;
|
|
||||||
WebRtc_UWord32 scaledWidth;
|
|
||||||
WebRtc_UWord32 scaledHeight;
|
|
||||||
switch(multiple)
|
|
||||||
{
|
|
||||||
case 2:
|
|
||||||
ScaleI420FrameQuarter(videoFrame.Width(), videoFrame.Height(),
|
|
||||||
videoFrame.Buffer());
|
|
||||||
|
|
||||||
videoFrame.SetLength(outWidth * outHeight * 3 / 2);
|
int reqSize = CalcBufferSize(kI420, _outWidth, _outHeight);
|
||||||
videoFrame.SetWidth( outWidth);
|
_scalerBuffer.VerifyAndAllocate(reqSize);
|
||||||
videoFrame.SetHeight(outHeight);
|
int ret = _scaler->Scale(videoFrame.Buffer(),
|
||||||
return 0;
|
_scalerBuffer.Buffer(),
|
||||||
case 3:
|
reqSize);
|
||||||
ScaleI420Down1_3(videoFrame.Width(), videoFrame.Height(),
|
if (ret < 0)
|
||||||
videoFrame.Buffer(), videoFrame.Size(),
|
return ret;
|
||||||
scaledWidth, scaledHeight);
|
videoFrame.VerifyAndAllocate(reqSize);
|
||||||
videoFrame.SetLength((outWidth * outHeight * 3) / 2);
|
videoFrame.CopyFrame(videoFrame.Length(), _scalerBuffer.Buffer());
|
||||||
videoFrame.SetWidth(outWidth);
|
videoFrame.SetWidth(_outWidth);
|
||||||
videoFrame.SetHeight(outHeight);
|
videoFrame.SetHeight(_outHeight);
|
||||||
return 0;
|
}
|
||||||
case 4:
|
return 0;
|
||||||
ScaleI420FrameQuarter(videoFrame.Width(), videoFrame.Height(),
|
|
||||||
videoFrame.Buffer());
|
|
||||||
|
|
||||||
ScaleI420FrameQuarter(videoFrame.Width() >> 1,
|
|
||||||
videoFrame.Height() >> 1,
|
|
||||||
videoFrame.Buffer());
|
|
||||||
|
|
||||||
videoFrame.SetLength((outWidth * outHeight * 3)/ 2);
|
|
||||||
videoFrame.SetWidth(outWidth);
|
|
||||||
videoFrame.SetHeight(outHeight);
|
|
||||||
return 0;
|
|
||||||
default:
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// Scale up by factor 2-4.
|
|
||||||
if(outWidth % videoFrame.Width() == 0 &&
|
|
||||||
outHeight % videoFrame.Height() == 0 &&
|
|
||||||
(outWidth / videoFrame.Width()) == (outHeight / videoFrame.Height()))
|
|
||||||
{
|
|
||||||
const WebRtc_Word32 multiple = outWidth / videoFrame.Width();
|
|
||||||
WebRtc_UWord32 scaledWidth = 0;
|
|
||||||
WebRtc_UWord32 scaledHeight = 0;
|
|
||||||
switch(multiple)
|
|
||||||
{
|
|
||||||
case 2:
|
|
||||||
videoFrame.VerifyAndAllocate((outHeight * outWidth * 3) / 2);
|
|
||||||
ScaleI420Up2(videoFrame.Width(), videoFrame.Height(),
|
|
||||||
videoFrame.Buffer(), videoFrame.Size(),
|
|
||||||
scaledWidth, scaledHeight);
|
|
||||||
videoFrame.SetLength((outWidth * outHeight * 3) / 2);
|
|
||||||
videoFrame.SetWidth(outWidth);
|
|
||||||
videoFrame.SetHeight(outHeight);
|
|
||||||
return 0;
|
|
||||||
case 3:
|
|
||||||
videoFrame.VerifyAndAllocate((outWidth * outHeight * 3) / 2);
|
|
||||||
ScaleI420Up2(videoFrame.Width(), videoFrame.Height(),
|
|
||||||
videoFrame.Buffer(), videoFrame.Size(),
|
|
||||||
scaledWidth, scaledHeight);
|
|
||||||
|
|
||||||
ScaleI420Up3_2(scaledWidth, scaledHeight, videoFrame.Buffer(),
|
|
||||||
videoFrame.Size(), scaledWidth, scaledHeight);
|
|
||||||
videoFrame.SetLength((outWidth * outHeight * 3) / 2);
|
|
||||||
videoFrame.SetWidth(outWidth);
|
|
||||||
videoFrame.SetHeight(outHeight);
|
|
||||||
return 0;
|
|
||||||
case 4:
|
|
||||||
videoFrame.VerifyAndAllocate((outWidth * outHeight * 3) / 2);
|
|
||||||
ScaleI420Up2(videoFrame.Width(), videoFrame.Height(),
|
|
||||||
videoFrame.Buffer(), videoFrame.Size(),
|
|
||||||
scaledWidth, scaledHeight);
|
|
||||||
ScaleI420Up2(scaledWidth, scaledHeight, videoFrame.Buffer(),
|
|
||||||
videoFrame.Size(), scaledWidth, scaledHeight);
|
|
||||||
videoFrame.SetLength((outWidth * outHeight * 3) / 2);
|
|
||||||
videoFrame.SetWidth(outWidth);
|
|
||||||
videoFrame.SetHeight(outHeight);
|
|
||||||
return 0;
|
|
||||||
default:
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// Use interpolator
|
|
||||||
#ifdef NO_INTERPOLATOR
|
|
||||||
assert(!"Interpolation not available");
|
|
||||||
#else
|
|
||||||
// Create new interpolator if the scaling changed.
|
|
||||||
if((_outWidth != outWidth) || (_outHeight != outHeight) ||
|
|
||||||
(_inWidth != videoFrame.Width()) ||
|
|
||||||
(_inHeight != videoFrame.Height()))
|
|
||||||
{
|
|
||||||
if(_ptrVideoInterpolator != 0)
|
|
||||||
{
|
|
||||||
deleteInterpolator(_ptrVideoInterpolator);
|
|
||||||
_ptrVideoInterpolator = 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
_outWidth = outWidth;
|
|
||||||
_outHeight = outHeight;
|
|
||||||
_inWidth = videoFrame.Width();
|
|
||||||
_inHeight = videoFrame.Height();
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
if (!_ptrVideoInterpolator)
|
|
||||||
{
|
|
||||||
InterpolatorType interpolator = BiCubicBSpline;
|
|
||||||
|
|
||||||
if((_inWidth > ( _outWidth * 2)) ||
|
|
||||||
(_inWidth < ( _outWidth / 2)) ||
|
|
||||||
(_inHeight > ( _outHeight * 2)) ||
|
|
||||||
(_inHeight < ( _outHeight / 2)))
|
|
||||||
|
|
||||||
{
|
|
||||||
interpolator = BiCubicSine;
|
|
||||||
}
|
|
||||||
|
|
||||||
VideoFrameFormat inputFormat;
|
|
||||||
VideoFrameFormat outputFormat;
|
|
||||||
|
|
||||||
inputFormat.videoType = YUV420P;
|
|
||||||
inputFormat.xChannels = static_cast<short>(_inWidth);
|
|
||||||
inputFormat.yChannels = static_cast<short>(_inHeight);
|
|
||||||
|
|
||||||
outputFormat.videoType = YUV420P;
|
|
||||||
outputFormat.xChannels = static_cast<short>(_outWidth);
|
|
||||||
outputFormat.yChannels = static_cast<short>(_outHeight);
|
|
||||||
|
|
||||||
_interpolatorBuffer.VerifyAndAllocate(_outWidth * _outHeight *
|
|
||||||
3 / 2);
|
|
||||||
|
|
||||||
_ptrVideoInterpolator = createInterpolator(
|
|
||||||
interpolator,
|
|
||||||
&inputFormat,
|
|
||||||
&outputFormat);
|
|
||||||
if (_ptrVideoInterpolator == NULL)
|
|
||||||
{
|
|
||||||
WEBRTC_TRACE(
|
|
||||||
kTraceError,
|
|
||||||
kTraceVideo,
|
|
||||||
-1,
|
|
||||||
"FrameScaler::ResizeFrame(): Could not create\
|
|
||||||
interpolator");
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
interpolateFrame(_ptrVideoInterpolator, videoFrame.Buffer(),
|
|
||||||
_interpolatorBuffer.Buffer());
|
|
||||||
|
|
||||||
videoFrame.VerifyAndAllocate(_interpolatorBuffer.Size());
|
|
||||||
videoFrame.SetLength(_outWidth * _outHeight * 3 / 2);
|
|
||||||
videoFrame.CopyFrame(videoFrame.Length(), _interpolatorBuffer.Buffer());
|
|
||||||
videoFrame.SetWidth(_outWidth);
|
|
||||||
videoFrame.SetHeight(_outHeight);
|
|
||||||
#endif // NO_INTERPOLATOR
|
|
||||||
}
|
|
||||||
return 0;
|
|
||||||
}
|
}
|
||||||
} // namespace webrtc
|
} // namespace webrtc
|
||||||
|
|
||||||
#endif // WEBRTC_MODULE_UTILITY_VIDEO
|
#endif // WEBRTC_MODULE_UTILITY_VIDEO
|
||||||
|
@@ -16,18 +16,12 @@

 #include "engine_configurations.h"
 #include "module_common_types.h"
+#include "system_wrappers/interface/scoped_ptr.h"
 #include "typedefs.h"

 namespace webrtc
 {
-// TODO (perkj): add interpolator. Current implementation only support scaling
-// (up or down) where the width and height are scaled by a constant factor 2-4.
-// Also remove NO_INTERPOLATOR.
-
-// Disable usage of the old intertpolator implementation.
-#define NO_INTERPOLATOR 1
-
-
+class Scaler;
 class VideoFrame;
 class FrameScaler
 {
@@ -41,10 +35,8 @@ public:
                                    WebRtc_UWord32 outWidth,
                                    WebRtc_UWord32 outHeight);
 private:
-    typedef WebRtc_Word8* VideoInterpolator;
-    VideoInterpolator* _ptrVideoInterpolator;
-
-    VideoFrame _interpolatorBuffer;
+    scoped_ptr<Scaler> _scaler;
+    VideoFrame _scalerBuffer;
     WebRtc_UWord32 _outWidth;
     WebRtc_UWord32 _outHeight;
     WebRtc_UWord32 _inWidth;
@@ -55,7 +55,6 @@
   ],
   'include_dirs': [
     '../../video_coding/main/interface',
-    '../../../common_video/vplib/main/interface',
   ],
   'sources': [
     'frame_scaler.cc',
@@ -13,19 +13,19 @@
   'type': '<(library)',
   'dependencies': [
     'webrtc_utility',
-    '<(webrtc_root)/common_video/common_video.gyp:webrtc_vplib',
+    '<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
     '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
   ],
   'include_dirs': [
     '../interface',
     '../../../interface',
-    '../../../../common_video/vplib/main/interface',
+    '<(webrtc_root)/common_video/libyuv/include',
   ],
   'direct_dependent_settings': {
     'include_dirs': [
       '../interface',
       '../../../interface',
-      '../../../../common_video/vplib/main/interface',
+      '<(webrtc_root)/common_video/libyuv/include',
     ],
   },
   'sources': [
@@ -304,9 +304,9 @@ WebRtc_Word32 VideoCaptureImpl::IncomingFrame(WebRtc_UWord8* videoFrame,

 if (frameInfo.codecType == kVideoCodecUnknown) // None encoded. Convert to I420.
 {
-    const VideoType vpLibType = videocapturemodule::
-        RawVideoTypeToVplibVideoType(frameInfo.rawType);
-    int size = CalcBufferSize(vpLibType, width, height);
+    const VideoType commonVideoType = videocapturemodule::
+        RawVideoTypeToCommonVideoVideoType(frameInfo.rawType);
+    int size = CalcBufferSize(commonVideoType, width, height);
     if (size != videoFrameLength)
     {
         WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
@@ -315,7 +315,8 @@ WebRtc_Word32 VideoCaptureImpl::IncomingFrame(WebRtc_UWord8* videoFrame,
     }

     // Allocate I420 buffer
-    _captureFrame.VerifyAndAllocate(CalcBufferSize(kI420, width, height));
+    int requiredLength = CalcBufferSize(kI420, width, height);
+    _captureFrame.VerifyAndAllocate(requiredLength);
     if (!_captureFrame.Buffer())
     {
         WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
@@ -324,19 +325,19 @@ WebRtc_Word32 VideoCaptureImpl::IncomingFrame(WebRtc_UWord8* videoFrame,
     }

     memset(_captureFrame.Buffer(), 0, _captureFrame.Size());
-    const WebRtc_Word32 conversionResult = ConvertToI420(vpLibType, videoFrame,
+    const WebRtc_Word32 conversionResult = ConvertToI420(commonVideoType, videoFrame,
                                                          width, height,
                                                          _captureFrame.Buffer(),
                                                          _requestedCapability.interlaced,
                                                          _rotateFrame);
-    if (conversionResult <= 0)
+    if (conversionResult < 0)
     {
         WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                      "Failed to convert capture frame from type %d to I420",
                      frameInfo.rawType);
         return -1;
     }
-    _captureFrame.SetLength(conversionResult);
+    _captureFrame.SetLength(requiredLength);
 }
 else // Encoded format
 {
@@ -426,13 +427,13 @@ WebRtc_Word32 VideoCaptureImpl::SetCaptureRotation(VideoCaptureRotation rotation
         _rotateFrame = kRotateNone;
         break;
     case kCameraRotate90:
-        _rotateFrame = kRotateClockwise;
+        _rotateFrame = kRotate90;
         break;
     case kCameraRotate180:
         _rotateFrame = kRotate180;
         break;
     case kCameraRotate270:
-        _rotateFrame = kRotateAntiClockwise;
+        _rotateFrame = kRotate270;
         break;
     }
     return 0;
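The capture path above now sizes the I420 buffer with CalcBufferSize() and treats any negative ConvertToI420() result as failure, instead of reading the converted length from the return value. A condensed sketch of that flow, reusing the variable names from the hunks above; the surrounding member variables are assumed to be in scope:

// Sketch only: commonVideoType, videoFrame, width, height, _captureFrame,
// _requestedCapability and _rotateFrame come from the surrounding capture code.
int requiredLength = CalcBufferSize(kI420, width, height);
_captureFrame.VerifyAndAllocate(requiredLength);
if (ConvertToI420(commonVideoType, videoFrame, width, height,
                  _captureFrame.Buffer(),
                  _requestedCapability.interlaced, _rotateFrame) < 0)
{
    return -1;  // A negative return value now means the conversion failed.
}
// The frame length is derived from the computed buffer size, not from the
// conversion's return value (which no longer reports a length).
_captureFrame.SetLength(requiredLength);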
@ -18,7 +18,7 @@
|
|||||||
#include "video_capture.h"
|
#include "video_capture.h"
|
||||||
#include "video_capture_config.h"
|
#include "video_capture_config.h"
|
||||||
#include "tick_util.h"
|
#include "tick_util.h"
|
||||||
#include "vplib.h"
|
#include "common_video/libyuv/include/libyuv.h"
|
||||||
|
|
||||||
namespace webrtc
|
namespace webrtc
|
||||||
{
|
{
|
||||||
|
@ -14,7 +14,7 @@ namespace webrtc
|
|||||||
{
|
{
|
||||||
namespace videocapturemodule
|
namespace videocapturemodule
|
||||||
{
|
{
|
||||||
VideoType RawVideoTypeToVplibVideoType(RawVideoType type)
|
VideoType RawVideoTypeToCommonVideoVideoType(RawVideoType type)
|
||||||
{
|
{
|
||||||
switch (type)
|
switch (type)
|
||||||
{
|
{
|
||||||
@ -43,7 +43,7 @@ VideoType RawVideoTypeToVplibVideoType(RawVideoType type)
|
|||||||
case kVideoNV12:
|
case kVideoNV12:
|
||||||
return kNV12;
|
return kNV12;
|
||||||
default:
|
default:
|
||||||
assert(!"RawVideoTypeToVplibVideoType unknown type");
|
assert(!"RawVideoTypeToCommonVideoVideoType unknown type");
|
||||||
}
|
}
|
||||||
return kUnknown;
|
return kUnknown;
|
||||||
}
|
}
|
||||||
|
@ -12,13 +12,14 @@
|
|||||||
#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VPLIB_CONVERSIONS_H_
|
#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VPLIB_CONVERSIONS_H_
|
||||||
|
|
||||||
#include "video_capture.h"
|
#include "video_capture.h"
|
||||||
#include "vplib.h"
|
#include "common_video/libyuv/include/libyuv.h"
|
||||||
|
|
||||||
|
// TODO (mikhal): Update file name
|
||||||
namespace webrtc
|
namespace webrtc
|
||||||
{
|
{
|
||||||
namespace videocapturemodule
|
namespace videocapturemodule
|
||||||
{
|
{
|
||||||
VideoType RawVideoTypeToVplibVideoType(RawVideoType type);
|
VideoType RawVideoTypeToCommonVideoVideoType(RawVideoType type);
|
||||||
} // namespace videocapturemodule
|
} // namespace videocapturemodule
|
||||||
} // namespace webrtc
|
} // namespace webrtc
|
||||||
#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VPLIB_CONVERSIONS_H_
|
#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VPLIB_CONVERSIONS_H_
|
||||||
|
@ -51,4 +51,4 @@
|
|||||||
# tab-width:2
|
# tab-width:2
|
||||||
# indent-tabs-mode:nil
|
# indent-tabs-mode:nil
|
||||||
# End:
|
# End:
|
||||||
# vim: set expandtab tabstop=2 shiftwidth=2:
|
# vim: set expandtab tabstop=2 shiftwidth=2:
|
@ -21,9 +21,9 @@
|
|||||||
|
|
||||||
#include "event_wrapper.h"
|
#include "event_wrapper.h"
|
||||||
#include "testsupport/fileutils.h"
|
#include "testsupport/fileutils.h"
|
||||||
|
#include "common_video/libyuv/include/libyuv.h"
|
||||||
#include "video_codec_interface.h"
|
#include "video_codec_interface.h"
|
||||||
#include "video_source.h"
|
#include "video_source.h"
|
||||||
#include "vplib.h"
|
|
||||||
|
|
||||||
|
|
||||||
#define SSIM_CALC 0 // by default, don't compute SSIM
|
#define SSIM_CALC 0 // by default, don't compute SSIM
|
||||||
|
@ -10,7 +10,7 @@
|
|||||||
|
|
||||||
#include "test.h"
|
#include "test.h"
|
||||||
#include "video_source.h"
|
#include "video_source.h"
|
||||||
#include "vplib.h"
|
#include "common_video/libyuv/include/libyuv.h"
|
||||||
#include "event_wrapper.h"
|
#include "event_wrapper.h"
|
||||||
#include "thread_wrapper.h"
|
#include "thread_wrapper.h"
|
||||||
#include <iostream>
|
#include <iostream>
|
||||||
|
@ -19,7 +19,7 @@
|
|||||||
'<(webrtc_root)/../test/test.gyp:test_support',
|
'<(webrtc_root)/../test/test.gyp:test_support',
|
||||||
'<(webrtc_root)/../testing/gtest.gyp:gtest',
|
'<(webrtc_root)/../testing/gtest.gyp:gtest',
|
||||||
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
|
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
|
||||||
'<(webrtc_root)/common_video/common_video.gyp:webrtc_vplib',
|
'<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
|
||||||
],
|
],
|
||||||
|
|
||||||
'include_dirs': [
|
'include_dirs': [
|
||||||
|
@ -14,7 +14,6 @@
|
|||||||
#include <cassert>
|
#include <cassert>
|
||||||
|
|
||||||
#include "testsupport/fileutils.h"
|
#include "testsupport/fileutils.h"
|
||||||
#include "vplib.h"
|
|
||||||
|
|
||||||
VideoSource::VideoSource()
|
VideoSource::VideoSource()
|
||||||
:
|
:
|
||||||
|
@ -12,8 +12,7 @@
|
|||||||
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_VIDEO_SOURCE_H_
|
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_VIDEO_SOURCE_H_
|
||||||
|
|
||||||
#include <string>
|
#include <string>
|
||||||
|
#include "common_video/libyuv/include/libyuv.h"
|
||||||
#include "vplib.h"
|
|
||||||
|
|
||||||
enum VideoSize
|
enum VideoSize
|
||||||
{
|
{
|
||||||
|
@ -32,4 +32,4 @@
|
|||||||
# tab-width:2
|
# tab-width:2
|
||||||
# indent-tabs-mode:nil
|
# indent-tabs-mode:nil
|
||||||
# End:
|
# End:
|
||||||
# vim: set expandtab tabstop=2 shiftwidth=2:
|
# vim: set expandtab tabstop=2 shiftwidth=2:
|
@ -20,7 +20,8 @@
|
|||||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_H_
|
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_H_
|
||||||
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_H_
|
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_H_
|
||||||
|
|
||||||
#include "interpolator.h"
|
#include "common_video/libyuv/include/scaler.h"
|
||||||
|
|
||||||
#include "video_codec_interface.h"
|
#include "video_codec_interface.h"
|
||||||
#include "vp8.h"
|
#include "vp8.h"
|
||||||
|
|
||||||
@ -138,7 +139,7 @@ private:
|
|||||||
VP8Encoder* encoder_[kMaxSimulcastStreams];
|
VP8Encoder* encoder_[kMaxSimulcastStreams];
|
||||||
bool encode_stream_[kMaxSimulcastStreams];
|
bool encode_stream_[kMaxSimulcastStreams];
|
||||||
VideoFrameType frame_type_[kMaxSimulcastStreams];
|
VideoFrameType frame_type_[kMaxSimulcastStreams];
|
||||||
interpolator* interpolator_[kMaxSimulcastStreams];
|
Scaler* scaler_[kMaxSimulcastStreams];
|
||||||
RawImage video_frame_[kMaxSimulcastStreams];
|
RawImage video_frame_[kMaxSimulcastStreams];
|
||||||
VideoCodec video_codec_;
|
VideoCodec video_codec_;
|
||||||
};// end of VP8SimulcastEncoder class
|
};// end of VP8SimulcastEncoder class
|
||||||
|
@ -13,11 +13,11 @@
|
|||||||
'type': '<(library)',
|
'type': '<(library)',
|
||||||
'dependencies': [
|
'dependencies': [
|
||||||
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
|
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
|
||||||
|
'<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
|
||||||
],
|
],
|
||||||
'include_dirs': [
|
'include_dirs': [
|
||||||
'../interface',
|
'../interface',
|
||||||
'<(webrtc_root)/common_video/interface',
|
'<(webrtc_root)/common_video/interface',
|
||||||
'<(webrtc_root)/common_video/vplib/main/interface',
|
|
||||||
'<(webrtc_root)/modules/video_coding/codecs/interface',
|
'<(webrtc_root)/modules/video_coding/codecs/interface',
|
||||||
'<(webrtc_root)/modules/interface',
|
'<(webrtc_root)/modules/interface',
|
||||||
],
|
],
|
||||||
@ -69,7 +69,7 @@
|
|||||||
'dependencies': [
|
'dependencies': [
|
||||||
'test_framework',
|
'test_framework',
|
||||||
'webrtc_vp8',
|
'webrtc_vp8',
|
||||||
'<(webrtc_root)/common_video/common_video.gyp:webrtc_vplib',
|
'<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
|
||||||
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
|
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
|
||||||
'<(webrtc_root)/../test/test.gyp:test_support',
|
'<(webrtc_root)/../test/test.gyp:test_support',
|
||||||
'<(webrtc_root)/../testing/gtest.gyp:gtest',
|
'<(webrtc_root)/../testing/gtest.gyp:gtest',
|
||||||
|
@ -27,14 +27,14 @@ VP8SimulcastEncoder::VP8SimulcastEncoder() {
|
|||||||
encoder_[i] = NULL;
|
encoder_[i] = NULL;
|
||||||
encode_stream_[i] = false;
|
encode_stream_[i] = false;
|
||||||
frame_type_[i] = kKeyFrame;
|
frame_type_[i] = kKeyFrame;
|
||||||
interpolator_[i] = NULL;
|
scaler_[i] = NULL;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
VP8SimulcastEncoder::~VP8SimulcastEncoder() {
|
VP8SimulcastEncoder::~VP8SimulcastEncoder() {
|
||||||
for (int i = 0; i < kMaxSimulcastStreams; i++) {
|
for (int i = 0; i < kMaxSimulcastStreams; i++) {
|
||||||
delete encoder_[i];
|
delete encoder_[i];
|
||||||
delete interpolator_[i];
|
delete scaler_[i];
|
||||||
delete [] video_frame_[i]._buffer;
|
delete [] video_frame_[i]._buffer;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -43,8 +43,8 @@ WebRtc_Word32 VP8SimulcastEncoder::Release() {
|
|||||||
for (int i = 0; i < kMaxSimulcastStreams; i++) {
|
for (int i = 0; i < kMaxSimulcastStreams; i++) {
|
||||||
delete encoder_[i];
|
delete encoder_[i];
|
||||||
encoder_[i] = NULL;
|
encoder_[i] = NULL;
|
||||||
delete interpolator_[i];
|
delete scaler_[i];
|
||||||
interpolator_[i] = NULL;
|
scaler_[i] = NULL;
|
||||||
delete [] video_frame_[i]._buffer;
|
delete [] video_frame_[i]._buffer;
|
||||||
video_frame_[i]._buffer = NULL;
|
video_frame_[i]._buffer = NULL;
|
||||||
video_frame_[i]._size = 0;
|
video_frame_[i]._size = 0;
|
||||||
@ -121,17 +121,12 @@ WebRtc_Word32 VP8SimulcastEncoder::InitEncode(const VideoCodec* codecSettings,
|
|||||||
}
|
}
|
||||||
if (codecSettings->width != video_codec.width ||
|
if (codecSettings->width != video_codec.width ||
|
||||||
codecSettings->height != video_codec.height) {
|
codecSettings->height != video_codec.height) {
|
||||||
if (interpolator_[i] == NULL) {
|
if (scaler_[i] == NULL) {
|
||||||
interpolator_[i] = new interpolator();
|
scaler_[i] = new Scaler();
|
||||||
}
|
}
|
||||||
interpolator_[i]->Set(
|
scaler_[i]->Set(codecSettings->width, codecSettings->height,
|
||||||
codecSettings->width,
|
video_codec.width, video_codec.height,
|
||||||
codecSettings->height,
|
kI420, kI420, kScaleBox);
|
||||||
video_codec.width,
|
|
||||||
video_codec.height,
|
|
||||||
kI420,
|
|
||||||
kI420,
|
|
||||||
kBilinear);
|
|
||||||
|
|
||||||
if (video_frame_[i]._size <
|
if (video_frame_[i]._size <
|
||||||
(3u * video_codec.width * video_codec.height / 2u)) {
|
(3u * video_codec.width * video_codec.height / 2u)) {
|
||||||
@ -183,18 +178,15 @@ WebRtc_Word32 VP8SimulcastEncoder::Encode(
|
|||||||
|
|
||||||
for (int i = 0; i < numberOfStreams; i++) {
|
for (int i = 0; i < numberOfStreams; i++) {
|
||||||
if (encoder_[i] && encode_stream_[i]) {
|
if (encoder_[i] && encode_stream_[i]) {
|
||||||
// need the simulcastIdx to keep track of which encoder encoded the frame
|
// Need the simulcastIdx to keep track of which encoder encoded the frame.
|
||||||
info.codecSpecific.VP8.simulcastIdx = i;
|
info.codecSpecific.VP8.simulcastIdx = i;
|
||||||
VideoFrameType requested_frame_type = frame_type_[i];
|
VideoFrameType requested_frame_type = frame_type_[i];
|
||||||
if (interpolator_[i]) {
|
if (scaler_[i]) {
|
||||||
interpolator_[i]->Interpolate(inputImage._buffer,
|
int video_frame_size = static_cast<int>(video_frame_[i]._size);
|
||||||
video_frame_[i]._buffer,
|
scaler_[i]->Scale(inputImage._buffer,
|
||||||
video_frame_[i]._size);
|
video_frame_[i]._buffer,
|
||||||
video_frame_[i]._length =
|
video_frame_size);
|
||||||
3 *
|
video_frame_[i]._length = video_frame_[i]._size = video_frame_size;
|
||||||
video_codec_.simulcastStream[i].width *
|
|
||||||
video_codec_.simulcastStream[i].height /
|
|
||||||
2;
|
|
||||||
ret_val = encoder_[i]->Encode(video_frame_[i],
|
ret_val = encoder_[i]->Encode(video_frame_[i],
|
||||||
&info,
|
&info,
|
||||||
&requested_frame_type);
|
&requested_frame_type);
|
||||||
|
@ -14,7 +14,7 @@
|
|||||||
'dependencies': [
|
'dependencies': [
|
||||||
'webrtc_i420',
|
'webrtc_i420',
|
||||||
'webrtc_vp8',
|
'webrtc_vp8',
|
||||||
'<(webrtc_root)/common_video/common_video.gyp:webrtc_vplib',
|
'<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
|
||||||
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
|
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
|
||||||
],
|
],
|
||||||
'include_dirs': [
|
'include_dirs': [
|
||||||
|
@ -17,7 +17,7 @@
|
|||||||
'rtp_rtcp',
|
'rtp_rtcp',
|
||||||
'webrtc_utility',
|
'webrtc_utility',
|
||||||
'video_processing',
|
'video_processing',
|
||||||
'<(webrtc_root)/common_video/common_video.gyp:webrtc_vplib',
|
'<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
|
||||||
],
|
],
|
||||||
'include_dirs': [
|
'include_dirs': [
|
||||||
'../../../interface',
|
'../../../interface',
|
||||||
|
@ -18,7 +18,7 @@
|
|||||||
#include "test_callbacks.h"
|
#include "test_callbacks.h"
|
||||||
#include "test_macros.h"
|
#include "test_macros.h"
|
||||||
#include "testsupport/metrics/video_metrics.h"
|
#include "testsupport/metrics/video_metrics.h"
|
||||||
#include "vplib.h"
|
#include "common_video/libyuv/include/libyuv.h"
|
||||||
|
|
||||||
using namespace webrtc;
|
using namespace webrtc;
|
||||||
|
|
||||||
|
@ -13,7 +13,6 @@
|
|||||||
#include <cassert>
|
#include <cassert>
|
||||||
|
|
||||||
#include "testsupport/fileutils.h"
|
#include "testsupport/fileutils.h"
|
||||||
#include "vplib.h"
|
|
||||||
|
|
||||||
VideoSource::VideoSource()
|
VideoSource::VideoSource()
|
||||||
:
|
:
|
||||||
|
@ -11,7 +11,7 @@
|
|||||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_VIDEO_SOURCE_H_
|
#ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_VIDEO_SOURCE_H_
|
||||||
#define WEBRTC_MODULES_VIDEO_CODING_TEST_VIDEO_SOURCE_H_
|
#define WEBRTC_MODULES_VIDEO_CODING_TEST_VIDEO_SOURCE_H_
|
||||||
|
|
||||||
#include "vplib.h"
|
#include "common_video/libyuv/include/libyuv.h"
|
||||||
#include "typedefs.h"
|
#include "typedefs.h"
|
||||||
|
|
||||||
#include <string>
|
#include <string>
|
||||||
|
@@ -25,14 +25,17 @@ namespace webrtc {
 #define VPM_GENERAL_ERROR -1
 #define VPM_MEMORY -2
 #define VPM_PARAMETER_ERROR -3
-#define VPM_UNINITIALIZED -4
-#define VPM_UNIMPLEMENTED -5
+#define VPM_SCALE_ERROR -4
+#define VPM_UNINITIALIZED -5
+#define VPM_UNIMPLEMENTED -6

 enum VideoFrameResampling
 {
+    // TODO: Do we still need crop/pad?
     kNoRescaling, // disables rescaling
-    kFastRescaling, // fast up/down scaling; crop/pad when needed.
+    kFastRescaling, // point
     kBiLinear, // bi-linear interpolation
+    kBox, // Box inteprolation
 };

 } //namespace
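The renumbered error codes add VPM_SCALE_ERROR for failures reported by the new scaler. A hedged fragment showing how a resample call can map the libyuv-style return value (0 or negative) onto these codes; _scaler, _targetWidth, _targetHeight, inFrame and outFrame are assumed to exist in the caller, as in the resampler code later in this change:

// Assumes a webrtc::Scaler member already configured with Set(), plus I420
// VideoFrame buffers inFrame/outFrame.
int requiredSize = _targetWidth * _targetHeight * 3 >> 1;
outFrame.VerifyAndAllocate(requiredSize);
// Scale() follows the libyuv convention (0 on success, negative on error);
// translate that into the video-processing module's own error space.
if (_scaler.Scale(inFrame.Buffer(), outFrame.Buffer(), requiredSize) == 0)
{
    outFrame.SetLength(requiredSize);
    return VPM_OK;
}
return VPM_SCALE_ERROR;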
@ -18,369 +18,106 @@ VPMSimpleSpatialResampler::VPMSimpleSpatialResampler()
|
|||||||
_resamplingMode(kFastRescaling),
|
_resamplingMode(kFastRescaling),
|
||||||
_targetWidth(0),
|
_targetWidth(0),
|
||||||
_targetHeight(0),
|
_targetHeight(0),
|
||||||
_interpolatorPtr(NULL)
|
_scaler()
|
||||||
{
|
{
|
||||||
}
|
}
|
||||||
|
|
||||||
VPMSimpleSpatialResampler::~VPMSimpleSpatialResampler()
|
VPMSimpleSpatialResampler::~VPMSimpleSpatialResampler()
|
||||||
{
|
{
|
||||||
Release();
|
//
|
||||||
}
|
}
|
||||||
|
|
||||||
WebRtc_Word32
|
|
||||||
VPMSimpleSpatialResampler::Release()
|
|
||||||
{
|
|
||||||
if (_interpolatorPtr != NULL)
|
|
||||||
{
|
|
||||||
delete _interpolatorPtr;
|
|
||||||
_interpolatorPtr = NULL;
|
|
||||||
}
|
|
||||||
return VPM_OK;
|
|
||||||
}
|
|
||||||
|
|
||||||
WebRtc_Word32
|
WebRtc_Word32
|
||||||
VPMSimpleSpatialResampler::SetTargetFrameSize(WebRtc_UWord32 width,
|
VPMSimpleSpatialResampler::SetTargetFrameSize(WebRtc_Word32 width,
|
||||||
WebRtc_UWord32 height)
|
WebRtc_Word32 height)
|
||||||
{
|
{
|
||||||
if (_resamplingMode == kNoRescaling)
|
if (_resamplingMode == kNoRescaling) {
|
||||||
{
|
|
||||||
return VPM_OK;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (width < 1 || height < 1)
|
|
||||||
{
|
|
||||||
return VPM_PARAMETER_ERROR;
|
|
||||||
}
|
|
||||||
|
|
||||||
_targetWidth = width;
|
|
||||||
_targetHeight = height;
|
|
||||||
|
|
||||||
return VPM_OK;
|
return VPM_OK;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (width < 1 || height < 1) {
|
||||||
|
return VPM_PARAMETER_ERROR;
|
||||||
|
}
|
||||||
|
|
||||||
|
_targetWidth = width;
|
||||||
|
_targetHeight = height;
|
||||||
|
|
||||||
|
return VPM_OK;
|
||||||
}
|
}
|
||||||
|
|
||||||
void
|
void
|
||||||
VPMSimpleSpatialResampler::SetInputFrameResampleMode(VideoFrameResampling
|
VPMSimpleSpatialResampler::SetInputFrameResampleMode(VideoFrameResampling
|
||||||
resamplingMode)
|
resamplingMode)
|
||||||
{
|
{
|
||||||
_resamplingMode = resamplingMode;
|
_resamplingMode = resamplingMode;
|
||||||
}
|
}
|
||||||
|
|
||||||
void
|
void
|
||||||
VPMSimpleSpatialResampler::Reset()
|
VPMSimpleSpatialResampler::Reset()
|
||||||
{
|
{
|
||||||
_resamplingMode = kFastRescaling;
|
_resamplingMode = kFastRescaling;
|
||||||
_targetWidth = 0;
|
_targetWidth = 0;
|
||||||
_targetHeight = 0;
|
_targetHeight = 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
WebRtc_Word32
|
WebRtc_Word32
|
||||||
VPMSimpleSpatialResampler::ResampleFrame(const VideoFrame& inFrame,
|
VPMSimpleSpatialResampler::ResampleFrame(const VideoFrame& inFrame,
|
||||||
VideoFrame& outFrame)
|
VideoFrame& outFrame)
|
||||||
{
|
{
|
||||||
WebRtc_Word32 ret;
|
if (_resamplingMode == kNoRescaling)
|
||||||
|
return outFrame.CopyFrame(inFrame);
|
||||||
|
// Check if re-sampling is needed
|
||||||
|
if ((inFrame.Width() == (WebRtc_UWord32)_targetWidth) &&
|
||||||
|
(inFrame.Height() == (WebRtc_UWord32)_targetHeight)) {
|
||||||
|
return outFrame.CopyFrame(inFrame);
|
||||||
|
}
|
||||||
|
|
||||||
if (_resamplingMode == kNoRescaling)
|
// Setting scaler
|
||||||
{
|
//TODO: Modify scaler types
|
||||||
return outFrame.CopyFrame(inFrame);
|
int retVal = 0;
|
||||||
}
|
retVal = _scaler.Set(inFrame.Width(), inFrame.Height(),
|
||||||
else if (_targetWidth < 1 || _targetHeight < 1)
|
_targetWidth, _targetHeight, kI420, kI420, kScaleBox);
|
||||||
{
|
if (retVal < 0)
|
||||||
return VPM_PARAMETER_ERROR;
|
return retVal;
|
||||||
}
|
|
||||||
|
|
||||||
// Check if re-sampling is needed
|
|
||||||
if ((inFrame.Width() == _targetWidth) &&
|
|
||||||
(inFrame.Height() == _targetHeight))
|
|
||||||
{
|
|
||||||
return outFrame.CopyFrame(inFrame);
|
|
||||||
}
|
|
||||||
if (_resamplingMode == kBiLinear)
|
|
||||||
{
|
|
||||||
return BiLinearInterpolation(inFrame, outFrame);
|
|
||||||
}
|
|
||||||
|
|
||||||
outFrame.SetTimeStamp(inFrame.TimeStamp());
|
// Disabling cut/pad for now - only scaling.
|
||||||
|
int requiredSize = (WebRtc_UWord32)(_targetWidth * _targetHeight * 3 >> 1);
|
||||||
if (_targetWidth > inFrame.Width() &&
|
outFrame.VerifyAndAllocate(requiredSize);
|
||||||
( ExactMultiplier(inFrame.Width(), inFrame.Height())))
|
outFrame.SetTimeStamp(inFrame.TimeStamp());
|
||||||
{
|
outFrame.SetWidth(_targetWidth);
|
||||||
// The codec might want to pad this later... adding 8 pixels
|
outFrame.SetHeight(_targetHeight);
|
||||||
const WebRtc_UWord32 requiredSize = (_targetWidth + 8) *
|
|
||||||
(_targetHeight + 8) * 3 / 2;
|
|
||||||
outFrame.VerifyAndAllocate(requiredSize);
|
|
||||||
return UpsampleFrame(inFrame, outFrame);
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
// 1 cut/pad
|
|
||||||
// 2 scale factor 2X (in both cases if required)
|
|
||||||
WebRtc_UWord32 croppedWidth = inFrame.Width();
|
|
||||||
WebRtc_UWord32 croppedHeight = inFrame.Height();
|
|
||||||
|
|
||||||
//Calculates cropped dimensions
|
|
||||||
CropSize(inFrame.Width(), inFrame.Height(),
|
|
||||||
croppedWidth, croppedHeight);
|
|
||||||
|
|
||||||
VideoFrame* targetFrame;
|
|
||||||
outFrame.VerifyAndAllocate(croppedWidth * croppedHeight * 3 / 2);
|
|
||||||
targetFrame = &outFrame;
|
|
||||||
|
|
||||||
ConvertI420ToI420(inFrame.Buffer(), inFrame.Width(), inFrame.Height(),
|
|
||||||
targetFrame->Buffer(), croppedWidth, croppedHeight);
|
|
||||||
targetFrame->SetWidth(croppedWidth);
|
|
||||||
targetFrame->SetHeight(croppedHeight);
|
|
||||||
//We have correct aspect ratio, sub-sample with a multiple of two to get
|
|
||||||
//close to the target size
|
|
||||||
ret = SubsampleMultipleOf2(*targetFrame);
|
|
||||||
|
|
||||||
if (ret != VPM_OK)
|
|
||||||
{
|
|
||||||
return ret;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
retVal = _scaler.Scale(inFrame.Buffer(), outFrame.Buffer(), requiredSize);
|
||||||
|
outFrame.SetLength(requiredSize);
|
||||||
|
if (retVal == 0)
|
||||||
return VPM_OK;
|
return VPM_OK;
|
||||||
|
else
|
||||||
|
return VPM_SCALE_ERROR;
|
||||||
}
|
}
|
||||||
|
|
||||||
WebRtc_Word32
|
WebRtc_Word32
|
||||||
VPMSimpleSpatialResampler::UpsampleFrame(const VideoFrame& inFrame,
|
|
||||||
VideoFrame& outFrame)
|
|
||||||
{
|
|
||||||
outFrame.CopyFrame(inFrame);
|
|
||||||
|
|
||||||
float ratioWidth = _targetWidth / (float)inFrame.Width();
|
|
||||||
float ratioHeight = _targetHeight / (float)inFrame.Height();
|
|
||||||
|
|
||||||
WebRtc_UWord32 scaledWidth = 0;
|
|
||||||
WebRtc_UWord32 scaledHeight = 0;
|
|
||||||
|
|
||||||
if(ratioWidth > 1 || ratioHeight > 1)
|
|
||||||
{
|
|
||||||
// scale up
|
|
||||||
if(ratioWidth <= 1.5 && ratioHeight <= 1.5)
|
|
||||||
{
|
|
||||||
// scale up 1.5
|
|
||||||
WebRtc_Word32 ret = ScaleI420Up3_2(inFrame.Width(), inFrame.Height(),
|
|
||||||
outFrame.Buffer(), outFrame.Size(),
|
|
||||||
scaledWidth, scaledHeight);
|
|
||||||
if (ret < 0)
|
|
||||||
return VPM_GENERAL_ERROR;
|
|
||||||
}
|
|
||||||
else if(ratioWidth <= 2 && ratioHeight <= 2)
|
|
||||||
{
|
|
||||||
// scale up 2
|
|
||||||
WebRtc_Word32 ret = ScaleI420Up2(inFrame.Width(), inFrame.Height(),
|
|
||||||
outFrame.Buffer(), outFrame.Size(),
|
|
||||||
scaledWidth, scaledHeight);
|
|
||||||
if (ret < 0)
|
|
||||||
return VPM_GENERAL_ERROR;
|
|
||||||
}
|
|
||||||
else if(ratioWidth <= 2.25 && ratioHeight <= 2.25)
|
|
||||||
{
|
|
||||||
// scale up 2.25
|
|
||||||
WebRtc_Word32 ret = ScaleI420Up3_2(inFrame.Width(), inFrame.Height(),
|
|
||||||
outFrame.Buffer(), outFrame.Size(),
|
|
||||||
scaledWidth, scaledHeight);
|
|
||||||
if (ret < 0)
|
|
||||||
return VPM_GENERAL_ERROR;
|
|
||||||
ret = ScaleI420Up3_2(scaledWidth, scaledHeight,
|
|
||||||
outFrame.Buffer(), outFrame.Size(),
|
|
||||||
scaledWidth, scaledHeight);
|
|
||||||
if (ret < 0)
|
|
||||||
return VPM_GENERAL_ERROR;
|
|
||||||
}
|
|
||||||
else if(ratioWidth <= 3 && ratioHeight <= 3)
|
|
||||||
{
|
|
||||||
// scale up 3
|
|
||||||
WebRtc_Word32 ret = ScaleI420Up2(inFrame.Width(), inFrame.Height(),
|
|
||||||
outFrame.Buffer(), outFrame.Size(),
|
|
||||||
scaledWidth, scaledHeight);
|
|
||||||
if (ret < 0)
|
|
||||||
return VPM_GENERAL_ERROR;
|
|
||||||
ret = ScaleI420Up3_2(scaledWidth, scaledHeight,
|
|
||||||
outFrame.Buffer(), outFrame.Size(),
|
|
||||||
scaledWidth, scaledHeight);
|
|
||||||
if (ret < 0)
|
|
||||||
return VPM_GENERAL_ERROR;
|
|
||||||
}
|
|
||||||
else if(ratioWidth <= 4 && ratioHeight <= 4)
|
|
||||||
{
|
|
||||||
// scale up 4
|
|
||||||
WebRtc_Word32 ret = ScaleI420Up2(inFrame.Width(), inFrame.Height(),
|
|
||||||
outFrame.Buffer(), outFrame.Size(),
|
|
||||||
scaledWidth, scaledHeight);
|
|
||||||
if (ret < 0)
|
|
||||||
return VPM_GENERAL_ERROR;
|
|
||||||
ret = ScaleI420Up2(scaledWidth, scaledHeight,
|
|
||||||
outFrame.Buffer(), outFrame.Size(),
|
|
||||||
scaledWidth, scaledHeight);
|
|
||||||
if (ret < 0)
|
|
||||||
return VPM_GENERAL_ERROR;
|
|
||||||
}
|
|
||||||
|
|
||||||
//TODO: what if ratioWidth/Height >= 8 ?
|
|
||||||
|
|
||||||
if (scaledWidth <= 0 || scaledHeight <= 0)
|
|
||||||
{
|
|
||||||
return VPM_GENERAL_ERROR;
|
|
||||||
}
|
|
||||||
|
|
||||||
if ((static_cast<WebRtc_UWord32>(scaledWidth) > _targetWidth) ||
|
|
||||||
(static_cast<WebRtc_UWord32>(scaledHeight) > _targetHeight))
|
|
||||||
{
|
|
||||||
WebRtc_Word32 ret = CutI420Frame(outFrame.Buffer(),
|
|
||||||
scaledWidth, scaledHeight,
|
|
||||||
_targetWidth, _targetHeight);
|
|
||||||
if (ret < 0)
|
|
||||||
return VPM_GENERAL_ERROR;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
return VPM_GENERAL_ERROR;
|
|
||||||
}
|
|
||||||
|
|
||||||
outFrame.SetWidth(_targetWidth);
|
|
||||||
outFrame.SetHeight(_targetHeight);
|
|
||||||
outFrame.SetLength(_targetWidth * _targetHeight * 3 / 2);
|
|
||||||
|
|
||||||
return VPM_OK;
|
|
||||||
}
|
|
||||||
|
|
||||||
WebRtc_Word32
|
|
||||||
VPMSimpleSpatialResampler::CropSize(WebRtc_UWord32 width, WebRtc_UWord32 height,
|
|
||||||
WebRtc_UWord32& croppedWidth,
|
|
||||||
WebRtc_UWord32& croppedHeight) const
|
|
||||||
{
|
|
||||||
// Crop the image to a width and height which is a
|
|
||||||
// multiple of two, so that we can do a simpler scaling.
|
|
||||||
croppedWidth = _targetWidth;
|
|
||||||
croppedHeight = _targetHeight;
|
|
||||||
|
|
||||||
if (width >= 8 * _targetWidth && height >= 8 * _targetHeight)
|
|
||||||
{
|
|
||||||
croppedWidth = 8 * _targetWidth;
|
|
||||||
croppedHeight = 8 * _targetHeight;
|
|
||||||
}
|
|
||||||
else if (width >= 4 * _targetWidth && height >= 4 * _targetHeight)
|
|
||||||
{
|
|
||||||
croppedWidth = 4 * _targetWidth;
|
|
||||||
croppedHeight = 4 * _targetHeight;
|
|
||||||
}
|
|
||||||
else if (width >= 2 * _targetWidth && height >= 2 * _targetHeight)
|
|
||||||
{
|
|
||||||
croppedWidth = 2 * _targetWidth;
|
|
||||||
croppedHeight = 2 * _targetHeight;
|
|
||||||
}
|
|
||||||
return VPM_OK;
|
|
||||||
}
|
|
||||||
|
|
||||||
WebRtc_Word32
|
|
||||||
VPMSimpleSpatialResampler::SubsampleMultipleOf2(VideoFrame& frame)
|
|
||||||
{
|
|
||||||
WebRtc_UWord32 tempWidth = frame.Width();
|
|
||||||
WebRtc_UWord32 tempHeight = frame.Height();
|
|
||||||
|
|
||||||
while (tempWidth / _targetWidth >= 2 && tempHeight / _targetHeight >= 2)
|
|
||||||
{
|
|
||||||
ScaleI420FrameQuarter(tempWidth, tempHeight, frame.Buffer());
|
|
||||||
tempWidth /= 2;
|
|
||||||
tempHeight /= 2;
|
|
||||||
}
|
|
||||||
frame.SetWidth(tempWidth);
|
|
||||||
frame.SetHeight(tempHeight);
|
|
||||||
frame.SetLength(frame.Width() * frame.Height() * 3 / 2);
|
|
||||||
|
|
||||||
return VPM_OK;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
bool
|
|
||||||
VPMSimpleSpatialResampler::ExactMultiplier(WebRtc_UWord32 width,
|
|
||||||
WebRtc_UWord32 height) const
|
|
||||||
{
|
|
||||||
bool exactMultiplier = false;
|
|
||||||
if (_targetWidth % width == 0 && _targetHeight % height == 0)
|
|
||||||
{
|
|
||||||
// we have a multiple, is it an even multiple?
|
|
||||||
WebRtc_Word32 widthMultiple = _targetWidth / width;
|
|
||||||
WebRtc_Word32 heightMultiple = _targetHeight / height;
|
|
||||||
if ((widthMultiple == 2 && heightMultiple == 2) ||
|
|
||||||
(widthMultiple == 4 && heightMultiple == 4) ||
|
|
||||||
(widthMultiple == 8 && heightMultiple == 8) ||
|
|
||||||
(widthMultiple == 1 && heightMultiple == 1))
|
|
||||||
{
|
|
||||||
exactMultiplier = true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return exactMultiplier;
|
|
||||||
}
|
|
||||||
|
|
||||||
WebRtc_Word32
|
|
||||||
VPMSimpleSpatialResampler::BiLinearInterpolation(const VideoFrame& inFrame,
|
|
||||||
VideoFrame& outFrame)
|
|
||||||
{
|
|
||||||
WebRtc_Word32 retVal;
|
|
||||||
|
|
||||||
if (_interpolatorPtr == NULL)
|
|
||||||
{
|
|
||||||
_interpolatorPtr = new interpolator();
|
|
||||||
}
|
|
||||||
// set bi-linear interpolator
|
|
||||||
retVal = _interpolatorPtr->Set(inFrame.Width(), inFrame.Height(),
|
|
||||||
_targetWidth, _targetHeight,
|
|
||||||
kI420, kI420, kBilinear );
|
|
||||||
if (retVal < 0 )
|
|
||||||
{
|
|
||||||
return retVal;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Verify size of output buffer
|
|
||||||
outFrame.VerifyAndAllocate(_targetHeight * _targetWidth * 3 >> 1);
|
|
||||||
WebRtc_UWord32 outSz = outFrame.Size();
|
|
||||||
|
|
||||||
// interpolate frame
|
|
||||||
retVal = _interpolatorPtr->Interpolate(inFrame.Buffer(),
|
|
||||||
outFrame.Buffer(), outSz);
|
|
||||||
|
|
||||||
assert(outSz <= outFrame.Size());
|
|
||||||
|
|
||||||
// returns height
|
|
||||||
if (retVal < 0)
|
|
||||||
{
|
|
||||||
return retVal;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Set output frame parameters
|
|
||||||
outFrame.SetHeight(_targetHeight);
|
|
||||||
outFrame.SetWidth(_targetWidth);
|
|
||||||
outFrame.SetLength(outSz);
|
|
||||||
outFrame.SetTimeStamp(inFrame.TimeStamp());
|
|
||||||
return VPM_OK;
|
|
||||||
}
|
|
||||||
|
|
||||||
WebRtc_UWord32
|
|
||||||
VPMSimpleSpatialResampler::TargetHeight()
|
VPMSimpleSpatialResampler::TargetHeight()
|
||||||
{
|
{
|
||||||
return _targetHeight;
|
return _targetHeight;
|
||||||
}
|
}
|
||||||
|
|
||||||
WebRtc_UWord32
|
WebRtc_Word32
|
||||||
VPMSimpleSpatialResampler::TargetWidth()
|
VPMSimpleSpatialResampler::TargetWidth()
|
||||||
{
|
{
|
||||||
return _targetWidth;
|
return _targetWidth;
|
||||||
}
|
}
|
||||||
|
|
||||||
bool
|
bool
|
||||||
VPMSimpleSpatialResampler::ApplyResample(WebRtc_UWord32 width,
|
VPMSimpleSpatialResampler::ApplyResample(WebRtc_Word32 width,
|
||||||
WebRtc_UWord32 height)
|
WebRtc_Word32 height)
|
||||||
{
|
{
|
||||||
if ((width == _targetWidth && height == _targetHeight) ||
|
if ((width == _targetWidth && height == _targetHeight) ||
|
||||||
_resamplingMode == kNoRescaling)
|
_resamplingMode == kNoRescaling)
|
||||||
return false;
|
return false;
|
||||||
else
|
else
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
} //namespace
|
} //namespace
|
||||||
|
@@ -20,59 +20,48 @@
 #include "module_common_types.h"
 #include "video_processing_defines.h"

-#include "vplib.h"
-#include "interpolator.h"
+#include "common_video/libyuv/include/libyuv.h"
+#include "common_video/libyuv/include/scaler.h"

 namespace webrtc {

 class VPMSpatialResampler
 {
 public:
     virtual ~VPMSpatialResampler() {};
-    virtual WebRtc_Word32 SetTargetFrameSize(WebRtc_UWord32 width,
-                                             WebRtc_UWord32 height) = 0;
+    virtual WebRtc_Word32 SetTargetFrameSize(WebRtc_Word32 width,
+                                             WebRtc_Word32 height) = 0;
     virtual void SetInputFrameResampleMode(VideoFrameResampling
                                            resamplingMode) = 0;
     virtual void Reset() = 0;
     virtual WebRtc_Word32 ResampleFrame(const VideoFrame& inFrame,
                                         VideoFrame& outFrame) = 0;
-    virtual WebRtc_UWord32 TargetWidth() = 0;
-    virtual WebRtc_UWord32 TargetHeight() = 0;
-    virtual WebRtc_Word32 Release() = 0;
-    virtual bool ApplyResample(WebRtc_UWord32 width, WebRtc_UWord32 height) = 0;
+    virtual WebRtc_Word32 TargetWidth() = 0;
+    virtual WebRtc_Word32 TargetHeight() = 0;
+    virtual bool ApplyResample(WebRtc_Word32 width, WebRtc_Word32 height) = 0;
 };

 class VPMSimpleSpatialResampler : public VPMSpatialResampler
 {
 public:
     VPMSimpleSpatialResampler();
     ~VPMSimpleSpatialResampler();
-    virtual WebRtc_Word32 SetTargetFrameSize(WebRtc_UWord32 width,
-                                             WebRtc_UWord32 height);
+    virtual WebRtc_Word32 SetTargetFrameSize(WebRtc_Word32 width,
+                                             WebRtc_Word32 height);
     virtual void SetInputFrameResampleMode(VideoFrameResampling resamplingMode);
     virtual void Reset();
     virtual WebRtc_Word32 ResampleFrame(const VideoFrame& inFrame,
                                         VideoFrame& outFrame);
-    virtual WebRtc_UWord32 TargetWidth();
-    virtual WebRtc_UWord32 TargetHeight();
-    virtual WebRtc_Word32 Release();
-    virtual bool ApplyResample(WebRtc_UWord32 width, WebRtc_UWord32 height);
+    virtual WebRtc_Word32 TargetWidth();
+    virtual WebRtc_Word32 TargetHeight();
+    virtual bool ApplyResample(WebRtc_Word32 width, WebRtc_Word32 height);

 private:
-    WebRtc_Word32 UpsampleFrame(const VideoFrame& inFrame, VideoFrame& outFrame);
-    WebRtc_Word32 CropSize(WebRtc_UWord32 width, WebRtc_UWord32 height,
-                           WebRtc_UWord32& croppedWidth,
-                           WebRtc_UWord32& croppedHeight) const;
-    WebRtc_Word32 SubsampleMultipleOf2(VideoFrame& frame);
-    bool ExactMultiplier(WebRtc_UWord32 width, WebRtc_UWord32 height) const;
-    WebRtc_Word32 BiLinearInterpolation(const VideoFrame& inFrame,
-                                        VideoFrame& outFrame);
-
     VideoFrameResampling _resamplingMode;
-    WebRtc_UWord32 _targetWidth;
-    WebRtc_UWord32 _targetHeight;
-    interpolator* _interpolatorPtr;
+    WebRtc_Word32 _targetWidth;
+    WebRtc_Word32 _targetHeight;
+    Scaler _scaler;
 };

 } //namespace
@ -14,7 +14,7 @@
|
|||||||
'dependencies': [
|
'dependencies': [
|
||||||
'webrtc_utility',
|
'webrtc_utility',
|
||||||
'<(webrtc_root)/common_audio/common_audio.gyp:signal_processing',
|
'<(webrtc_root)/common_audio/common_audio.gyp:signal_processing',
|
||||||
'<(webrtc_root)/common_video/common_video.gyp:webrtc_vplib',
|
'<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
|
||||||
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
|
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
|
||||||
],
|
],
|
||||||
'include_dirs': [
|
'include_dirs': [
|
||||||
|
@ -10,7 +10,6 @@
|
|||||||
|
|
||||||
#include "unit_test.h"
|
#include "unit_test.h"
|
||||||
#include "video_processing.h"
|
#include "video_processing.h"
|
||||||
#include "vplib.h"
|
|
||||||
#include "tick_util.h"
|
#include "tick_util.h"
|
||||||
|
|
||||||
#include <cstdio>
|
#include <cstdio>
|
||||||
|
@ -10,7 +10,6 @@
|
|||||||
|
|
||||||
#include "unit_test.h"
|
#include "unit_test.h"
|
||||||
#include "video_processing.h"
|
#include "video_processing.h"
|
||||||
#include "vplib.h"
|
|
||||||
#include "content_analysis.h"
|
#include "content_analysis.h"
|
||||||
|
|
||||||
using namespace webrtc;
|
using namespace webrtc;
|
||||||
|
@ -9,8 +9,11 @@
|
|||||||
*/
|
*/
|
||||||
|
|
||||||
#include "unit_test.h"
|
#include "unit_test.h"
|
||||||
#include "trace.h"
|
|
||||||
|
#include "common_video/libyuv/include/libyuv.h"
|
||||||
#include "tick_util.h"
|
#include "tick_util.h"
|
||||||
|
#include "trace.h"
|
||||||
|
|
||||||
|
|
||||||
using webrtc::Trace;
|
using webrtc::Trace;
|
||||||
|
|
||||||
|
@ -8,12 +8,11 @@
|
|||||||
* be found in the AUTHORS file in the root of the source tree.
|
* be found in the AUTHORS file in the root of the source tree.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
#ifndef UNIT_TEST_H
|
#ifndef VPM_UNIT_TEST_H
|
||||||
#define UNIT_TEST_H
|
#define VPM_UNIT_TEST_H
|
||||||
|
|
||||||
#include "video_processing.h"
|
#include "video_processing.h"
|
||||||
|
|
||||||
#include "vplib.h"
|
|
||||||
|
|
||||||
#include <gtest/gtest.h>
|
#include <gtest/gtest.h>
|
||||||
|
|
||||||
@ -35,4 +34,4 @@ protected:
|
|||||||
};
|
};
|
||||||
|
|
||||||
|
|
||||||
#endif // UNIT_TEST_H
|
#endif // VPM_UNIT_TEST_H
|
||||||
|
@ -18,7 +18,7 @@
|
|||||||
],
|
],
|
||||||
'include_dirs': [
|
'include_dirs': [
|
||||||
'../../../../system_wrappers/interface',
|
'../../../../system_wrappers/interface',
|
||||||
'../../../../common_video/vplib/main/interface',
|
'<(webrtc_root)/common_video/libyuv/include',
|
||||||
'../../../../modules/video_processing/main/source',
|
'../../../../modules/video_processing/main/source',
|
||||||
],
|
],
|
||||||
'sources': [
|
'sources': [
|
||||||
|
@ -10,7 +10,7 @@
|
|||||||
|
|
||||||
#include "video_render_android_surface_view.h"
|
#include "video_render_android_surface_view.h"
|
||||||
#include "critical_section_wrapper.h"
|
#include "critical_section_wrapper.h"
|
||||||
#include "vplib.h"
|
#include "common_video/libyuv/include/libyuv.h"
|
||||||
#include "tick_util.h"
|
#include "tick_util.h"
|
||||||
#ifdef ANDROID_NDK_8_OR_ABOVE
|
#ifdef ANDROID_NDK_8_OR_ABOVE
|
||||||
#include <android/bitmap.h>
|
#include <android/bitmap.h>
|
||||||
@ -402,7 +402,7 @@ void AndroidSurfaceViewChannel::DeliverFrame(JNIEnv* jniEnv)
|
|||||||
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: Locked bitmap", __FUNCTION__);
|
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: Locked bitmap", __FUNCTION__);
|
||||||
// Convert I420 straight into the Java bitmap.
|
// Convert I420 straight into the Java bitmap.
|
||||||
const int conversionResult=ConvertI420ToRGB565( (unsigned char* )_bufferToRender.Buffer(), (unsigned char* ) pixels, _bitmapWidth, _bitmapHeight);
|
const int conversionResult=ConvertI420ToRGB565( (unsigned char* )_bufferToRender.Buffer(), (unsigned char* ) pixels, _bitmapWidth, _bitmapHeight);
|
||||||
if(conversionResult<=0)
|
if(conversionResult<0)
|
||||||
{
|
{
|
||||||
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Color conversion failed.", __FUNCTION__);
|
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Color conversion failed.", __FUNCTION__);
|
||||||
}
|
}
|
||||||
@ -448,8 +448,8 @@ void AndroidSurfaceViewChannel::DeliverFrame(JNIEnv* jniEnv)
|
|||||||
if(_javaByteBufferObj && _bitmapWidth && _bitmapHeight)
|
if(_javaByteBufferObj && _bitmapWidth && _bitmapHeight)
|
||||||
{
|
{
|
||||||
const int conversionResult=ConvertI420ToRGB565Android((unsigned char* )_bufferToRender.Buffer(), _directBuffer, _bitmapWidth, _bitmapHeight);
|
const int conversionResult=ConvertI420ToRGB565Android((unsigned char* )_bufferToRender.Buffer(), _directBuffer, _bitmapWidth, _bitmapHeight);
|
||||||
if(conversionResult<=0)
|
if(conversionResult<0)
|
||||||
{
|
{
|
||||||
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Color conversion failed.", __FUNCTION__);
|
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Color conversion failed.", __FUNCTION__);
|
||||||
_renderCritSect.Leave();
|
_renderCritSect.Leave();
|
||||||
return;
|
return;
|
||||||
|
@ -17,7 +17,7 @@
|
|||||||
#include "video_render_frames.h"
|
#include "video_render_frames.h"
|
||||||
#include "tick_util.h"
|
#include "tick_util.h"
|
||||||
#include "map_wrapper.h"
|
#include "map_wrapper.h"
|
||||||
#include "vplib.h"
|
#include "common_video/libyuv/include/libyuv.h"
|
||||||
|
|
||||||
#include <cassert>
|
#include <cassert>
|
||||||
|
|
||||||
|
@ -12,7 +12,7 @@
|
|||||||
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_CHANNEL_H_
|
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_CHANNEL_H_
|
||||||
|
|
||||||
#include "video_render_defines.h"
|
#include "video_render_defines.h"
|
||||||
#include "vplib.h"
|
#include "common_video/libyuv/include/libyuv.h"
|
||||||
#include <sys/shm.h>
|
#include <sys/shm.h>
|
||||||
|
|
||||||
#include <X11/Xlib.h>
|
#include <X11/Xlib.h>
|
||||||
|
@ -19,7 +19,7 @@
|
|||||||
#include "event_wrapper.h"
|
#include "event_wrapper.h"
|
||||||
#include "trace.h"
|
#include "trace.h"
|
||||||
#include "thread_wrapper.h"
|
#include "thread_wrapper.h"
|
||||||
#include "vplib.h"
|
#include "common_video/libyuv/include/libyuv.h"
|
||||||
|
|
||||||
namespace webrtc {
|
namespace webrtc {
|
||||||
|
|
||||||
@ -236,8 +236,8 @@ int VideoChannelAGL::DeliverFrame(unsigned char* buffer, int bufferSize, unsigne
|
|||||||
return -1;
|
return -1;
|
||||||
}
|
}
|
||||||
|
|
||||||
int rgbLength = ConvertI420ToRGBAMac((WebRtc_UWord8*)buffer, (WebRtc_UWord8*)_buffer, (WebRtc_Word32)_width, (WebRtc_Word32)_height, 0);
|
int rgbret = ConvertI420ToRGBAMac((WebRtc_UWord8*)buffer, (WebRtc_UWord8*)_buffer, (WebRtc_Word32)_width, (WebRtc_Word32)_height, 0);
|
||||||
if (rgbLength == -1)
|
if (rgbret < 0)
|
||||||
{
|
{
|
||||||
_owner->UnlockAGLCntx();
|
_owner->UnlockAGLCntx();
|
||||||
return -1;
|
return -1;
|
||||||
|
@ -16,7 +16,7 @@
|
|||||||
#include "event_wrapper.h"
|
#include "event_wrapper.h"
|
||||||
#include "trace.h"
|
#include "trace.h"
|
||||||
#include "thread_wrapper.h"
|
#include "thread_wrapper.h"
|
||||||
#include "vplib.h"
|
#include "common_video/libyuv/include/libyuv.h"
|
||||||
|
|
||||||
namespace webrtc {
|
namespace webrtc {
|
||||||
|
|
||||||
@ -232,7 +232,7 @@ int VideoChannelNSOpenGL::DeliverFrame(unsigned char* buffer, int bufferSize, un
|
|||||||
return -1;
|
return -1;
|
||||||
}
|
}
|
||||||
|
|
||||||
int rgbLength = ConvertFromI420(kRGBAMac, buffer, _width, _height, _buffer);
|
int rgbLength = ConvertI420ToRGBAMac(buffer, _buffer, _width, _height, 0);
|
||||||
if (rgbLength == -1)
|
if (rgbLength == -1)
|
||||||
{
|
{
|
||||||
_owner->UnlockAGLCntx();
|
_owner->UnlockAGLCntx();
|
||||||
|
@ -13,7 +13,7 @@
|
|||||||
'type': '<(library)',
|
'type': '<(library)',
|
||||||
'dependencies': [
|
'dependencies': [
|
||||||
'webrtc_utility',
|
'webrtc_utility',
|
||||||
'<(webrtc_root)/common_video/common_video.gyp:webrtc_vplib',
|
'<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
|
||||||
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
|
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
|
||||||
],
|
],
|
||||||
'include_dirs': [
|
'include_dirs': [
|
||||||
@ -144,7 +144,7 @@
|
|||||||
'video_render_module',
|
'video_render_module',
|
||||||
'webrtc_utility',
|
'webrtc_utility',
|
||||||
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
|
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
|
||||||
'<(webrtc_root)/common_video/common_video.gyp:webrtc_vplib',
|
'<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
|
||||||
],
|
],
|
||||||
'sources': [
|
'sources': [
|
||||||
'../test/testAPI/testAPI.cpp',
|
'../test/testAPI/testAPI.cpp',
|
||||||
|
@ -19,7 +19,7 @@
|
|||||||
#include "event_wrapper.h"
|
#include "event_wrapper.h"
|
||||||
#include "trace.h"
|
#include "trace.h"
|
||||||
#include "thread_wrapper.h"
|
#include "thread_wrapper.h"
|
||||||
#include "vplib.h"
|
#include "common_video/libyuv/include/libyuv.h"
|
||||||
|
|
||||||
namespace webrtc {
|
namespace webrtc {
|
||||||
|
|
||||||
|
@ -13,7 +13,7 @@
|
|||||||
|
|
||||||
#include "typedefs.h"
|
#include "typedefs.h"
|
||||||
#include "i_video_render_win.h"
|
#include "i_video_render_win.h"
|
||||||
#include "vplib.h"
|
#include "common_video/libyuv/include/libyuv.h"
|
||||||
|
|
||||||
#include "ddraw.h"
|
#include "ddraw.h"
|
||||||
#include <Map>
|
#include <Map>
|
||||||
|
@ -14,7 +14,7 @@
|
|||||||
'dependencies': [
|
'dependencies': [
|
||||||
|
|
||||||
# common_video
|
# common_video
|
||||||
'<(webrtc_root)/common_video/common_video.gyp:webrtc_vplib',
|
'<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
|
||||||
'<(webrtc_root)/common_video/common_video.gyp:webrtc_jpeg',
|
'<(webrtc_root)/common_video/common_video.gyp:webrtc_jpeg',
|
||||||
|
|
||||||
# ModulesShared
|
# ModulesShared
|
||||||
|
@ -12,7 +12,7 @@
|
|||||||
#include "video_render.h"
|
#include "video_render.h"
|
||||||
#include "video_render_defines.h"
|
#include "video_render_defines.h"
|
||||||
#include "vie_render_manager.h"
|
#include "vie_render_manager.h"
|
||||||
#include "vplib.h"
|
#include "common_video/libyuv/include/libyuv.h"
|
||||||
|
|
||||||
namespace webrtc {
|
namespace webrtc {
|
||||||
|
|
||||||
|