Remove VideoRotationMode in favor of VideoRotation.

With this change, there is only one copy of the rotation enum.

BUG=4145
R=mflodman@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/48369004

Cr-Commit-Position: refs/heads/master@{#8654}
git-svn-id: http://webrtc.googlecode.com/svn/trunk@8654 4adac7df-926f-26a2-2b94-8c16560cd09d
guoweis@webrtc.org 2015-03-09 17:07:31 +00:00
parent 600587d5ac
commit 59140d6a5a
19 changed files with 92 additions and 151 deletions
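For reference, the consolidation target is webrtc::VideoRotation from webrtc/common_video/rotation.h. That header is not part of this diff, so the following is only a sketch reconstructed from the call-site renames below (kRotateNone becomes kVideoRotation_0, kRotate90 becomes kVideoRotation_90, and so on), assuming the enum keeps the degree-valued constants of the removed VideoRotationMode:

    // Sketch only; the actual definition lives in webrtc/common_video/rotation.h.
    enum VideoRotation {
      kVideoRotation_0 = 0,
      kVideoRotation_90 = 90,
      kVideoRotation_180 = 180,
      kVideoRotation_270 = 270,
    };

Functions that previously took a VideoRotationMode, such as ConvertToI420(), now take a VideoRotation directly, which is why the ConvertRotation() shim in the video capture code and the separate Rotation enum in the video render defines can both be deleted below.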

View File

@@ -18,6 +18,7 @@
#include <stdio.h>
#include "webrtc/common_types.h" // RawVideoTypes.
#include "webrtc/common_video/rotation.h"
#include "webrtc/common_video/interface/i420_video_frame.h"
#include "webrtc/typedefs.h"
@@ -50,15 +51,6 @@ const double kPerfectPSNR = 48.0f;
// TODO(wu): Consolidate types into one type throughout WebRtc.
VideoType RawVideoTypeToCommonVideoVideoType(RawVideoType type);
// Supported rotation
// Direction of rotation - clockwise.
enum VideoRotationMode {
kRotateNone = 0,
kRotate90 = 90,
kRotate180 = 180,
kRotate270 = 270,
};
// Align integer values.
// Input:
// - value : Input value to be aligned.
@@ -117,10 +109,12 @@ int ExtractBuffer(const I420VideoFrame& input_frame,
int ConvertToI420(VideoType src_video_type,
const uint8_t* src_frame,
int crop_x, int crop_y,
int src_width, int src_height,
int crop_x,
int crop_y,
int src_width,
int src_height,
size_t sample_size,
VideoRotationMode rotation,
VideoRotation rotation,
I420VideoFrame* dst_frame);
// Convert From I420

View File

@@ -150,9 +150,8 @@ TEST_F(TestLibYuv, ConvertTest) {
rtc::scoped_ptr<uint8_t[]> out_i420_buffer(new uint8_t[frame_length_]);
EXPECT_EQ(0, ConvertFromI420(orig_frame_, kI420, 0,
out_i420_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kI420, out_i420_buffer.get(), 0, 0,
width_, height_,
0, kRotateNone, &res_i420_frame));
EXPECT_EQ(0, ConvertToI420(kI420, out_i420_buffer.get(), 0, 0, width_,
height_, 0, kVideoRotation_0, &res_i420_frame));
if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) {
return;
@@ -172,7 +171,7 @@ TEST_F(TestLibYuv, ConvertTest) {
EXPECT_EQ(0, ConvertFromI420(orig_frame_, kRGB24, 0, res_rgb_buffer2.get()));
EXPECT_EQ(0, ConvertToI420(kRGB24, res_rgb_buffer2.get(), 0, 0, width_,
height_, 0, kRotateNone, &res_i420_frame));
height_, 0, kVideoRotation_0, &res_i420_frame));
if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) {
return;
@@ -187,7 +186,7 @@ TEST_F(TestLibYuv, ConvertTest) {
rtc::scoped_ptr<uint8_t[]> out_uyvy_buffer(new uint8_t[width_ * height_ * 2]);
EXPECT_EQ(0, ConvertFromI420(orig_frame_, kUYVY, 0, out_uyvy_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kUYVY, out_uyvy_buffer.get(), 0, 0, width_,
height_, 0, kRotateNone, &res_i420_frame));
height_, 0, kVideoRotation_0, &res_i420_frame));
psnr = I420PSNR(&orig_frame_, &res_i420_frame);
EXPECT_EQ(48.0, psnr);
if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) {
@@ -211,8 +210,8 @@ TEST_F(TestLibYuv, ConvertTest) {
return;
}
ConvertToI420(kI420, res_i420_buffer.get(), 0, 0,
width_, height_, 0, kRotateNone, &res_i420_frame);
ConvertToI420(kI420, res_i420_buffer.get(), 0, 0, width_, height_, 0,
kVideoRotation_0, &res_i420_frame);
psnr = I420PSNR(&orig_frame_, &res_i420_frame);
EXPECT_EQ(48.0, psnr);
j++;
@@ -222,7 +221,7 @@ TEST_F(TestLibYuv, ConvertTest) {
EXPECT_EQ(0, ConvertFromI420(orig_frame_, kYUY2, 0, out_yuy2_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kYUY2, out_yuy2_buffer.get(), 0, 0, width_,
height_, 0, kRotateNone, &res_i420_frame));
height_, 0, kVideoRotation_0, &res_i420_frame));
if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) {
return;
@@ -237,7 +236,7 @@ TEST_F(TestLibYuv, ConvertTest) {
out_rgb565_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kRGB565, out_rgb565_buffer.get(), 0, 0, width_,
height_, 0, kRotateNone, &res_i420_frame));
height_, 0, kVideoRotation_0, &res_i420_frame));
if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) {
return;
@@ -257,7 +256,7 @@ TEST_F(TestLibYuv, ConvertTest) {
out_argb8888_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kARGB, out_argb8888_buffer.get(), 0, 0, width_,
height_, 0, kRotateNone, &res_i420_frame));
height_, 0, kVideoRotation_0, &res_i420_frame));
if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) {
return;
@@ -288,9 +287,8 @@ TEST_F(TestLibYuv, ConvertAlignedFrame) {
rtc::scoped_ptr<uint8_t[]> out_i420_buffer(new uint8_t[frame_length_]);
EXPECT_EQ(0, ConvertFromI420(orig_frame_, kI420, 0,
out_i420_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kI420, out_i420_buffer.get(), 0, 0,
width_, height_,
0, kRotateNone, &res_i420_frame));
EXPECT_EQ(0, ConvertToI420(kI420, out_i420_buffer.get(), 0, 0, width_,
height_, 0, kVideoRotation_0, &res_i420_frame));
if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) {
return;
@@ -314,18 +312,15 @@ TEST_F(TestLibYuv, RotateTest) {
stride_y,
stride_uv,
stride_uv));
EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer_.get(), 0, 0,
width_, height_,
0, kRotate90, &rotated_res_i420_frame));
EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer_.get(), 0, 0,
width_, height_,
0, kRotate270, &rotated_res_i420_frame));
EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer_.get(), 0, 0, width_, height_,
0, kVideoRotation_90, &rotated_res_i420_frame));
EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer_.get(), 0, 0, width_, height_,
0, kVideoRotation_270, &rotated_res_i420_frame));
EXPECT_EQ(0, rotated_res_i420_frame.CreateEmptyFrame(width_, height_,
width_, (width_ + 1) / 2,
(width_ + 1) / 2));
EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer_.get(), 0, 0,
width_, height_,
0, kRotate180, &rotated_res_i420_frame));
EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer_.get(), 0, 0, width_, height_,
0, kVideoRotation_180, &rotated_res_i420_frame));
}
TEST_F(TestLibYuv, alignment) {

View File

@@ -309,9 +309,9 @@ double TestScaler::ComputeAvgSequencePSNR(FILE* input_file,
}
frame_count++;
EXPECT_EQ(0, ConvertToI420(kI420, input_buffer, 0, 0, width, height,
required_size, kRotateNone, &in_frame));
required_size, kVideoRotation_0, &in_frame));
EXPECT_EQ(0, ConvertToI420(kI420, output_buffer, 0, 0, width, height,
required_size, kRotateNone, &out_frame));
required_size, kVideoRotation_0, &out_frame));
double psnr = I420PSNR(&in_frame, &out_frame);
avg_psnr += psnr;
}

View File

@@ -176,15 +176,15 @@ int ConvertRGB24ToARGB(const uint8_t* src_frame, uint8_t* dst_frame,
width, height);
}
libyuv::RotationMode ConvertRotationMode(VideoRotationMode rotation) {
libyuv::RotationMode ConvertRotationMode(VideoRotation rotation) {
switch (rotation) {
case kRotateNone:
case kVideoRotation_0:
return libyuv::kRotate0;
case kRotate90:
case kVideoRotation_90:
return libyuv::kRotate90;
case kRotate180:
case kVideoRotation_180:
return libyuv::kRotate180;
case kRotate270:
case kVideoRotation_270:
return libyuv::kRotate270;
}
assert(false);
@@ -231,16 +231,18 @@ int ConvertVideoType(VideoType video_type) {
int ConvertToI420(VideoType src_video_type,
const uint8_t* src_frame,
int crop_x, int crop_y,
int src_width, int src_height,
int crop_x,
int crop_y,
int src_width,
int src_height,
size_t sample_size,
VideoRotationMode rotation,
VideoRotation rotation,
I420VideoFrame* dst_frame) {
int dst_width = dst_frame->width();
int dst_height = dst_frame->height();
// LibYuv expects pre-rotation values for dst.
// Stride values should correspond to the destination values.
if (rotation == kRotate90 || rotation == kRotate270) {
if (rotation == kVideoRotation_90 || rotation == kVideoRotation_270) {
dst_width = dst_frame->height();
dst_height = dst_frame->width();
}
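The hunk above is the core behavioral detail: libyuv is handed pre-rotation destination dimensions, so ConvertToI420() swaps the destination frame's width and height internally for 90/270 degree rotations. A minimal caller-side sketch (placeholder names src_buffer, src_width, src_height and rotation are assumptions, not code from this change) allocates the destination with its final, post-rotation dimensions and passes the VideoRotation straight through:

    // Allocate the destination with post-rotation dimensions.
    int dst_width = src_width;
    int dst_height = src_height;
    if (rotation == webrtc::kVideoRotation_90 ||
        rotation == webrtc::kVideoRotation_270) {
      // The rotated output is transposed relative to the source.
      dst_width = src_height;
      dst_height = src_width;
    }
    webrtc::I420VideoFrame dst_frame;
    dst_frame.CreateEmptyFrame(dst_width, dst_height, dst_width,
                               (dst_width + 1) / 2, (dst_width + 1) / 2);
    // Convert (and rotate) the raw I420 source into the destination frame.
    int ret = webrtc::ConvertToI420(webrtc::kI420, src_buffer, 0, 0,  // no cropping
                                    src_width, src_height, 0, rotation, &dst_frame);
    if (ret < 0) {
      // Handle the conversion failure.
    }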

View File

@@ -25,22 +25,6 @@
namespace webrtc
{
// Converting the rotation mode from capturemodule's to I420VideoFrame's define.
VideoRotation ConvertRotation(VideoRotationMode rotation) {
switch (rotation) {
case kRotateNone:
return kVideoRotation_0;
case kRotate90:
return kVideoRotation_90;
case kRotate180:
return kVideoRotation_180;
case kRotate270:
return kVideoRotation_270;
}
assert(false);
return kVideoRotation_0;
}
namespace videocapturemodule
{
VideoCaptureModule* VideoCaptureImpl::Create(
@@ -172,7 +156,7 @@ VideoCaptureImpl::VideoCaptureImpl(const int32_t id)
_dataCallBack(NULL),
_captureCallBack(NULL),
_lastProcessFrameCount(TickTime::Now()),
_rotateFrame(kRotateNone),
_rotateFrame(kVideoRotation_0),
apply_rotation_(true) {
_requestedCapability.width = kDefaultWidth;
_requestedCapability.height = kDefaultHeight;
@@ -281,7 +265,8 @@ int32_t VideoCaptureImpl::IncomingFrame(
if (apply_rotation_) {
// Rotate the resolution for 90/270 degree rotations.
if (_rotateFrame == kRotate90 || _rotateFrame == kRotate270) {
if (_rotateFrame == kVideoRotation_90 ||
_rotateFrame == kVideoRotation_270) {
target_width = abs(height);
target_height = width;
}
@@ -305,8 +290,7 @@ int32_t VideoCaptureImpl::IncomingFrame(
const int conversionResult = ConvertToI420(
commonVideoType, videoFrame, 0, 0, // No cropping
width, height, videoFrameLength,
apply_rotation_ ? _rotateFrame : kRotateNone,
&_captureFrame);
apply_rotation_ ? _rotateFrame : kVideoRotation_0, &_captureFrame);
if (conversionResult < 0)
{
LOG(LS_ERROR) << "Failed to convert capture frame from type "
@@ -315,7 +299,7 @@ int32_t VideoCaptureImpl::IncomingFrame(
}
if (!apply_rotation_) {
_captureFrame.set_rotation(ConvertRotation(_rotateFrame));
_captureFrame.set_rotation(_rotateFrame);
} else {
_captureFrame.set_rotation(kVideoRotation_0);
}
@@ -336,22 +320,7 @@ int32_t VideoCaptureImpl::IncomingFrame(
int32_t VideoCaptureImpl::SetCaptureRotation(VideoRotation rotation) {
CriticalSectionScoped cs(&_apiCs);
CriticalSectionScoped cs2(&_callBackCs);
switch (rotation){
case kVideoRotation_0:
_rotateFrame = kRotateNone;
break;
case kVideoRotation_90:
_rotateFrame = kRotate90;
break;
case kVideoRotation_180:
_rotateFrame = kRotate180;
break;
case kVideoRotation_270:
_rotateFrame = kRotate270;
break;
default:
return -1;
}
_rotateFrame = rotation;
return 0;
}

View File

@@ -17,6 +17,7 @@
#include "webrtc/common_video/interface/i420_video_frame.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/common_video/rotation.h"
#include "webrtc/modules/video_capture/include/video_capture.h"
#include "webrtc/modules/video_capture/video_capture_config.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
@@ -128,7 +129,8 @@ private:
TickTime _lastProcessFrameCount;
TickTime _incomingFrameTimes[kFrameRateCountHistorySize];// timestamp for local captured frames
VideoRotationMode _rotateFrame; //Set if the frame should be rotated by the capture module.
VideoRotation _rotateFrame; // Set if the frame should be rotated by the
// capture module.
I420VideoFrame _captureFrame;

View File

@@ -208,8 +208,8 @@ int I420Decoder::Decode(const EncodedImage& inputImage, bool /*missingFrames*/,
_decodedImage.CreateEmptyFrame(_width, _height,
_width, half_width, half_width);
// Converting from buffer to plane representation.
int ret = ConvertToI420(kI420, buffer, 0, 0, _width, _height, 0, kRotateNone,
&_decodedImage);
int ret = ConvertToI420(kI420, buffer, 0, 0, _width, _height, 0,
kVideoRotation_0, &_decodedImage);
if (ret < 0) {
return WEBRTC_VIDEO_CODEC_MEMORY;
}

View File

@@ -137,9 +137,9 @@ class TestVp8Impl : public ::testing::Test {
stride_y, stride_uv, stride_uv);
input_frame_.set_timestamp(kTestTimestamp);
// Using ConvertToI420 to add stride to the image.
EXPECT_EQ(
0, ConvertToI420(kI420, source_buffer_.get(), 0, 0, codec_inst_.width,
codec_inst_.height, 0, kRotateNone, &input_frame_));
EXPECT_EQ(0, ConvertToI420(kI420, source_buffer_.get(), 0, 0,
codec_inst_.width, codec_inst_.height, 0,
kVideoRotation_0, &input_frame_));
}
void SetUpEncodeDecode() {

View File

@@ -161,9 +161,8 @@ int SequenceCoder(webrtc::test::CommandLineParser& parser) {
if (fread(frame_buffer.get(), 1, length, input_file) != length)
continue;
if (frame_cnt >= start_frame) {
webrtc::ConvertToI420(webrtc::kI420, frame_buffer.get(), 0, 0,
width, height, 0, webrtc::kRotateNone,
&input_frame);
webrtc::ConvertToI420(webrtc::kI420, frame_buffer.get(), 0, 0, width,
height, 0, webrtc::kVideoRotation_0, &input_frame);
encoder->Encode(input_frame, NULL, NULL);
decoder->Decode(encoder_callback.encoded_image(), false, NULL);
++frames_processed;

View File

@@ -23,9 +23,8 @@ TEST_F(VideoProcessingModuleTest, BrightnessDetection)
while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
frame_length_)
{
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
width_, height_,
0, kRotateNone, &video_frame_));
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_,
height_, 0, kVideoRotation_0, &video_frame_));
frameNum++;
VideoProcessingModule::FrameStats stats;
ASSERT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_));
@@ -51,9 +50,8 @@ TEST_F(VideoProcessingModuleTest, BrightnessDetection)
frame_length_ &&
frameNum < 300)
{
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
width_, height_,
0, kRotateNone, &video_frame_));
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_,
height_, 0, kVideoRotation_0, &video_frame_));
frameNum++;
uint8_t* frame = video_frame_.buffer(kYPlane);
@@ -91,9 +89,8 @@ TEST_F(VideoProcessingModuleTest, BrightnessDetection)
while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
frame_length_ && frameNum < 300)
{
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
width_, height_,
0, kRotateNone, &video_frame_));
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_,
height_, 0, kVideoRotation_0, &video_frame_));
frameNum++;
uint8_t* y_plane = video_frame_.buffer(kYPlane);

View File

@@ -44,9 +44,8 @@ TEST_F(VideoProcessingModuleTest, ColorEnhancement)
frame_length_)
{
// Using ConvertToI420 to add stride to the image.
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
width_, height_,
0, kRotateNone, &video_frame_));
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_,
height_, 0, kVideoRotation_0, &video_frame_));
frameNum++;
t0 = TickTime::Now();
ASSERT_EQ(0, VideoProcessingModule::ColorEnhancement(&video_frame_));
@@ -91,14 +90,13 @@ TEST_F(VideoProcessingModuleTest, ColorEnhancement)
frame_length_)
{
// Using ConvertToI420 to add stride to the image.
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
width_, height_,
0, kRotateNone, &video_frame_));
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_,
height_, 0, kVideoRotation_0, &video_frame_));
ASSERT_EQ(frame_length_, fread(ref_buffer.get(), 1, frame_length_,
refFile));
EXPECT_EQ(0, ConvertToI420(kI420, ref_buffer.get(), 0, 0,
width_, height_,
0, kRotateNone, &refVideoFrame));
EXPECT_EQ(
0, ConvertToI420(kI420, ref_buffer.get(), 0, 0, width_, height_, 0,
kVideoRotation_0, &refVideoFrame));
EXPECT_EQ(0, memcmp(video_frame_.buffer(kYPlane),
refVideoFrame.buffer(kYPlane),
size_y_));
@@ -123,9 +121,8 @@ TEST_F(VideoProcessingModuleTest, ColorEnhancement)
I420VideoFrame testVideoFrame;
testVideoFrame.CreateEmptyFrame(width_, height_,
width_, half_width_, half_width_);
EXPECT_EQ(0, ConvertToI420(kI420, testFrame.get(), 0, 0,
width_, height_, 0, kRotateNone,
&testVideoFrame));
EXPECT_EQ(0, ConvertToI420(kI420, testFrame.get(), 0, 0, width_, height_, 0,
kVideoRotation_0, &testVideoFrame));
ASSERT_EQ(0, VideoProcessingModule::ColorEnhancement(&testVideoFrame));

View File

@@ -27,9 +27,8 @@ TEST_F(VideoProcessingModuleTest, ContentAnalysis) {
while (fread(video_buffer.get(), 1, frame_length_, source_file_)
== frame_length_) {
// Using ConvertToI420 to add stride to the image.
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
width_, height_,
0, kRotateNone, &video_frame_));
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
0, kVideoRotation_0, &video_frame_));
_cM_c = ca__c.ComputeContentMetrics(video_frame_);
_cM_SSE = ca__sse.ComputeContentMetrics(video_frame_);

View File

@@ -56,9 +56,9 @@ TEST_F(VideoProcessingModuleTest, Deflickering)
frame_length_)
{
frameNum++;
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
width_, height_,
0, kRotateNone, &video_frame_));
EXPECT_EQ(
0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_,
height_, 0, kVideoRotation_0, &video_frame_));
video_frame_.set_timestamp(timeStamp);
t0 = TickTime::Now();

View File

@@ -106,9 +106,8 @@ TEST_F(VideoProcessingModuleTest, HandleBadStats) {
rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_,
source_file_));
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
width_, height_,
0, kRotateNone, &video_frame_));
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
0, kVideoRotation_0, &video_frame_));
EXPECT_EQ(-1, vpm_->Deflickering(&video_frame_, &stats));
@@ -142,9 +141,8 @@ TEST_F(VideoProcessingModuleTest, IdenticalResultsAfterReset) {
rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_,
source_file_));
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
width_, height_,
0, kRotateNone, &video_frame_));
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
0, kVideoRotation_0, &video_frame_));
ASSERT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_));
ASSERT_EQ(0, video_frame2.CopyFrame(video_frame_));
ASSERT_EQ(0, vpm_->Deflickering(&video_frame_, &stats));
@@ -156,9 +154,8 @@ TEST_F(VideoProcessingModuleTest, IdenticalResultsAfterReset) {
ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_,
source_file_));
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
width_, height_,
0, kRotateNone, &video_frame_));
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
0, kVideoRotation_0, &video_frame_));
ASSERT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_));
video_frame2.CopyFrame(video_frame_);
ASSERT_EQ(0, vpm_->BrightnessDetection(video_frame_, stats));
@@ -172,9 +169,8 @@ TEST_F(VideoProcessingModuleTest, FrameStats) {
rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_,
source_file_));
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
width_, height_,
0, kRotateNone, &video_frame_));
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
0, kVideoRotation_0, &video_frame_));
EXPECT_FALSE(vpm_->ValidFrameStats(stats));
EXPECT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_));
@@ -233,9 +229,8 @@ TEST_F(VideoProcessingModuleTest, Resampler) {
ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_,
source_file_));
// Using ConvertToI420 to add stride to the image.
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
width_, height_,
0, kRotateNone, &video_frame_));
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
0, kVideoRotation_0, &video_frame_));
// Cropped source frame that will contain the expected visible region.
I420VideoFrame cropped_source_frame;
cropped_source_frame.CopyFrame(video_frame_);
@@ -337,7 +332,7 @@ void CropFrame(const uint8_t* source_data,
(cropped_width + 1) / 2);
EXPECT_EQ(0,
ConvertToI420(kI420, source_data, offset_x, offset_y, source_width,
source_height, 0, kRotateNone, cropped_frame));
source_height, 0, kVideoRotation_0, cropped_frame));
}
void TestSize(const I420VideoFrame& source_frame,

View File

@@ -80,14 +80,6 @@ enum StretchMode
kStretchNone = 5
};
enum Rotation
{
kRotation0 = 0,
kRotation90 = 1,
kRotation180 = 2,
kRotation270 = 3
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_INTERFACE_VIDEO_RENDER_DEFINES_H_

View File

@@ -110,7 +110,8 @@ class YuvFileGenerator : public FrameGenerator {
static_cast<int>((width_ + 1) / 2));
ConvertToI420(kI420, frame_buffer_.get(), 0, 0, static_cast<int>(width_),
static_cast<int>(height_), 0, kRotateNone, &last_read_frame_);
static_cast<int>(height_), 0, kVideoRotation_0,
&last_read_frame_);
}
private:

View File

@@ -124,9 +124,9 @@ int CalculateMetrics(VideoMetricsType video_metrics_type,
while (ref_bytes == frame_length && test_bytes == frame_length) {
// Converting from buffer to plane representation.
ConvertToI420(kI420, ref_buffer.get(), 0, 0, width, height, 0,
kRotateNone, &ref_frame);
kVideoRotation_0, &ref_frame);
ConvertToI420(kI420, test_buffer.get(), 0, 0, width, height, 0,
kRotateNone, &test_frame);
kVideoRotation_0, &test_frame);
switch (video_metrics_type) {
case kPSNR:
CalculateFrame(kPSNR, &ref_frame, &test_frame, frame_number,

View File

@@ -237,9 +237,9 @@ int32_t TbI420Decoder::Decode(
return WEBRTC_VIDEO_CODEC_ERROR;
}
int ret = ConvertToI420(webrtc::kI420, inputImage._buffer, 0, 0,
_width, _height,
0, webrtc::kRotateNone, &_decodedImage);
int ret =
ConvertToI420(webrtc::kI420, inputImage._buffer, 0, 0, _width, _height,
0, webrtc::kVideoRotation_0, &_decodedImage);
if (ret < 0)
return WEBRTC_VIDEO_CODEC_ERROR;

View File

@@ -74,9 +74,8 @@ int ViEFileImage::ConvertPictureToI420VideoFrame(int engine_id,
int half_width = (picture.width + 1) / 2;
video_frame->CreateEmptyFrame(picture.width, picture.height,
picture.width, half_width, half_width);
return ConvertToI420(kI420, picture.data, 0, 0,
picture.width, picture.height,
0, kRotateNone, video_frame);
return ConvertToI420(kI420, picture.data, 0, 0, picture.width, picture.height,
0, kVideoRotation_0, video_frame);
}
} // namespace webrtc