diff --git a/webrtc/common_video/libyuv/include/webrtc_libyuv.h b/webrtc/common_video/libyuv/include/webrtc_libyuv.h
index 4d5fad694..d8e931d1d 100644
--- a/webrtc/common_video/libyuv/include/webrtc_libyuv.h
+++ b/webrtc/common_video/libyuv/include/webrtc_libyuv.h
@@ -18,6 +18,7 @@
 #include
 #include "webrtc/common_types.h"  // RawVideoTypes.
+#include "webrtc/common_video/rotation.h"
 #include "webrtc/common_video/interface/i420_video_frame.h"
 #include "webrtc/typedefs.h"
@@ -50,15 +51,6 @@ const double kPerfectPSNR = 48.0f;
 // TODO(wu): Consolidate types into one type throughout WebRtc.
 VideoType RawVideoTypeToCommonVideoVideoType(RawVideoType type);
 
-// Supported rotation
-// Direction of rotation - clockwise.
-enum VideoRotationMode {
-  kRotateNone = 0,
-  kRotate90 = 90,
-  kRotate180 = 180,
-  kRotate270 = 270,
-};
-
 // Align integer values.
 // Input:
 //   - value : Input value to be aligned.
@@ -117,10 +109,12 @@ int ExtractBuffer(const I420VideoFrame& input_frame,
 int ConvertToI420(VideoType src_video_type,
                   const uint8_t* src_frame,
-                  int crop_x, int crop_y,
-                  int src_width, int src_height,
+                  int crop_x,
+                  int crop_y,
+                  int src_width,
+                  int src_height,
                   size_t sample_size,
-                  VideoRotationMode rotation,
+                  VideoRotation rotation,
                   I420VideoFrame* dst_frame);
 
 // Convert From I420
diff --git a/webrtc/common_video/libyuv/libyuv_unittest.cc b/webrtc/common_video/libyuv/libyuv_unittest.cc
index 1fecf8c41..8491aa837 100644
--- a/webrtc/common_video/libyuv/libyuv_unittest.cc
+++ b/webrtc/common_video/libyuv/libyuv_unittest.cc
@@ -150,9 +150,8 @@ TEST_F(TestLibYuv, ConvertTest) {
   rtc::scoped_ptr out_i420_buffer(new uint8_t[frame_length_]);
   EXPECT_EQ(0, ConvertFromI420(orig_frame_, kI420, 0, out_i420_buffer.get()));
-  EXPECT_EQ(0, ConvertToI420(kI420, out_i420_buffer.get(), 0, 0,
-                             width_, height_,
-                             0, kRotateNone, &res_i420_frame));
+  EXPECT_EQ(0, ConvertToI420(kI420, out_i420_buffer.get(), 0, 0, width_,
+                             height_, 0, kVideoRotation_0, &res_i420_frame));
 
   if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) {
     return;
@@ -172,7 +171,7 @@
   EXPECT_EQ(0, ConvertFromI420(orig_frame_, kRGB24, 0, res_rgb_buffer2.get()));
 
   EXPECT_EQ(0, ConvertToI420(kRGB24, res_rgb_buffer2.get(), 0, 0, width_,
-                             height_, 0, kRotateNone, &res_i420_frame));
+                             height_, 0, kVideoRotation_0, &res_i420_frame));
 
   if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) {
     return;
@@ -187,7 +186,7 @@
   rtc::scoped_ptr out_uyvy_buffer(new uint8_t[width_ * height_ * 2]);
   EXPECT_EQ(0, ConvertFromI420(orig_frame_, kUYVY, 0, out_uyvy_buffer.get()));
   EXPECT_EQ(0, ConvertToI420(kUYVY, out_uyvy_buffer.get(), 0, 0, width_,
-                             height_, 0, kRotateNone, &res_i420_frame));
+                             height_, 0, kVideoRotation_0, &res_i420_frame));
   psnr = I420PSNR(&orig_frame_, &res_i420_frame);
   EXPECT_EQ(48.0, psnr);
   if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) {
@@ -211,8 +210,8 @@
       return;
     }
-    ConvertToI420(kI420, res_i420_buffer.get(), 0, 0,
-                  width_, height_, 0, kRotateNone, &res_i420_frame);
+    ConvertToI420(kI420, res_i420_buffer.get(), 0, 0, width_, height_, 0,
+                  kVideoRotation_0, &res_i420_frame);
     psnr = I420PSNR(&orig_frame_, &res_i420_frame);
     EXPECT_EQ(48.0, psnr);
     j++;
@@ -222,7 +221,7 @@
   EXPECT_EQ(0, ConvertFromI420(orig_frame_, kYUY2, 0, out_yuy2_buffer.get()));
   EXPECT_EQ(0, ConvertToI420(kYUY2, out_yuy2_buffer.get(), 0, 0, width_,
-                             height_, 0, kRotateNone, &res_i420_frame));
+                             height_, 0, kVideoRotation_0, &res_i420_frame));
 
   if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) {
     return;
@@ -237,7 +236,7 @@
                                out_rgb565_buffer.get()));
 
   EXPECT_EQ(0, ConvertToI420(kRGB565, out_rgb565_buffer.get(), 0, 0, width_,
-                             height_, 0, kRotateNone, &res_i420_frame));
+                             height_, 0, kVideoRotation_0, &res_i420_frame));
 
   if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) {
     return;
@@ -257,7 +256,7 @@
                                out_argb8888_buffer.get()));
 
   EXPECT_EQ(0, ConvertToI420(kARGB, out_argb8888_buffer.get(), 0, 0, width_,
-                             height_, 0, kRotateNone, &res_i420_frame));
+                             height_, 0, kVideoRotation_0, &res_i420_frame));
 
   if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) {
     return;
@@ -288,9 +287,8 @@ TEST_F(TestLibYuv, ConvertAlignedFrame) {
   rtc::scoped_ptr out_i420_buffer(new uint8_t[frame_length_]);
   EXPECT_EQ(0, ConvertFromI420(orig_frame_, kI420, 0, out_i420_buffer.get()));
-  EXPECT_EQ(0, ConvertToI420(kI420, out_i420_buffer.get(), 0, 0,
-                             width_, height_,
-                             0, kRotateNone, &res_i420_frame));
+  EXPECT_EQ(0, ConvertToI420(kI420, out_i420_buffer.get(), 0, 0, width_,
+                             height_, 0, kVideoRotation_0, &res_i420_frame));
 
   if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) {
     return;
@@ -314,18 +312,15 @@ TEST_F(TestLibYuv, RotateTest) {
                                              stride_y, stride_uv, stride_uv));
-  EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer_.get(), 0, 0,
-                             width_, height_,
-                             0, kRotate90, &rotated_res_i420_frame));
-  EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer_.get(), 0, 0,
-                             width_, height_,
-                             0, kRotate270, &rotated_res_i420_frame));
+  EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer_.get(), 0, 0, width_, height_,
+                             0, kVideoRotation_90, &rotated_res_i420_frame));
+  EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer_.get(), 0, 0, width_, height_,
+                             0, kVideoRotation_270, &rotated_res_i420_frame));
   EXPECT_EQ(0,rotated_res_i420_frame.CreateEmptyFrame(width_, height_, width_,
                                                       (width_ + 1) / 2,
                                                       (width_ + 1) / 2));
-  EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer_.get(), 0, 0,
-                             width_, height_,
-                             0, kRotate180, &rotated_res_i420_frame));
+  EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer_.get(), 0, 0, width_, height_,
+                             0, kVideoRotation_180, &rotated_res_i420_frame));
 }
 
 TEST_F(TestLibYuv, alignment) {
diff --git a/webrtc/common_video/libyuv/scaler_unittest.cc b/webrtc/common_video/libyuv/scaler_unittest.cc
index e50829092..d77ce59e5 100644
--- a/webrtc/common_video/libyuv/scaler_unittest.cc
+++ b/webrtc/common_video/libyuv/scaler_unittest.cc
@@ -309,9 +309,9 @@ double TestScaler::ComputeAvgSequencePSNR(FILE* input_file,
     }
     frame_count++;
     EXPECT_EQ(0, ConvertToI420(kI420, input_buffer, 0, 0, width, height,
-                               required_size, kRotateNone, &in_frame));
+                               required_size, kVideoRotation_0, &in_frame));
     EXPECT_EQ(0, ConvertToI420(kI420, output_buffer, 0, 0, width, height,
-                               required_size, kRotateNone, &out_frame));
+                               required_size, kVideoRotation_0, &out_frame));
     double psnr = I420PSNR(&in_frame, &out_frame);
     avg_psnr += psnr;
   }
diff --git a/webrtc/common_video/libyuv/webrtc_libyuv.cc b/webrtc/common_video/libyuv/webrtc_libyuv.cc
index 339ceea1d..65b4d0c5c 100644
--- a/webrtc/common_video/libyuv/webrtc_libyuv.cc
+++ b/webrtc/common_video/libyuv/webrtc_libyuv.cc
@@ -176,15 +176,15 @@ int ConvertRGB24ToARGB(const uint8_t* src_frame, uint8_t* dst_frame,
                        width, height);
 }
 
-libyuv::RotationMode ConvertRotationMode(VideoRotationMode rotation) {
+libyuv::RotationMode ConvertRotationMode(VideoRotation rotation) {
   switch(rotation) {
-    case kRotateNone:
+    case kVideoRotation_0:
      return libyuv::kRotate0;
-    case kRotate90:
+    case kVideoRotation_90:
      return libyuv::kRotate90;
-    case kRotate180:
+    case kVideoRotation_180:
      return libyuv::kRotate180;
-    case kRotate270:
+    case kVideoRotation_270:
      return libyuv::kRotate270;
   }
   assert(false);
@@ -231,16 +231,18 @@ int ConvertVideoType(VideoType video_type) {
 int ConvertToI420(VideoType src_video_type,
                   const uint8_t* src_frame,
-                  int crop_x, int crop_y,
-                  int src_width, int src_height,
+                  int crop_x,
+                  int crop_y,
+                  int src_width,
+                  int src_height,
                   size_t sample_size,
-                  VideoRotationMode rotation,
+                  VideoRotation rotation,
                   I420VideoFrame* dst_frame) {
   int dst_width = dst_frame->width();
   int dst_height = dst_frame->height();
   // LibYuv expects pre-rotation values for dst.
   // Stride values should correspond to the destination values.
-  if (rotation == kRotate90 || rotation == kRotate270) {
+  if (rotation == kVideoRotation_90 || rotation == kVideoRotation_270) {
     dst_width = dst_frame->height();
     dst_height =dst_frame->width();
   }
diff --git a/webrtc/modules/video_capture/video_capture_impl.cc b/webrtc/modules/video_capture/video_capture_impl.cc
index 7202b69fa..3bf5e4bb8 100644
--- a/webrtc/modules/video_capture/video_capture_impl.cc
+++ b/webrtc/modules/video_capture/video_capture_impl.cc
@@ -25,22 +25,6 @@
 namespace webrtc {
 
-// Converting the rotation mode from capturemodule's to I420VideoFrame's define.
-VideoRotation ConvertRotation(VideoRotationMode rotation) {
-  switch (rotation) {
-    case kRotateNone:
-      return kVideoRotation_0;
-    case kRotate90:
-      return kVideoRotation_90;
-    case kRotate180:
-      return kVideoRotation_180;
-    case kRotate270:
-      return kVideoRotation_270;
-  }
-  assert(false);
-  return kVideoRotation_0;
-}
-
 namespace videocapturemodule {
 
 VideoCaptureModule* VideoCaptureImpl::Create(
@@ -172,7 +156,7 @@ VideoCaptureImpl::VideoCaptureImpl(const int32_t id)
       _dataCallBack(NULL),
       _captureCallBack(NULL),
       _lastProcessFrameCount(TickTime::Now()),
-      _rotateFrame(kRotateNone),
+      _rotateFrame(kVideoRotation_0),
       apply_rotation_(true) {
   _requestedCapability.width = kDefaultWidth;
   _requestedCapability.height = kDefaultHeight;
@@ -281,7 +265,8 @@ int32_t VideoCaptureImpl::IncomingFrame(
   if (apply_rotation_) {
     // Rotating resolution when for 90/270 degree rotations.
-    if (_rotateFrame == kRotate90 || _rotateFrame == kRotate270) {
+    if (_rotateFrame == kVideoRotation_90 ||
+        _rotateFrame == kVideoRotation_270) {
       target_width = abs(height);
       target_height = width;
     }
@@ -305,8 +290,7 @@ int32_t VideoCaptureImpl::IncomingFrame(
     const int conversionResult = ConvertToI420(
         commonVideoType, videoFrame, 0, 0,  // No cropping
         width, height, videoFrameLength,
-        apply_rotation_ ? _rotateFrame : kRotateNone,
-        &_captureFrame);
+        apply_rotation_ ? _rotateFrame : kVideoRotation_0, &_captureFrame);
     if (conversionResult < 0) {
       LOG(LS_ERROR) << "Failed to convert capture frame from type "
@@ -315,7 +299,7 @@ int32_t VideoCaptureImpl::IncomingFrame(
     }
 
     if (!apply_rotation_) {
-      _captureFrame.set_rotation(ConvertRotation(_rotateFrame));
+      _captureFrame.set_rotation(_rotateFrame);
     } else {
       _captureFrame.set_rotation(kVideoRotation_0);
     }
@@ -336,22 +320,7 @@ int32_t VideoCaptureImpl::IncomingFrame(
 int32_t VideoCaptureImpl::SetCaptureRotation(VideoRotation rotation) {
   CriticalSectionScoped cs(&_apiCs);
   CriticalSectionScoped cs2(&_callBackCs);
-  switch (rotation){
-    case kVideoRotation_0:
-      _rotateFrame = kRotateNone;
-      break;
-    case kVideoRotation_90:
-      _rotateFrame = kRotate90;
-      break;
-    case kVideoRotation_180:
-      _rotateFrame = kRotate180;
-      break;
-    case kVideoRotation_270:
-      _rotateFrame = kRotate270;
-      break;
-    default:
-      return -1;
-  }
+  _rotateFrame = rotation;
   return 0;
 }
diff --git a/webrtc/modules/video_capture/video_capture_impl.h b/webrtc/modules/video_capture/video_capture_impl.h
index a4ac68031..fd4b39894 100644
--- a/webrtc/modules/video_capture/video_capture_impl.h
+++ b/webrtc/modules/video_capture/video_capture_impl.h
@@ -17,6 +17,7 @@
 #include "webrtc/common_video/interface/i420_video_frame.h"
 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/common_video/rotation.h"
 #include "webrtc/modules/video_capture/include/video_capture.h"
 #include "webrtc/modules/video_capture/video_capture_config.h"
 #include "webrtc/system_wrappers/interface/tick_util.h"
@@ -128,7 +129,8 @@ private:
     TickTime _lastProcessFrameCount;
     TickTime _incomingFrameTimes[kFrameRateCountHistorySize];// timestamp for local captured frames
-    VideoRotationMode _rotateFrame; //Set if the frame should be rotated by the capture module.
+    VideoRotation _rotateFrame;  // Set if the frame should be rotated by the
+                                 // capture module.
 
     I420VideoFrame _captureFrame;
diff --git a/webrtc/modules/video_coding/codecs/i420/main/source/i420.cc b/webrtc/modules/video_coding/codecs/i420/main/source/i420.cc
index e6aabbc8e..be2d17d87 100644
--- a/webrtc/modules/video_coding/codecs/i420/main/source/i420.cc
+++ b/webrtc/modules/video_coding/codecs/i420/main/source/i420.cc
@@ -208,8 +208,8 @@ int I420Decoder::Decode(const EncodedImage& inputImage, bool /*missingFrames*/,
   _decodedImage.CreateEmptyFrame(_width, _height, _width, half_width,
                                  half_width);
   // Converting from buffer to plane representation.
-  int ret = ConvertToI420(kI420, buffer, 0, 0, _width, _height, 0, kRotateNone,
-                          &_decodedImage);
+  int ret = ConvertToI420(kI420, buffer, 0, 0, _width, _height, 0,
+                          kVideoRotation_0, &_decodedImage);
   if (ret < 0) {
     return WEBRTC_VIDEO_CODEC_MEMORY;
   }
diff --git a/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc b/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
index e2e56e4e0..43fc9c8e8 100644
--- a/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
@@ -137,9 +137,9 @@ class TestVp8Impl : public ::testing::Test {
                                  stride_y, stride_uv, stride_uv);
     input_frame_.set_timestamp(kTestTimestamp);
     // Using ConvertToI420 to add stride to the image.
-    EXPECT_EQ(
-        0, ConvertToI420(kI420, source_buffer_.get(), 0, 0, codec_inst_.width,
-                         codec_inst_.height, 0, kRotateNone, &input_frame_));
+    EXPECT_EQ(0, ConvertToI420(kI420, source_buffer_.get(), 0, 0,
+                               codec_inst_.width, codec_inst_.height, 0,
+                               kVideoRotation_0, &input_frame_));
   }
 
   void SetUpEncodeDecode() {
diff --git a/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc b/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc
index 84b17e4a8..224d57cd2 100644
--- a/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc
@@ -161,9 +161,8 @@ int SequenceCoder(webrtc::test::CommandLineParser& parser) {
     if (fread(frame_buffer.get(), 1, length, input_file) != length)
       continue;
     if (frame_cnt >= start_frame) {
-      webrtc::ConvertToI420(webrtc::kI420, frame_buffer.get(), 0, 0,
-                            width, height, 0, webrtc::kRotateNone,
-                            &input_frame);
+      webrtc::ConvertToI420(webrtc::kI420, frame_buffer.get(), 0, 0, width,
+                            height, 0, webrtc::kVideoRotation_0, &input_frame);
       encoder->Encode(input_frame, NULL, NULL);
       decoder->Decode(encoder_callback.encoded_image(), false, NULL);
       ++frames_processed;
diff --git a/webrtc/modules/video_processing/main/test/unit_test/brightness_detection_test.cc b/webrtc/modules/video_processing/main/test/unit_test/brightness_detection_test.cc
index 69f7fd86d..8e15d6439 100644
--- a/webrtc/modules/video_processing/main/test/unit_test/brightness_detection_test.cc
+++ b/webrtc/modules/video_processing/main/test/unit_test/brightness_detection_test.cc
@@ -23,9 +23,8 @@ TEST_F(VideoProcessingModuleTest, BrightnessDetection)
   while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
          frame_length_) {
-    EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
-                               width_, height_,
-                               0, kRotateNone, &video_frame_));
+    EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_,
+                               height_, 0, kVideoRotation_0, &video_frame_));
     frameNum++;
     VideoProcessingModule::FrameStats stats;
     ASSERT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_));
@@ -51,9 +50,8 @@
          frame_length_ && frameNum < 300) {
-    EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
-                               width_, height_,
-                               0, kRotateNone, &video_frame_));
+    EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_,
+                               height_, 0, kVideoRotation_0, &video_frame_));
     frameNum++;
     uint8_t* frame = video_frame_.buffer(kYPlane);
@@ -91,9 +89,8 @@ TEST_F(VideoProcessingModuleTest, BrightnessDetection)
   while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
          frame_length_ && frameNum < 300) {
-    EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
-                               width_, height_,
-                               0, kRotateNone, &video_frame_));
+    EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_,
+                               height_, 0, kVideoRotation_0, &video_frame_));
     frameNum++;
     uint8_t* y_plane = video_frame_.buffer(kYPlane);
diff --git a/webrtc/modules/video_processing/main/test/unit_test/color_enhancement_test.cc b/webrtc/modules/video_processing/main/test/unit_test/color_enhancement_test.cc
index 80f230d9f..4307be3f3 100644
--- a/webrtc/modules/video_processing/main/test/unit_test/color_enhancement_test.cc
+++ b/webrtc/modules/video_processing/main/test/unit_test/color_enhancement_test.cc
@@ -44,9 +44,8 @@ TEST_F(VideoProcessingModuleTest, ColorEnhancement)
          frame_length_) {
     // Using ConvertToI420 to add stride to the image.
-    EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
-                               width_, height_,
-                               0, kRotateNone, &video_frame_));
+    EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_,
+                               height_, 0, kVideoRotation_0, &video_frame_));
     frameNum++;
     t0 = TickTime::Now();
     ASSERT_EQ(0, VideoProcessingModule::ColorEnhancement(&video_frame_));
@@ -91,14 +90,13 @@ TEST_F(VideoProcessingModuleTest, ColorEnhancement)
          frame_length_) {
     // Using ConvertToI420 to add stride to the image.
-    EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
-                               width_, height_,
-                               0, kRotateNone, &video_frame_));
+    EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_,
+                               height_, 0, kVideoRotation_0, &video_frame_));
     ASSERT_EQ(frame_length_,
               fread(ref_buffer.get(), 1, frame_length_, refFile));
-    EXPECT_EQ(0, ConvertToI420(kI420, ref_buffer.get(), 0, 0,
-                               width_, height_,
-                               0, kRotateNone, &refVideoFrame));
+    EXPECT_EQ(
+        0, ConvertToI420(kI420, ref_buffer.get(), 0, 0, width_, height_, 0,
+                         kVideoRotation_0, &refVideoFrame));
     EXPECT_EQ(0, memcmp(video_frame_.buffer(kYPlane),
                         refVideoFrame.buffer(kYPlane),
                         size_y_));
@@ -123,9 +121,8 @@ TEST_F(VideoProcessingModuleTest, ColorEnhancement)
   I420VideoFrame testVideoFrame;
   testVideoFrame.CreateEmptyFrame(width_, height_, width_,
                                   half_width_, half_width_);
-  EXPECT_EQ(0, ConvertToI420(kI420, testFrame.get(), 0, 0,
-                             width_, height_, 0, kRotateNone,
-                             &testVideoFrame));
+  EXPECT_EQ(0, ConvertToI420(kI420, testFrame.get(), 0, 0, width_, height_, 0,
+                             kVideoRotation_0, &testVideoFrame));
 
   ASSERT_EQ(0, VideoProcessingModule::ColorEnhancement(&testVideoFrame));
diff --git a/webrtc/modules/video_processing/main/test/unit_test/content_metrics_test.cc b/webrtc/modules/video_processing/main/test/unit_test/content_metrics_test.cc
index ca71d551b..8a2404f8e 100644
--- a/webrtc/modules/video_processing/main/test/unit_test/content_metrics_test.cc
+++ b/webrtc/modules/video_processing/main/test/unit_test/content_metrics_test.cc
@@ -27,9 +27,8 @@ TEST_F(VideoProcessingModuleTest, ContentAnalysis) {
   while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
          frame_length_) {
     // Using ConvertToI420 to add stride to the image.
-    EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
-                               width_, height_,
-                               0, kRotateNone, &video_frame_));
+    EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
+                               0, kVideoRotation_0, &video_frame_));
     _cM_c   = ca__c.ComputeContentMetrics(video_frame_);
     _cM_SSE = ca__sse.ComputeContentMetrics(video_frame_);
diff --git a/webrtc/modules/video_processing/main/test/unit_test/deflickering_test.cc b/webrtc/modules/video_processing/main/test/unit_test/deflickering_test.cc
index 01e98d13d..cba1dfc4f 100644
--- a/webrtc/modules/video_processing/main/test/unit_test/deflickering_test.cc
+++ b/webrtc/modules/video_processing/main/test/unit_test/deflickering_test.cc
@@ -56,9 +56,9 @@ TEST_F(VideoProcessingModuleTest, Deflickering)
          frame_length_) {
     frameNum++;
-    EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
-                               width_, height_,
-                               0, kRotateNone, &video_frame_));
+    EXPECT_EQ(
+        0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_,
+                         height_, 0, kVideoRotation_0, &video_frame_));
     video_frame_.set_timestamp(timeStamp);
 
     t0 = TickTime::Now();
diff --git a/webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.cc b/webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.cc
index 5ca2feb07..60a2e41c4 100644
--- a/webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.cc
+++ b/webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.cc
@@ -106,9 +106,8 @@ TEST_F(VideoProcessingModuleTest, HandleBadStats) {
   rtc::scoped_ptr video_buffer(new uint8_t[frame_length_]);
   ASSERT_EQ(frame_length_,
             fread(video_buffer.get(), 1, frame_length_, source_file_));
-  EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
-                             width_, height_,
-                             0, kRotateNone, &video_frame_));
+  EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
+                             0, kVideoRotation_0, &video_frame_));
 
   EXPECT_EQ(-1, vpm_->Deflickering(&video_frame_, &stats));
@@ -142,9 +141,8 @@ TEST_F(VideoProcessingModuleTest, IdenticalResultsAfterReset) {
   rtc::scoped_ptr video_buffer(new uint8_t[frame_length_]);
   ASSERT_EQ(frame_length_,
             fread(video_buffer.get(), 1, frame_length_, source_file_));
-  EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
-                             width_, height_,
-                             0, kRotateNone, &video_frame_));
+  EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
+                             0, kVideoRotation_0, &video_frame_));
   ASSERT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_));
   ASSERT_EQ(0, video_frame2.CopyFrame(video_frame_));
   ASSERT_EQ(0, vpm_->Deflickering(&video_frame_, &stats));
@@ -156,9 +154,8 @@ TEST_F(VideoProcessingModuleTest, IdenticalResultsAfterReset) {
   ASSERT_EQ(frame_length_,
             fread(video_buffer.get(), 1, frame_length_, source_file_));
-  EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
-                             width_, height_,
-                             0, kRotateNone, &video_frame_));
+  EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
+                             0, kVideoRotation_0, &video_frame_));
   ASSERT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_));
   video_frame2.CopyFrame(video_frame_);
   ASSERT_EQ(0, vpm_->BrightnessDetection(video_frame_, stats));
@@ -172,9 +169,8 @@ TEST_F(VideoProcessingModuleTest, FrameStats) {
   rtc::scoped_ptr video_buffer(new uint8_t[frame_length_]);
   ASSERT_EQ(frame_length_,
             fread(video_buffer.get(), 1, frame_length_, source_file_));
-  EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
-                             width_, height_,
-                             0, kRotateNone, &video_frame_));
+  EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
+                             0, kVideoRotation_0, &video_frame_));
 
   EXPECT_FALSE(vpm_->ValidFrameStats(stats));
   EXPECT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_));
@@ -233,9 +229,8 @@ TEST_F(VideoProcessingModuleTest, Resampler) {
   ASSERT_EQ(frame_length_,
             fread(video_buffer.get(), 1, frame_length_, source_file_));
   // Using ConvertToI420 to add stride to the image.
-  EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
-                             width_, height_,
-                             0, kRotateNone, &video_frame_));
+  EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
+                             0, kVideoRotation_0, &video_frame_));
   // Cropped source frame that will contain the expected visible region.
   I420VideoFrame cropped_source_frame;
   cropped_source_frame.CopyFrame(video_frame_);
@@ -337,7 +332,7 @@ void CropFrame(const uint8_t* source_data,
                                  (cropped_width + 1) / 2);
   EXPECT_EQ(0,
             ConvertToI420(kI420, source_data, offset_x, offset_y, source_width,
-                          source_height, 0, kRotateNone, cropped_frame));
+                          source_height, 0, kVideoRotation_0, cropped_frame));
 }
 
 void TestSize(const I420VideoFrame& source_frame,
diff --git a/webrtc/modules/video_render/include/video_render_defines.h b/webrtc/modules/video_render/include/video_render_defines.h
index 36d439875..e5da2bb64 100644
--- a/webrtc/modules/video_render/include/video_render_defines.h
+++ b/webrtc/modules/video_render/include/video_render_defines.h
@@ -80,14 +80,6 @@ enum StretchMode
     kStretchNone = 5
 };
 
-enum Rotation
-{
-    kRotation0 = 0,
-    kRotation90 = 1,
-    kRotation180 = 2,
-    kRotation270 = 3
-};
-
 }  // namespace webrtc
 
 #endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_INTERFACE_VIDEO_RENDER_DEFINES_H_
diff --git a/webrtc/test/frame_generator.cc b/webrtc/test/frame_generator.cc
index 71be89e62..5152f1a87 100644
--- a/webrtc/test/frame_generator.cc
+++ b/webrtc/test/frame_generator.cc
@@ -110,7 +110,8 @@ class YuvFileGenerator : public FrameGenerator {
                                        static_cast((width_ + 1) / 2));
     ConvertToI420(kI420, frame_buffer_.get(), 0, 0, static_cast(width_),
-                  static_cast(height_), 0, kRotateNone, &last_read_frame_);
+                  static_cast(height_), 0, kVideoRotation_0,
+                  &last_read_frame_);
   }
 
  private:
diff --git a/webrtc/test/testsupport/metrics/video_metrics.cc b/webrtc/test/testsupport/metrics/video_metrics.cc
index 145d5585f..0202a71eb 100644
--- a/webrtc/test/testsupport/metrics/video_metrics.cc
+++ b/webrtc/test/testsupport/metrics/video_metrics.cc
@@ -124,9 +124,9 @@ int CalculateMetrics(VideoMetricsType video_metrics_type,
   while (ref_bytes == frame_length && test_bytes == frame_length) {
     // Converting from buffer to plane representation.
     ConvertToI420(kI420, ref_buffer.get(), 0, 0, width, height, 0,
-                  kRotateNone, &ref_frame);
+                  kVideoRotation_0, &ref_frame);
     ConvertToI420(kI420, test_buffer.get(), 0, 0, width, height, 0,
-                  kRotateNone, &test_frame);
+                  kVideoRotation_0, &test_frame);
     switch (video_metrics_type) {
       case kPSNR:
         CalculateFrame(kPSNR, &ref_frame, &test_frame, frame_number,
diff --git a/webrtc/video_engine/test/libvietest/testbed/tb_I420_codec.cc b/webrtc/video_engine/test/libvietest/testbed/tb_I420_codec.cc
index a6910e348..e9cefcf84 100644
--- a/webrtc/video_engine/test/libvietest/testbed/tb_I420_codec.cc
+++ b/webrtc/video_engine/test/libvietest/testbed/tb_I420_codec.cc
@@ -237,9 +237,9 @@ int32_t TbI420Decoder::Decode(
     return WEBRTC_VIDEO_CODEC_ERROR;
   }
 
-  int ret = ConvertToI420(webrtc::kI420, inputImage._buffer, 0, 0,
-                          _width, _height,
-                          0, webrtc::kRotateNone, &_decodedImage);
+  int ret =
+      ConvertToI420(webrtc::kI420, inputImage._buffer, 0, 0, _width, _height,
+                    0, webrtc::kVideoRotation_0, &_decodedImage);
 
   if (ret < 0)
     return WEBRTC_VIDEO_CODEC_ERROR;
diff --git a/webrtc/video_engine/vie_file_image.cc b/webrtc/video_engine/vie_file_image.cc
index 919da9fa1..cb6e20619 100644
--- a/webrtc/video_engine/vie_file_image.cc
+++ b/webrtc/video_engine/vie_file_image.cc
@@ -74,9 +74,8 @@ int ViEFileImage::ConvertPictureToI420VideoFrame(int engine_id,
   int half_width = (picture.width + 1) / 2;
   video_frame->CreateEmptyFrame(picture.width, picture.height,
                                 picture.width, half_width, half_width);
-  return ConvertToI420(kI420, picture.data, 0, 0,
-                       picture.width, picture.height,
-                       0, kRotateNone, video_frame);
+  return ConvertToI420(kI420, picture.data, 0, 0, picture.width, picture.height,
+                       0, kVideoRotation_0, video_frame);
 }
 
 }  // namespace webrtc
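For orientation, here is a minimal usage sketch of the consolidated API after this patch: a raw I420 buffer is converted into an I420VideoFrame with a 90-degree clockwise rotation, passing webrtc::VideoRotation directly instead of the removed VideoRotationMode. The helper name ConvertWithRotation and the swapped-dimension frame setup are illustrative assumptions, not code from this change.

// Illustrative sketch only; assumes a WebRTC checkout that contains this patch.
#include "webrtc/common_video/interface/i420_video_frame.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/common_video/rotation.h"

// Converts one raw I420 buffer into |frame| while rotating it 90 degrees
// clockwise. |width| and |height| describe the source buffer; the destination
// frame is allocated with swapped dimensions, mirroring what
// VideoCaptureImpl::IncomingFrame does for 90/270-degree rotations.
int ConvertWithRotation(const uint8_t* raw_buffer,
                        int width,
                        int height,
                        webrtc::I420VideoFrame* frame) {
  const int dst_half_width = (height + 1) / 2;
  frame->CreateEmptyFrame(height, width,   // post-rotation width and height
                          height,          // stride_y
                          dst_half_width, dst_half_width);
  return webrtc::ConvertToI420(webrtc::kI420, raw_buffer,
                               0, 0,           // no cropping
                               width, height,  // source dimensions
                               0,              // sample_size, as in the tests above
                               webrtc::kVideoRotation_90, frame);
}

A caller that previously mapped between Rotation, VideoRotationMode, and VideoRotation can now store and forward a single webrtc::VideoRotation value, which is what SetCaptureRotation does after this change.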