Add support for texture frames in the video capturer.

- Add ViECapturer unittest.
- Add CloneFrame function in I420VideoFrame.
- Encoders do not support texture yet, so texture frames are dropped in ViEEncoder for now.

Corresponding CLs:
https://codereview.chromium.org/277943002
http://cl/66620352

BUG=chromium:362437
TEST=WebRTC video stream forwarding. Run video_engine_core_unittests and common_video_unittests.
R=fischman@webrtc.org, perkj@webrtc.org, stefan@webrtc.org, wu@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/12499004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@6252 4adac7df-926f-26a2-2b94-8c16560cd09d
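The new I420VideoFrame::CloneFrame() API is what ViECapturer, VideoFramesQueue and VideoRenderFrames use below to take ownership of an incoming frame. A minimal sketch of the ownership contract, based on the unit tests in this change (the helper name and the std::list queue are illustrative, not part of the CL):

// Illustrative sketch (not part of this CL): taking an owned copy of a frame
// with the new CloneFrame() API.
#include <list>

#include "webrtc/common_video/interface/i420_video_frame.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"

namespace webrtc {

bool QueueCopyOfFrame(const I420VideoFrame& frame,
                      std::list<I420VideoFrame*>* queue) {
  // CloneFrame() returns a heap-allocated copy that the caller owns, or NULL
  // if the underlying CopyFrame() fails. For TextureVideoFrame the clone
  // shares the ref-counted native handle instead of copying pixel data.
  scoped_ptr<I420VideoFrame> copy(frame.CloneFrame());
  if (copy.get() == NULL)
    return false;
  queue->push_back(copy.release());  // The queue takes ownership.
  return true;
}

}  // namespace webrtc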
@@ -10,6 +10,8 @@
 
 #include "webrtc/common_video/interface/i420_video_frame.h"
 
+#include <string.h>
+
 #include <algorithm>  // swap
 
 namespace webrtc {
@@ -78,6 +80,15 @@ int I420VideoFrame::CopyFrame(const I420VideoFrame& videoFrame) {
   return 0;
 }
 
+I420VideoFrame* I420VideoFrame::CloneFrame() const {
+  scoped_ptr<I420VideoFrame> new_frame(new I420VideoFrame());
+  if (new_frame->CopyFrame(*this) == -1) {
+    // CopyFrame failed.
+    return NULL;
+  }
+  return new_frame.release();
+}
+
 void I420VideoFrame::SwapFrame(I420VideoFrame* videoFrame) {
   y_plane_.Swap(videoFrame->y_plane_);
   u_plane_.Swap(videoFrame->u_plane_);
@@ -19,8 +19,8 @@
 
 namespace webrtc {
 
-bool EqualFrames(const I420VideoFrame& videoFrame1,
-                 const I420VideoFrame& videoFrame2);
+bool EqualFrames(const I420VideoFrame& frame1,
+                 const I420VideoFrame& frame2);
 bool EqualFramesExceptSize(const I420VideoFrame& frame1,
                            const I420VideoFrame& frame2);
 int ExpectedSize(int plane_stride, int image_height, PlaneType type);
@@ -122,6 +122,29 @@ TEST(TestI420VideoFrame, CopyFrame) {
   EXPECT_TRUE(EqualFrames(frame1, frame2));
 }
 
+TEST(TestI420VideoFrame, CloneFrame) {
+  I420VideoFrame frame1;
+  scoped_ptr<I420VideoFrame> frame2;
+  const int kSizeY = 225;
+  const int kSizeU = 80;
+  const int kSizeV = 80;
+  uint8_t buffer_y[kSizeY];
+  uint8_t buffer_u[kSizeU];
+  uint8_t buffer_v[kSizeV];
+  memset(buffer_y, 16, kSizeY);
+  memset(buffer_u, 8, kSizeU);
+  memset(buffer_v, 4, kSizeV);
+  frame1.CreateFrame(
+      kSizeY, buffer_y, kSizeU, buffer_u, kSizeV, buffer_v, 20, 20, 20, 10, 10);
+  frame1.set_timestamp(1);
+  frame1.set_ntp_time_ms(2);
+  frame1.set_render_time_ms(3);
+
+  frame2.reset(frame1.CloneFrame());
+  EXPECT_TRUE(frame2.get() != NULL);
+  EXPECT_TRUE(EqualFrames(frame1, *frame2));
+}
+
 TEST(TestI420VideoFrame, CopyBuffer) {
   I420VideoFrame frame1, frame2;
   int width = 15;
@@ -234,29 +257,24 @@ TEST(TestI420VideoFrame, RefCountedInstantiation) {
 
 bool EqualFrames(const I420VideoFrame& frame1,
                  const I420VideoFrame& frame2) {
-  if (!EqualFramesExceptSize(frame1, frame2))
-    return false;
-  // Compare allocated memory size.
-  bool ret = true;
-  ret |= (frame1.allocated_size(kYPlane) == frame2.allocated_size(kYPlane));
-  ret |= (frame1.allocated_size(kUPlane) == frame2.allocated_size(kUPlane));
-  ret |= (frame1.allocated_size(kVPlane) == frame2.allocated_size(kVPlane));
-  return ret;
+  return (EqualFramesExceptSize(frame1, frame2) &&
+          (frame1.allocated_size(kYPlane) == frame2.allocated_size(kYPlane)) &&
+          (frame1.allocated_size(kUPlane) == frame2.allocated_size(kUPlane)) &&
+          (frame1.allocated_size(kVPlane) == frame2.allocated_size(kVPlane)));
 }
 
 bool EqualFramesExceptSize(const I420VideoFrame& frame1,
                            const I420VideoFrame& frame2) {
-  bool ret = true;
-  ret |= (frame1.width() == frame2.width());
-  ret |= (frame1.height() == frame2.height());
-  ret |= (frame1.stride(kYPlane) == frame2.stride(kYPlane));
-  ret |= (frame1.stride(kUPlane) == frame2.stride(kUPlane));
-  ret |= (frame1.stride(kVPlane) == frame2.stride(kVPlane));
-  ret |= (frame1.timestamp() == frame2.timestamp());
-  ret |= (frame1.ntp_time_ms() == frame2.ntp_time_ms());
-  ret |= (frame1.render_time_ms() == frame2.render_time_ms());
-  if (!ret)
+  if ((frame1.width() != frame2.width()) ||
+      (frame1.height() != frame2.height()) ||
+      (frame1.stride(kYPlane) != frame2.stride(kYPlane)) ||
+      (frame1.stride(kUPlane) != frame2.stride(kUPlane)) ||
+      (frame1.stride(kVPlane) != frame2.stride(kVPlane)) ||
+      (frame1.timestamp() != frame2.timestamp()) ||
+      (frame1.ntp_time_ms() != frame2.ntp_time_ms()) ||
+      (frame1.render_time_ms() != frame2.render_time_ms())) {
     return false;
+  }
   // Memory should be the equal for the minimum of the two sizes.
   int size_y = std::min(frame1.allocated_size(kYPlane),
                         frame2.allocated_size(kYPlane));
@@ -264,13 +282,9 @@ bool EqualFramesExceptSize(const I420VideoFrame& frame1,
                         frame2.allocated_size(kUPlane));
   int size_v = std::min(frame1.allocated_size(kVPlane),
                         frame2.allocated_size(kVPlane));
-  int ret_val = 0;
-  ret_val += memcmp(frame1.buffer(kYPlane), frame2.buffer(kYPlane), size_y);
-  ret_val += memcmp(frame1.buffer(kUPlane), frame2.buffer(kUPlane), size_u);
-  ret_val += memcmp(frame1.buffer(kVPlane), frame2.buffer(kVPlane), size_v);
-  if (ret_val == 0)
-    return true;
-  return false;
+  return (memcmp(frame1.buffer(kYPlane), frame2.buffer(kYPlane), size_y) == 0 &&
+          memcmp(frame1.buffer(kUPlane), frame2.buffer(kUPlane), size_u) == 0 &&
+          memcmp(frame1.buffer(kVPlane), frame2.buffer(kVPlane), size_v) == 0);
 }
 
 int ExpectedSize(int plane_stride, int image_height, PlaneType type) {
@@ -51,13 +51,13 @@ class I420VideoFrame {
   // on set dimensions - height and plane stride.
   // If required size is bigger than the allocated one, new buffers of adequate
   // size will be allocated.
-  // Return value: 0 on success ,-1 on error.
+  // Return value: 0 on success, -1 on error.
   virtual int CreateEmptyFrame(int width, int height,
                                int stride_y, int stride_u, int stride_v);
 
   // CreateFrame: Sets the frame's members and buffers. If required size is
   // bigger than allocated one, new buffers of adequate size will be allocated.
-  // Return value: 0 on success ,-1 on error.
+  // Return value: 0 on success, -1 on error.
   virtual int CreateFrame(int size_y, const uint8_t* buffer_y,
                           int size_u, const uint8_t* buffer_u,
                           int size_v, const uint8_t* buffer_v,
@@ -66,9 +66,13 @@ class I420VideoFrame {
 
   // Copy frame: If required size is bigger than allocated one, new buffers of
   // adequate size will be allocated.
-  // Return value: 0 on success ,-1 on error.
+  // Return value: 0 on success, -1 on error.
   virtual int CopyFrame(const I420VideoFrame& videoFrame);
 
+  // Make a copy of |this|. The caller owns the returned frame.
+  // Return value: a new frame on success, NULL on error.
+  virtual I420VideoFrame* CloneFrame() const;
+
   // Swap Frame.
   virtual void SwapFrame(I420VideoFrame* videoFrame);
 
@@ -49,6 +49,7 @@ class TextureVideoFrame : public I420VideoFrame {
                           int stride_u,
                           int stride_v) OVERRIDE;
   virtual int CopyFrame(const I420VideoFrame& videoFrame) OVERRIDE;
+  virtual I420VideoFrame* CloneFrame() const OVERRIDE;
   virtual void SwapFrame(I420VideoFrame* videoFrame) OVERRIDE;
   virtual uint8_t* buffer(PlaneType type) OVERRIDE;
   virtual const uint8_t* buffer(PlaneType type) const OVERRIDE;
@@ -57,6 +57,11 @@ int TextureVideoFrame::CopyFrame(const I420VideoFrame& videoFrame) {
   return -1;
 }
 
+I420VideoFrame* TextureVideoFrame::CloneFrame() const {
+  return new TextureVideoFrame(
+      handle_, width(), height(), timestamp(), render_time_ms());
+}
+
 void TextureVideoFrame::SwapFrame(I420VideoFrame* videoFrame) {
   assert(false);  // Should not be called.
 }
@@ -8,9 +8,10 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
+#include "webrtc/common_video/interface/texture_video_frame.h"
+
 #include "testing/gtest/include/gtest/gtest.h"
 #include "webrtc/common_video/interface/native_handle.h"
-#include "webrtc/common_video/interface/texture_video_frame.h"
 
 namespace webrtc {
 
@@ -27,6 +28,9 @@ class NativeHandleImpl : public NativeHandle {
   int32_t ref_count_;
 };
 
+bool EqualTextureFrames(const I420VideoFrame& frame1,
+                        const I420VideoFrame& frame2);
+
 TEST(TestTextureVideoFrame, InitialValues) {
   NativeHandleImpl handle;
   TextureVideoFrame frame(&handle, 640, 480, 100, 10);
@@ -55,4 +59,21 @@ TEST(TestTextureVideoFrame, RefCount) {
   EXPECT_EQ(0, handle.ref_count());
 }
 
+TEST(TestTextureVideoFrame, CloneFrame) {
+  NativeHandleImpl handle;
+  TextureVideoFrame frame1(&handle, 640, 480, 100, 200);
+  scoped_ptr<I420VideoFrame> frame2(frame1.CloneFrame());
+  EXPECT_TRUE(frame2.get() != NULL);
+  EXPECT_TRUE(EqualTextureFrames(frame1, *frame2));
+}
+
+bool EqualTextureFrames(const I420VideoFrame& frame1,
+                        const I420VideoFrame& frame2) {
+  return ((frame1.native_handle() == frame2.native_handle()) &&
+          (frame1.width() == frame2.width()) &&
+          (frame1.height() == frame2.height()) &&
+          (frame1.timestamp() == frame2.timestamp()) &&
+          (frame1.render_time_ms() == frame2.render_time_ms()));
+}
+
 }  // namespace webrtc
webrtc/modules/utility/interface/mock/mock_process_thread.h (new file, 29 lines)
@@ -0,0 +1,29 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UTILITY_INTERFACE_MOCK_PROCESS_THREAD_H_
+#define WEBRTC_MODULES_UTILITY_INTERFACE_MOCK_PROCESS_THREAD_H_
+
+#include "webrtc/modules/utility/interface/process_thread.h"
+
+#include "testing/gmock/include/gmock/gmock.h"
+
+namespace webrtc {
+
+class MockProcessThread : public ProcessThread {
+ public:
+  MOCK_METHOD0(Start, int32_t());
+  MOCK_METHOD0(Stop, int32_t());
+  MOCK_METHOD1(RegisterModule, int32_t(Module* module));
+  MOCK_METHOD1(DeRegisterModule, int32_t(const Module* module));
+};
+
+}  // namespace webrtc
+#endif  // WEBRTC_MODULES_UTILITY_INTERFACE_MOCK_PROCESS_THREAD_H_
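For reference, a minimal sketch (not part of this CL, test name illustrative) of how this mock is consumed in the updated unit tests: it is wrapped in NiceMock<> so that calls from the code under test without explicit expectations do not trigger gmock warnings.

#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/modules/utility/interface/mock/mock_process_thread.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"

namespace webrtc {

TEST(MockProcessThreadExample, UseAsNiceMock) {
  // NiceMock suppresses "uninteresting call" warnings when the code under
  // test calls RegisterModule()/DeRegisterModule(); this mirrors how the
  // vie_remb and encoder_state_feedback tests below use the mock.
  scoped_ptr<MockProcessThread> process_thread(
      new ::testing::NiceMock<MockProcessThread>);
  EXPECT_TRUE(process_thread.get() != NULL);
}

}  // namespace webrtc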
@@ -38,12 +38,7 @@ VideoFramesQueue::~VideoFramesQueue() {
 
 int32_t VideoFramesQueue::AddFrame(const I420VideoFrame& newFrame) {
   if (newFrame.native_handle() != NULL) {
-    _incomingFrames.push_back(new TextureVideoFrame(
-        static_cast<NativeHandle*>(newFrame.native_handle()),
-        newFrame.width(),
-        newFrame.height(),
-        newFrame.timestamp(),
-        newFrame.render_time_ms()));
+    _incomingFrames.push_back(newFrame.CloneFrame());
     return 0;
   }
 
@@ -0,0 +1,50 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_INCLUDE_MOCK_MOCK_VIDEO_CAPTURE_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_INCLUDE_MOCK_MOCK_VIDEO_CAPTURE_H_
+
+#include "webrtc/modules/video_capture/include/video_capture.h"
+#include "testing/gmock/include/gmock/gmock.h"
+
+namespace webrtc {
+
+class MockVideoCaptureModule : public VideoCaptureModule {
+ public:
+  // from Module
+  MOCK_METHOD0(TimeUntilNextProcess, int32_t());
+  MOCK_METHOD0(Process, int32_t());
+
+  // from RefCountedModule
+  MOCK_METHOD0(AddRef, int32_t());
+  MOCK_METHOD0(Release, int32_t());
+
+  // from VideoCaptureModule
+  MOCK_METHOD1(RegisterCaptureDataCallback,
+      void(VideoCaptureDataCallback& dataCallback));
+  MOCK_METHOD0(DeRegisterCaptureDataCallback, void());
+  MOCK_METHOD1(RegisterCaptureCallback, void(VideoCaptureFeedBack& callBack));
+  MOCK_METHOD0(DeRegisterCaptureCallback, void());
+  MOCK_METHOD1(StartCapture, int32_t(const VideoCaptureCapability& capability));
+  MOCK_METHOD0(StopCapture, int32_t());
+  MOCK_CONST_METHOD0(CurrentDeviceName, const char*());
+  MOCK_METHOD0(CaptureStarted, bool());
+  MOCK_METHOD1(CaptureSettings, int32_t(VideoCaptureCapability& settings));
+  MOCK_METHOD1(SetCaptureDelay, void(int32_t delayMS));
+  MOCK_METHOD0(CaptureDelay, int32_t());
+  MOCK_METHOD1(SetCaptureRotation, int32_t(VideoCaptureRotation rotation));
+  MOCK_METHOD1(GetEncodeInterface,
+               VideoCaptureEncodeInterface*(const VideoCodec& codec));
+  MOCK_METHOD1(EnableFrameRateCallback, void(const bool enable));
+  MOCK_METHOD1(EnableNoPictureAlarm, void(const bool enable));
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_CAPTURE_INCLUDE_MOCK_MOCK_VIDEO_CAPTURE_H_
@@ -60,6 +60,7 @@
               'link_settings': {
                 'xcode_settings': {
                   'OTHER_LDFLAGS': [
+                    '-framework CoreVideo',
                     '-framework QTKit',
                   ],
                 },
@@ -55,12 +55,7 @@ int32_t VideoRenderFrames::AddFrame(I420VideoFrame* new_frame) {
   }
 
   if (new_frame->native_handle() != NULL) {
-    incoming_frames_.push_back(new TextureVideoFrame(
-        static_cast<NativeHandle*>(new_frame->native_handle()),
-        new_frame->width(),
-        new_frame->height(),
-        new_frame->timestamp(),
-        new_frame->render_time_ms()));
+    incoming_frames_.push_back(new_frame->CloneFrame());
     return static_cast<int32_t>(incoming_frames_.size());
   }
 
@@ -17,26 +17,17 @@
 
 #include "webrtc/common.h"
 #include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
-#include "webrtc/modules/utility/interface/process_thread.h"
+#include "webrtc/modules/utility/interface/mock/mock_process_thread.h"
 #include "webrtc/system_wrappers/interface/scoped_ptr.h"
 #include "webrtc/video_engine/vie_encoder.h"
 
-namespace webrtc {
+using ::testing::NiceMock;
 
-// TODO(mflodman) Create a common mock in module utility.
-class TestProcessThread : public ProcessThread {
- public:
-  TestProcessThread() {}
-  ~TestProcessThread() {}
-  virtual int32_t Start() { return 0; }
-  virtual int32_t Stop() { return 0; }
-  virtual int32_t RegisterModule(Module* module) { return 0; }
-  virtual int32_t DeRegisterModule(const Module* module) { return 0; }
-};
+namespace webrtc {
 
 class MockVieEncoder : public ViEEncoder {
  public:
-  explicit MockVieEncoder(TestProcessThread* process_thread)
+  explicit MockVieEncoder(ProcessThread* process_thread)
       : ViEEncoder(1, 1, 1, config_, *process_thread, NULL) {}
   ~MockVieEncoder() {}
 
@@ -55,10 +46,10 @@ class MockVieEncoder : public ViEEncoder {
 class VieKeyRequestTest : public ::testing::Test {
  protected:
   virtual void SetUp() {
-    process_thread_.reset(new TestProcessThread());
+    process_thread_.reset(new NiceMock<MockProcessThread>);
     encoder_state_feedback_.reset(new EncoderStateFeedback());
   }
-  scoped_ptr<TestProcessThread> process_thread_;
+  scoped_ptr<MockProcessThread> process_thread_;
   scoped_ptr<EncoderStateFeedback> encoder_state_feedback_;
 };
 
webrtc/video_engine/mock/mock_vie_frame_provider_base.h (new file, 33 lines)
@@ -0,0 +1,33 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef WEBRTC_VIDEO_ENGINE_MOCK_MOCK_VIE_FRAME_PROVIDER_BASE_H_
+#define WEBRTC_VIDEO_ENGINE_MOCK_MOCK_VIE_FRAME_PROVIDER_BASE_H_
+
+#include "webrtc/video_engine/vie_frame_provider_base.h"
+#include "testing/gmock/include/gmock/gmock.h"
+
+namespace webrtc {
+
+class MockViEFrameCallback : public ViEFrameCallback {
+ public:
+  MOCK_METHOD4(DeliverFrame,
+               void(int id,
+                    I420VideoFrame* video_frame,
+                    int num_csrcs,
+                    const uint32_t CSRC[kRtpCsrcSize]));
+  MOCK_METHOD2(DelayChanged, void(int id, int frame_delay));
+  MOCK_METHOD3(GetPreferedFrameSettings,
+               int(int* width, int* height, int* frame_rate));
+  MOCK_METHOD1(ProviderDestroyed, void(int id));
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_MOCK_MOCK_VIE_FRAME_PROVIDER_BASE_H_
@@ -131,6 +131,7 @@
             'encoder_state_feedback_unittest.cc',
             'overuse_frame_detector_unittest.cc',
             'stream_synchronization_unittest.cc',
+            'vie_capturer_unittest.cc',
            'vie_codec_unittest.cc',
            'vie_remb_unittest.cc',
          ],
@@ -10,6 +10,7 @@
 
 #include "webrtc/video_engine/vie_capturer.h"
 
+#include "webrtc/common_video/interface/texture_video_frame.h"
 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
 #include "webrtc/modules/interface/module_common_types.h"
 #include "webrtc/modules/utility/interface/process_thread.h"
@@ -346,11 +347,16 @@ void ViECapturer::OnIncomingCapturedFrame(const int32_t capture_id,
   TRACE_EVENT_ASYNC_BEGIN1("webrtc", "Video", video_frame.render_time_ms(),
                            "render_time", video_frame.render_time_ms());
 
-  captured_frame_.SwapFrame(&video_frame);
+  if (video_frame.native_handle() != NULL) {
+    captured_frame_.reset(video_frame.CloneFrame());
+  } else {
+    if (captured_frame_ == NULL || captured_frame_->native_handle() != NULL)
+      captured_frame_.reset(new I420VideoFrame());
+    captured_frame_->SwapFrame(&video_frame);
+  }
   capture_event_.Set();
-  overuse_detector_->FrameCaptured(captured_frame_.width(),
-                                   captured_frame_.height());
-  return;
+  overuse_detector_->FrameCaptured(captured_frame_->width(),
+                                   captured_frame_->height());
 }
 
 void ViECapturer::OnCaptureDelayChanged(const int32_t id,
@@ -473,7 +479,9 @@ bool ViECapturer::ViECaptureProcess() {
     deliver_cs_->Enter();
     if (SwapCapturedAndDeliverFrameIfAvailable()) {
       encode_start_time = Clock::GetRealTimeClock()->TimeInMilliseconds();
-      DeliverI420Frame(&deliver_frame_);
+      DeliverI420Frame(deliver_frame_.get());
+      if (deliver_frame_->native_handle() != NULL)
+        deliver_frame_.reset();  // Release the texture so it can be reused.
     }
     deliver_cs_->Leave();
     if (current_brightness_level_ != reported_brightness_level_) {
@@ -494,6 +502,11 @@ bool ViECapturer::ViECaptureProcess() {
 }
 
 void ViECapturer::DeliverI420Frame(I420VideoFrame* video_frame) {
+  if (video_frame->native_handle() != NULL) {
+    ViEFrameProviderBase::DeliverFrame(video_frame);
+    return;
+  }
+
   // Apply image enhancement and effect filter.
   if (deflicker_frame_stats_) {
     if (image_proc_module_->GetFrameStats(deflicker_frame_stats_,
@@ -608,11 +621,21 @@ void ViECapturer::OnNoPictureAlarm(const int32_t id,
 
 bool ViECapturer::SwapCapturedAndDeliverFrameIfAvailable() {
   CriticalSectionScoped cs(capture_cs_.get());
-  if (captured_frame_.IsZeroSize())
+  if (captured_frame_ == NULL)
     return false;
 
-  deliver_frame_.SwapFrame(&captured_frame_);
-  captured_frame_.ResetSize();
+  if (captured_frame_->native_handle() != NULL) {
+    deliver_frame_.reset(captured_frame_.release());
+    return true;
+  }
+
+  if (captured_frame_->IsZeroSize())
+    return false;
+
+  if (deliver_frame_ == NULL)
+    deliver_frame_.reset(new I420VideoFrame());
+  deliver_frame_->SwapFrame(captured_frame_.get());
+  captured_frame_->ResetSize();
   return true;
 }
 
@@ -169,8 +169,8 @@ class ViECapturer
   EventWrapper& capture_event_;
   EventWrapper& deliver_event_;
 
-  I420VideoFrame captured_frame_;
-  I420VideoFrame deliver_frame_;
+  scoped_ptr<I420VideoFrame> captured_frame_;
+  scoped_ptr<I420VideoFrame> deliver_frame_;
 
   // Image processing.
   ViEEffectFilter* effect_filter_;
webrtc/video_engine/vie_capturer_unittest.cc (new file, 263 lines)
@@ -0,0 +1,263 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file includes unit tests for ViECapturer.
+
+#include "webrtc/video_engine/vie_capturer.h"
+
+#include <vector>
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/common.h"
+#include "webrtc/common_video/interface/native_handle.h"
+#include "webrtc/common_video/interface/texture_video_frame.h"
+#include "webrtc/modules/utility/interface/mock/mock_process_thread.h"
+#include "webrtc/modules/video_capture/include/mock/mock_video_capture.h"
+#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/interface/event_wrapper.h"
+#include "webrtc/system_wrappers/interface/ref_count.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/system_wrappers/interface/scoped_vector.h"
+#include "webrtc/video_engine/mock/mock_vie_frame_provider_base.h"
+
+using ::testing::_;
+using ::testing::Invoke;
+using ::testing::NiceMock;
+using ::testing::Return;
+using ::testing::WithArg;
+
+// If an output frame does not arrive in 500ms, the test will fail.
+#define FRAME_TIMEOUT_MS 500
+
+namespace webrtc {
+
+bool EqualFrames(const I420VideoFrame& frame1,
+                 const I420VideoFrame& frame2);
+bool EqualTextureFrames(const I420VideoFrame& frame1,
+                        const I420VideoFrame& frame2);
+bool EqualBufferFrames(const I420VideoFrame& frame1,
+                       const I420VideoFrame& frame2);
+bool EqualFramesVector(const ScopedVector<I420VideoFrame>& frames1,
+                       const ScopedVector<I420VideoFrame>& frames2);
+I420VideoFrame* CreateI420VideoFrame(uint8_t length);
+
+class FakeNativeHandle : public NativeHandle {
+ public:
+  FakeNativeHandle() {}
+  virtual ~FakeNativeHandle() {}
+  virtual void* GetHandle() { return NULL; }
+};
+
+class ViECapturerTest : public ::testing::Test {
+ protected:
+  ViECapturerTest()
+      : mock_capture_module_(new NiceMock<MockVideoCaptureModule>()),
+        mock_process_thread_(new NiceMock<MockProcessThread>),
+        mock_frame_callback_(new NiceMock<MockViEFrameCallback>),
+        data_callback_(NULL),
+        output_frame_event_(EventWrapper::Create()) {
+  }
+
+  virtual void SetUp() {
+    EXPECT_CALL(*mock_capture_module_, RegisterCaptureDataCallback(_))
+        .WillRepeatedly(Invoke(this, &ViECapturerTest::SetCaptureDataCallback));
+    EXPECT_CALL(*mock_frame_callback_, DeliverFrame(_, _, _, _))
+        .WillRepeatedly(
+            WithArg<1>(Invoke(this, &ViECapturerTest::AddOutputFrame)));
+
+    Config config;
+    vie_capturer_.reset(
+        ViECapturer::CreateViECapture(
+            0, 0, config, mock_capture_module_.get(), *mock_process_thread_));
+    vie_capturer_->RegisterFrameCallback(0, mock_frame_callback_.get());
+  }
+
+  virtual void TearDown() {
+    // ViECapturer accesses |mock_process_thread_| in destructor and should
+    // be deleted first.
+    vie_capturer_.reset();
+  }
+
+  void SetCaptureDataCallback(VideoCaptureDataCallback& data_callback) {
+    data_callback_ = &data_callback;
+  }
+
+  void AddInputFrame(I420VideoFrame* frame) {
+    data_callback_->OnIncomingCapturedFrame(0, *frame);
+  }
+
+  void AddOutputFrame(I420VideoFrame* frame) {
+    if (frame->native_handle() == NULL)
+      output_frame_ybuffers_.push_back(frame->buffer(kYPlane));
+    // Clone the frames because ViECapturer owns the frames.
+    output_frames_.push_back(frame->CloneFrame());
+    output_frame_event_->Set();
+  }
+
+  void WaitOutputFrame() {
+    EXPECT_EQ(kEventSignaled, output_frame_event_->Wait(FRAME_TIMEOUT_MS));
+  }
+
+  scoped_ptr<MockVideoCaptureModule> mock_capture_module_;
+  scoped_ptr<MockProcessThread> mock_process_thread_;
+  scoped_ptr<MockViEFrameCallback> mock_frame_callback_;
+
+  // Used to send input capture frames to ViECapturer.
+  VideoCaptureDataCallback* data_callback_;
+
+  scoped_ptr<ViECapturer> vie_capturer_;
+
+  // Input capture frames of ViECapturer.
+  ScopedVector<I420VideoFrame> input_frames_;
+
+  // Indicate an output frame has arrived.
+  scoped_ptr<EventWrapper> output_frame_event_;
+
+  // Output delivered frames of ViECaptuer.
+  ScopedVector<I420VideoFrame> output_frames_;
+
+  // The pointers of Y plane buffers of output frames. This is used to verify
+  // the frame are swapped and not copied.
+  std::vector<uint8_t*> output_frame_ybuffers_;
+};
+
+TEST_F(ViECapturerTest, TestTextureFrames) {
+  const int kNumFrame = 3;
+  for (int i = 0 ; i < kNumFrame; ++i) {
+    webrtc::RefCountImpl<FakeNativeHandle>* handle =
+              new webrtc::RefCountImpl<FakeNativeHandle>();
+    input_frames_.push_back(new TextureVideoFrame(handle, i, i, i, i));
+    AddInputFrame(input_frames_[i]);
+    WaitOutputFrame();
+  }
+
+  EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_));
+}
+
+TEST_F(ViECapturerTest, TestI420Frames) {
+  const int kNumFrame = 4;
+  ScopedVector<I420VideoFrame> copied_input_frames;
+  std::vector<uint8_t*> ybuffer_pointers;
+  for (int i = 0; i < kNumFrame; ++i) {
+    input_frames_.push_back(CreateI420VideoFrame(static_cast<uint8_t>(i + 1)));
+    ybuffer_pointers.push_back(input_frames_[i]->buffer(kYPlane));
+    // Copy input frames because the buffer data will be swapped.
+    copied_input_frames.push_back(input_frames_[i]->CloneFrame());
+    AddInputFrame(input_frames_[i]);
+    WaitOutputFrame();
+  }
+
+  EXPECT_TRUE(EqualFramesVector(copied_input_frames, output_frames_));
+  // Make sure the buffer is swapped and not copied.
+  for (int i = 0; i < kNumFrame; ++i)
+    EXPECT_EQ(ybuffer_pointers[i], output_frame_ybuffers_[i]);
+  // The pipeline should be filled with frames with allocated buffers. Check
+  // the last input frame has the same allocated size after swapping.
+  EXPECT_EQ(input_frames_.back()->allocated_size(kYPlane),
+            copied_input_frames.back()->allocated_size(kYPlane));
+}
+
+TEST_F(ViECapturerTest, TestI420FrameAfterTextureFrame) {
+  webrtc::RefCountImpl<FakeNativeHandle>* handle =
+      new webrtc::RefCountImpl<FakeNativeHandle>();
+  input_frames_.push_back(new TextureVideoFrame(handle, 1, 1, 1, 1));
+  AddInputFrame(input_frames_[0]);
+  WaitOutputFrame();
+
+  input_frames_.push_back(CreateI420VideoFrame(1));
+  scoped_ptr<I420VideoFrame> copied_input_frame(input_frames_[1]->CloneFrame());
+  AddInputFrame(copied_input_frame.get());
+  WaitOutputFrame();
+
+  EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_));
+}
+
+TEST_F(ViECapturerTest, TestTextureFrameAfterI420Frame) {
+  input_frames_.push_back(CreateI420VideoFrame(1));
+  scoped_ptr<I420VideoFrame> copied_input_frame(input_frames_[0]->CloneFrame());
+  AddInputFrame(copied_input_frame.get());
+  WaitOutputFrame();
+
+  webrtc::RefCountImpl<FakeNativeHandle>* handle =
+      new webrtc::RefCountImpl<FakeNativeHandle>();
+  input_frames_.push_back(new TextureVideoFrame(handle, 1, 1, 1, 1));
+  AddInputFrame(input_frames_[1]);
+  WaitOutputFrame();
+
+  EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_));
+}
+
+bool EqualFrames(const I420VideoFrame& frame1,
+                 const I420VideoFrame& frame2) {
+  if (frame1.native_handle() != NULL || frame2.native_handle() != NULL)
+    return EqualTextureFrames(frame1, frame2);
+  return EqualBufferFrames(frame1, frame2);
+}
+
+bool EqualTextureFrames(const I420VideoFrame& frame1,
+                        const I420VideoFrame& frame2) {
+  return ((frame1.native_handle() == frame2.native_handle()) &&
+          (frame1.width() == frame2.width()) &&
+          (frame1.height() == frame2.height()) &&
+          (frame1.timestamp() == frame2.timestamp()) &&
+          (frame1.render_time_ms() == frame2.render_time_ms()));
+}
+
+bool EqualBufferFrames(const I420VideoFrame& frame1,
+                       const I420VideoFrame& frame2) {
+  return ((frame1.width() == frame2.width()) &&
+          (frame1.height() == frame2.height()) &&
+          (frame1.stride(kYPlane) == frame2.stride(kYPlane)) &&
+          (frame1.stride(kUPlane) == frame2.stride(kUPlane)) &&
+          (frame1.stride(kVPlane) == frame2.stride(kVPlane)) &&
+          (frame1.timestamp() == frame2.timestamp()) &&
+          (frame1.ntp_time_ms() == frame2.ntp_time_ms()) &&
+          (frame1.render_time_ms() == frame2.render_time_ms()) &&
+          (frame1.allocated_size(kYPlane) == frame2.allocated_size(kYPlane)) &&
+          (frame1.allocated_size(kUPlane) == frame2.allocated_size(kUPlane)) &&
+          (frame1.allocated_size(kVPlane) == frame2.allocated_size(kVPlane)) &&
+          (memcmp(frame1.buffer(kYPlane), frame2.buffer(kYPlane),
+                  frame1.allocated_size(kYPlane)) == 0) &&
+          (memcmp(frame1.buffer(kUPlane), frame2.buffer(kUPlane),
+                  frame1.allocated_size(kUPlane)) == 0) &&
+          (memcmp(frame1.buffer(kVPlane), frame2.buffer(kVPlane),
+                  frame1.allocated_size(kVPlane)) == 0));
+}
+
+bool EqualFramesVector(const ScopedVector<I420VideoFrame>& frames1,
+                       const ScopedVector<I420VideoFrame>& frames2) {
+  if (frames1.size() != frames2.size())
+    return false;
+  for (size_t i = 0; i < frames1.size(); ++i) {
+    if (!EqualFrames(*frames1[i], *frames2[i]))
+      return false;
+  }
+  return true;
+}
+
+I420VideoFrame* CreateI420VideoFrame(uint8_t data) {
+  I420VideoFrame* frame = new I420VideoFrame();
+  const int width = 36;
+  const int height = 24;
+  const int kSizeY = width * height * 2;
+  const int kSizeUV = width * height;
+  uint8_t buffer[kSizeY];
+  memset(buffer, data, kSizeY);
+  frame->CreateFrame(
+      kSizeY, buffer, kSizeUV, buffer, kSizeUV, buffer, width, height, width,
+      width / 2, width / 2);
+  frame->set_timestamp(data);
+  frame->set_ntp_time_ms(data);
+  frame->set_render_time_ms(data);
+  return frame;
+}
+
+}  // namespace webrtc
@@ -487,6 +487,10 @@ void ViEEncoder::DeliverFrame(int id,
     }
     encoder_paused_and_dropped_frame_ = false;
   }
+  if (video_frame->native_handle() != NULL) {
+    // TODO(wuchengli): add texture support. http://crbug.com/362437
+    return;
+  }
 
   // Convert render time, in ms, to RTP timestamp.
   const int kMsToRtpTimestamp = 90;
@@ -18,35 +18,26 @@
 
 #include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
 #include "webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h"
-#include "webrtc/modules/utility/interface/process_thread.h"
+#include "webrtc/modules/utility/interface/mock/mock_process_thread.h"
 #include "webrtc/system_wrappers/interface/scoped_ptr.h"
 #include "webrtc/system_wrappers/interface/tick_util.h"
 #include "webrtc/video_engine/vie_remb.h"
 
 using ::testing::_;
 using ::testing::AnyNumber;
+using ::testing::NiceMock;
 using ::testing::Return;
 
 namespace webrtc {
 
-class TestProcessThread : public ProcessThread {
- public:
-  explicit TestProcessThread() {}
-  ~TestProcessThread() {}
-  virtual int32_t Start() { return 0; }
-  virtual int32_t Stop() { return 0; }
-  virtual int32_t RegisterModule(Module* module) { return 0; }
-  virtual int32_t DeRegisterModule(const Module* module) { return 0; }
-};
-
 class ViERembTest : public ::testing::Test {
  protected:
   virtual void SetUp() {
     TickTime::UseFakeClock(12345);
-    process_thread_.reset(new TestProcessThread);
+    process_thread_.reset(new NiceMock<MockProcessThread>);
     vie_remb_.reset(new VieRemb());
   }
-  scoped_ptr<TestProcessThread> process_thread_;
+  scoped_ptr<MockProcessThread> process_thread_;
   scoped_ptr<VieRemb> vie_remb_;
 };
 
Author: wuchengli@chromium.org