From d7452a016812ab1de69c3d7a53caca5b06c64990 Mon Sep 17 00:00:00 2001 From: "magjed@webrtc.org" Date: Tue, 10 Mar 2015 15:12:26 +0000 Subject: [PATCH] Revert "Make the entry point for VideoFrames to webrtc const ref I420VideoFrame." This reverts commit r8633. Reason for revert: Performance regressions in browser_tests_new_vie and webrtc_perf_tests. BUG=1128,chromium:465287,chromium:465306 TBR=pbos,mflodman,perkj Review URL: https://webrtc-codereview.appspot.com/46549004 Cr-Commit-Position: refs/heads/master@{#8670} git-svn-id: http://webrtc.googlecode.com/svn/trunk@8670 4adac7df-926f-26a2-2b94-8c16560cd09d --- talk/media/webrtc/webrtcvideocapturer.cc | 7 +- talk/media/webrtc/webrtcvideocapturer.h | 4 +- talk/media/webrtc/webrtcvideoengine2.cc | 6 +- .../webrtc/webrtcvideoengine2_unittest.cc | 5 +- .../webrtc/webrtcvideoengine2_unittest.h | 2 +- webrtc/common_video/i420_video_frame.cc | 8 -- .../common_video/i420_video_frame_unittest.cc | 57 -------------- .../include/video_capture_defines.h | 5 +- .../test/video_capture_unittest.cc | 75 ++++++++++++++++++- .../video_capture/video_capture_impl.cc | 34 ++++++++- .../video_capture/video_capture_impl.h | 14 +++- webrtc/test/frame_generator_capturer.cc | 6 +- webrtc/test/vcm_capturer.cc | 4 +- webrtc/test/vcm_capturer.h | 2 +- webrtc/video/end_to_end_tests.cc | 8 +- webrtc/video/full_stack.cc | 8 +- webrtc/video/send_statistics_proxy.cc | 13 ++-- webrtc/video/send_statistics_proxy.h | 15 +++- .../video/send_statistics_proxy_unittest.cc | 10 +++ webrtc/video/video_send_stream.cc | 8 +- webrtc/video/video_send_stream.h | 2 +- webrtc/video/video_send_stream_tests.cc | 43 +++++++++-- webrtc/video_engine/include/vie_capture.h | 2 +- webrtc/video_engine/vie_capturer.cc | 72 +++++++----------- webrtc/video_engine/vie_capturer.h | 24 +++--- webrtc/video_engine/vie_capturer_unittest.cc | 50 +++++-------- webrtc/video_engine/vie_encoder.cc | 8 ++ webrtc/video_frame.h | 8 +- webrtc/video_send_stream.h | 2 +- 29 files changed, 281 insertions(+), 221 deletions(-) diff --git a/talk/media/webrtc/webrtcvideocapturer.cc b/talk/media/webrtc/webrtcvideocapturer.cc index 0e8f56bfd..aaa6f1e48 100644 --- a/talk/media/webrtc/webrtcvideocapturer.cc +++ b/talk/media/webrtc/webrtcvideocapturer.cc @@ -353,9 +353,8 @@ bool WebRtcVideoCapturer::GetPreferredFourccs( return true; } -void WebRtcVideoCapturer::OnIncomingCapturedFrame( - const int32_t id, - const webrtc::I420VideoFrame& sample) { +void WebRtcVideoCapturer::OnIncomingCapturedFrame(const int32_t id, + webrtc::I420VideoFrame& sample) { // This would be a normal CritScope, except that it's possible that: // (1) whatever system component producing this frame has taken a lock, and // (2) Stop() probably calls back into that system component, which may take @@ -396,7 +395,7 @@ void WebRtcVideoCapturer::OnCaptureDelayChanged(const int32_t id, } void WebRtcVideoCapturer::SignalFrameCapturedOnStartThread( - const webrtc::I420VideoFrame* frame) { + webrtc::I420VideoFrame* frame) { DCHECK(start_thread_->IsCurrent()); // Signal down stream components on captured frame. // The CapturedFrame class doesn't support planes. We have to ExtractBuffer diff --git a/talk/media/webrtc/webrtcvideocapturer.h b/talk/media/webrtc/webrtcvideocapturer.h index 56896f9cd..c0f7807e6 100644 --- a/talk/media/webrtc/webrtcvideocapturer.h +++ b/talk/media/webrtc/webrtcvideocapturer.h @@ -81,7 +81,7 @@ class WebRtcVideoCapturer : public VideoCapturer, private: // Callback when a frame is captured by camera. 
virtual void OnIncomingCapturedFrame(const int32_t id, - const webrtc::I420VideoFrame& frame); + webrtc::I420VideoFrame& frame); virtual void OnCaptureDelayChanged(const int32_t id, const int32_t delay); @@ -91,7 +91,7 @@ class WebRtcVideoCapturer : public VideoCapturer, // directly from OnIncomingCapturedFrame. // TODO(tommi): Remove this workaround when we've updated the WebRTC capturers // to follow the same contract. - void SignalFrameCapturedOnStartThread(const webrtc::I420VideoFrame* frame); + void SignalFrameCapturedOnStartThread(webrtc::I420VideoFrame* frame); rtc::scoped_ptr factory_; webrtc::VideoCaptureModule* module_; diff --git a/talk/media/webrtc/webrtcvideoengine2.cc b/talk/media/webrtc/webrtcvideoengine2.cc index 23c8a9d22..5886a2e62 100644 --- a/talk/media/webrtc/webrtcvideoengine2.cc +++ b/talk/media/webrtc/webrtcvideoengine2.cc @@ -1427,11 +1427,11 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::InputFrame( SetDimensions( video_frame_.width(), video_frame_.height(), capturer->IsScreencast()); - LOG(LS_VERBOSE) << "IncomingCapturedFrame: " << video_frame_.width() << "x" + LOG(LS_VERBOSE) << "SwapFrame: " << video_frame_.width() << "x" << video_frame_.height() << " -> (codec) " << parameters_.encoder_config.streams.back().width << "x" << parameters_.encoder_config.streams.back().height; - stream_->Input()->IncomingCapturedFrame(video_frame_); + stream_->Input()->SwapFrame(&video_frame_); } bool WebRtcVideoChannel2::WebRtcVideoSendStream::SetCapturer( @@ -1451,7 +1451,7 @@ bool WebRtcVideoChannel2::WebRtcVideoSendStream::SetCapturer( CreateBlackFrame(&black_frame, last_dimensions_.width, last_dimensions_.height); - stream_->Input()->IncomingCapturedFrame(black_frame); + stream_->Input()->SwapFrame(&black_frame); } capturer_ = NULL; diff --git a/talk/media/webrtc/webrtcvideoengine2_unittest.cc b/talk/media/webrtc/webrtcvideoengine2_unittest.cc index 4cf0462b0..9b77ca4c0 100644 --- a/talk/media/webrtc/webrtcvideoengine2_unittest.cc +++ b/talk/media/webrtc/webrtcvideoengine2_unittest.cc @@ -139,10 +139,9 @@ int FakeVideoSendStream::GetLastHeight() const { return last_frame_.height(); } -void FakeVideoSendStream::IncomingCapturedFrame( - const webrtc::I420VideoFrame& frame) { +void FakeVideoSendStream::SwapFrame(webrtc::I420VideoFrame* frame) { ++num_swapped_frames_; - last_frame_.ShallowCopy(frame); + last_frame_.SwapFrame(frame); } void FakeVideoSendStream::SetStats( diff --git a/talk/media/webrtc/webrtcvideoengine2_unittest.h b/talk/media/webrtc/webrtcvideoengine2_unittest.h index 7032dbe92..0b598014a 100644 --- a/talk/media/webrtc/webrtcvideoengine2_unittest.h +++ b/talk/media/webrtc/webrtcvideoengine2_unittest.h @@ -54,7 +54,7 @@ class FakeVideoSendStream : public webrtc::VideoSendStream, void SetStats(const webrtc::VideoSendStream::Stats& stats); private: - void IncomingCapturedFrame(const webrtc::I420VideoFrame& frame) override; + void SwapFrame(webrtc::I420VideoFrame* frame) override; webrtc::VideoSendStream::Stats GetStats() override; bool ReconfigureVideoEncoder( diff --git a/webrtc/common_video/i420_video_frame.cc b/webrtc/common_video/i420_video_frame.cc index 5b26cbbc0..0afdf10c6 100644 --- a/webrtc/common_video/i420_video_frame.cc +++ b/webrtc/common_video/i420_video_frame.cc @@ -139,14 +139,6 @@ int I420VideoFrame::CopyFrame(const I420VideoFrame& videoFrame) { return 0; } -void I420VideoFrame::ShallowCopy(const I420VideoFrame& videoFrame) { - video_frame_buffer_ = videoFrame.video_frame_buffer(); - timestamp_ = videoFrame.timestamp_; - ntp_time_ms_ = 
videoFrame.ntp_time_ms_; - render_time_ms_ = videoFrame.render_time_ms_; - rotation_ = videoFrame.rotation_; -} - I420VideoFrame* I420VideoFrame::CloneFrame() const { rtc::scoped_ptr new_frame(new I420VideoFrame()); if (new_frame->CopyFrame(*this) == -1) { diff --git a/webrtc/common_video/i420_video_frame_unittest.cc b/webrtc/common_video/i420_video_frame_unittest.cc index e02dded99..013382eab 100644 --- a/webrtc/common_video/i420_video_frame_unittest.cc +++ b/webrtc/common_video/i420_video_frame_unittest.cc @@ -130,63 +130,6 @@ TEST(TestI420VideoFrame, CopyFrame) { EXPECT_TRUE(EqualFrames(small_frame, big_frame)); } -TEST(TestI420VideoFrame, ShallowCopy) { - uint32_t timestamp = 1; - int64_t ntp_time_ms = 2; - int64_t render_time_ms = 3; - int stride_y = 15; - int stride_u = 10; - int stride_v = 10; - int width = 15; - int height = 15; - - const int kSizeY = 400; - const int kSizeU = 100; - const int kSizeV = 100; - const VideoRotation kRotation = kVideoRotation_270; - uint8_t buffer_y[kSizeY]; - uint8_t buffer_u[kSizeU]; - uint8_t buffer_v[kSizeV]; - memset(buffer_y, 16, kSizeY); - memset(buffer_u, 8, kSizeU); - memset(buffer_v, 4, kSizeV); - I420VideoFrame frame1; - EXPECT_EQ(0, frame1.CreateFrame(kSizeY, buffer_y, kSizeU, buffer_u, kSizeV, - buffer_v, width, height, stride_y, stride_u, - stride_v, kRotation)); - frame1.set_timestamp(timestamp); - frame1.set_ntp_time_ms(ntp_time_ms); - frame1.set_render_time_ms(render_time_ms); - I420VideoFrame frame2; - frame2.ShallowCopy(frame1); - - // To be able to access the buffers, we need const pointers to the frames. - const I420VideoFrame* const_frame1_ptr = &frame1; - const I420VideoFrame* const_frame2_ptr = &frame2; - - EXPECT_TRUE(const_frame1_ptr->buffer(kYPlane) == - const_frame2_ptr->buffer(kYPlane)); - EXPECT_TRUE(const_frame1_ptr->buffer(kUPlane) == - const_frame2_ptr->buffer(kUPlane)); - EXPECT_TRUE(const_frame1_ptr->buffer(kVPlane) == - const_frame2_ptr->buffer(kVPlane)); - - EXPECT_EQ(frame2.timestamp(), frame1.timestamp()); - EXPECT_EQ(frame2.ntp_time_ms(), frame1.ntp_time_ms()); - EXPECT_EQ(frame2.render_time_ms(), frame1.render_time_ms()); - EXPECT_EQ(frame2.rotation(), frame1.rotation()); - - frame2.set_timestamp(timestamp + 1); - frame2.set_ntp_time_ms(ntp_time_ms + 1); - frame2.set_render_time_ms(render_time_ms + 1); - frame2.set_rotation(kVideoRotation_90); - - EXPECT_NE(frame2.timestamp(), frame1.timestamp()); - EXPECT_NE(frame2.ntp_time_ms(), frame1.ntp_time_ms()); - EXPECT_NE(frame2.render_time_ms(), frame1.render_time_ms()); - EXPECT_NE(frame2.rotation(), frame1.rotation()); -} - TEST(TestI420VideoFrame, CloneFrame) { I420VideoFrame frame1; rtc::scoped_ptr frame2; diff --git a/webrtc/modules/video_capture/include/video_capture_defines.h b/webrtc/modules/video_capture/include/video_capture_defines.h index 63a5b7a8b..93a03f331 100644 --- a/webrtc/modules/video_capture/include/video_capture_defines.h +++ b/webrtc/modules/video_capture/include/video_capture_defines.h @@ -86,6 +86,9 @@ public: size_t videoFrameLength, const VideoCaptureCapability& frameInfo, int64_t captureTime = 0) = 0; + virtual int32_t IncomingI420VideoFrame(I420VideoFrame* video_frame, + int64_t captureTime = 0) = 0; + protected: ~VideoCaptureExternal() {} }; @@ -95,7 +98,7 @@ class VideoCaptureDataCallback { public: virtual void OnIncomingCapturedFrame(const int32_t id, - const I420VideoFrame& videoFrame) = 0; + I420VideoFrame& videoFrame) = 0; virtual void OnCaptureDelayChanged(const int32_t id, const int32_t delay) = 0; protected: diff --git 
a/webrtc/modules/video_capture/test/video_capture_unittest.cc b/webrtc/modules/video_capture/test/video_capture_unittest.cc index 2470b2d22..04a93a86b 100644 --- a/webrtc/modules/video_capture/test/video_capture_unittest.cc +++ b/webrtc/modules/video_capture/test/video_capture_unittest.cc @@ -104,9 +104,8 @@ class TestVideoCaptureCallback : public VideoCaptureDataCallback { printf("No of timing warnings %d\n", timing_warnings_); } - virtual void OnIncomingCapturedFrame( - const int32_t id, - const webrtc::I420VideoFrame& videoFrame) { + virtual void OnIncomingCapturedFrame(const int32_t id, + webrtc::I420VideoFrame& videoFrame) { CriticalSectionScoped cs(capture_cs_.get()); int height = videoFrame.height(); int width = videoFrame.width(); @@ -480,6 +479,76 @@ TEST_F(VideoCaptureExternalTest, TestExternalCapture) { EXPECT_TRUE(capture_callback_.CompareLastFrame(test_frame_)); } +// Test input of planar I420 frames. +// NOTE: flaky, sometimes fails on the last CompareLastFrame. +// http://code.google.com/p/webrtc/issues/detail?id=777 +TEST_F(VideoCaptureExternalTest, DISABLED_TestExternalCaptureI420) { + webrtc::I420VideoFrame frame_i420; + frame_i420.CopyFrame(test_frame_); + + EXPECT_EQ(0, + capture_input_interface_->IncomingI420VideoFrame(&frame_i420, 0)); + EXPECT_TRUE(capture_callback_.CompareLastFrame(frame_i420)); + + // Test with a frame with pitch not equal to width + memset(test_frame_.buffer(webrtc::kYPlane), 0xAA, + test_frame_.allocated_size(webrtc::kYPlane)); + memset(test_frame_.buffer(webrtc::kUPlane), 0xAA, + test_frame_.allocated_size(webrtc::kUPlane)); + memset(test_frame_.buffer(webrtc::kVPlane), 0xAA, + test_frame_.allocated_size(webrtc::kVPlane)); + webrtc::I420VideoFrame aligned_test_frame; + int y_pitch = kTestWidth + 2; + int u_pitch = kTestWidth / 2 + 1; + int v_pitch = u_pitch; + aligned_test_frame.CreateEmptyFrame(kTestWidth, kTestHeight, + y_pitch, u_pitch, v_pitch); + memset(aligned_test_frame.buffer(webrtc::kYPlane), 0, + kTestWidth * kTestHeight); + memset(aligned_test_frame.buffer(webrtc::kUPlane), 0, + (kTestWidth + 1) / 2 * (kTestHeight + 1) / 2); + memset(aligned_test_frame.buffer(webrtc::kVPlane), 0, + (kTestWidth + 1) / 2 * (kTestHeight + 1) / 2); + // Copy the test_frame_ to aligned_test_frame. + int y_width = kTestWidth; + int uv_width = kTestWidth / 2; + int y_rows = kTestHeight; + int uv_rows = kTestHeight / 2; + const webrtc::I420VideoFrame& const_test_frame = test_frame_; + const unsigned char* y_plane = const_test_frame.buffer(webrtc::kYPlane); + const unsigned char* u_plane = const_test_frame.buffer(webrtc::kUPlane); + const unsigned char* v_plane = const_test_frame.buffer(webrtc::kVPlane); + // Copy Y + unsigned char* current_pointer = aligned_test_frame.buffer(webrtc::kYPlane); + for (int i = 0; i < y_rows; ++i) { + memcpy(current_pointer, y_plane, y_width); + // Remove the alignment which ViE doesn't support. + current_pointer += y_pitch; + y_plane += y_width; + } + // Copy U + current_pointer = aligned_test_frame.buffer(webrtc::kUPlane); + for (int i = 0; i < uv_rows; ++i) { + memcpy(current_pointer, u_plane, uv_width); + // Remove the alignment which ViE doesn't support. + current_pointer += u_pitch; + u_plane += uv_width; + } + // Copy V + current_pointer = aligned_test_frame.buffer(webrtc::kVPlane); + for (int i = 0; i < uv_rows; ++i) { + memcpy(current_pointer, v_plane, uv_width); + // Remove the alignment which ViE doesn't support. 
+ current_pointer += v_pitch; + v_plane += uv_width; + } + frame_i420.CopyFrame(aligned_test_frame); + + EXPECT_EQ(0, + capture_input_interface_->IncomingI420VideoFrame(&frame_i420, 0)); + EXPECT_TRUE(capture_callback_.CompareLastFrame(test_frame_)); +} + // Test frame rate and no picture alarm. // Flaky on Win32, see webrtc:3270. TEST_F(VideoCaptureExternalTest, DISABLED_ON_WIN(FrameRate)) { diff --git a/webrtc/modules/video_capture/video_capture_impl.cc b/webrtc/modules/video_capture/video_capture_impl.cc index 3bf5e4bb8..a6a9cd1a4 100644 --- a/webrtc/modules/video_capture/video_capture_impl.cc +++ b/webrtc/modules/video_capture/video_capture_impl.cc @@ -157,6 +157,10 @@ VideoCaptureImpl::VideoCaptureImpl(const int32_t id) _captureCallBack(NULL), _lastProcessFrameCount(TickTime::Now()), _rotateFrame(kVideoRotation_0), + last_capture_time_(0), + delta_ntp_internal_ms_( + Clock::GetRealTimeClock()->CurrentNtpInMilliseconds() - + TickTime::MillisecondTimestamp()), apply_rotation_(true) { _requestedCapability.width = kDefaultWidth; _requestedCapability.height = kDefaultHeight; @@ -211,7 +215,8 @@ int32_t VideoCaptureImpl::CaptureDelay() return _setCaptureDelay; } -int32_t VideoCaptureImpl::DeliverCapturedFrame(I420VideoFrame& captureFrame) { +int32_t VideoCaptureImpl::DeliverCapturedFrame(I420VideoFrame& captureFrame, + int64_t capture_time) { UpdateFrameCount(); // frame count used for local frame rate callback. const bool callOnCaptureDelayChanged = _setCaptureDelay != _captureDelay; @@ -220,6 +225,19 @@ int32_t VideoCaptureImpl::DeliverCapturedFrame(I420VideoFrame& captureFrame) { _setCaptureDelay = _captureDelay; } + // Set the capture time + if (capture_time != 0) { + captureFrame.set_render_time_ms(capture_time - delta_ntp_internal_ms_); + } else { + captureFrame.set_render_time_ms(TickTime::MillisecondTimestamp()); + } + + if (captureFrame.render_time_ms() == last_capture_time_) { + // We don't allow the same capture time for two frames, drop this one. + return -1; + } + last_capture_time_ = captureFrame.render_time_ms(); + if (_dataCallBack) { if (callOnCaptureDelayChanged) { _dataCallBack->OnCaptureDelayChanged(_id, _captureDelay); @@ -303,10 +321,8 @@ int32_t VideoCaptureImpl::IncomingFrame( } else { _captureFrame.set_rotation(kVideoRotation_0); } - _captureFrame.set_ntp_time_ms(captureTime); - _captureFrame.set_render_time_ms(TickTime::MillisecondTimestamp()); - DeliverCapturedFrame(_captureFrame); + DeliverCapturedFrame(_captureFrame, captureTime); } else // Encoded format { @@ -317,6 +333,16 @@ int32_t VideoCaptureImpl::IncomingFrame( return 0; } +int32_t VideoCaptureImpl::IncomingI420VideoFrame(I420VideoFrame* video_frame, + int64_t captureTime) { + + CriticalSectionScoped cs(&_apiCs); + CriticalSectionScoped cs2(&_callBackCs); + DeliverCapturedFrame(*video_frame, captureTime); + + return 0; +} + int32_t VideoCaptureImpl::SetCaptureRotation(VideoRotation rotation) { CriticalSectionScoped cs(&_apiCs); CriticalSectionScoped cs2(&_callBackCs); diff --git a/webrtc/modules/video_capture/video_capture_impl.h b/webrtc/modules/video_capture/video_capture_impl.h index fd4b39894..1a2c8bfac 100644 --- a/webrtc/modules/video_capture/video_capture_impl.h +++ b/webrtc/modules/video_capture/video_capture_impl.h @@ -82,12 +82,15 @@ public: virtual int32_t Process(); // Implement VideoCaptureExternal - // |capture_time| must be specified in NTP time format in milliseconds. + // |capture_time| must be specified in the NTP time format in milliseconds. 
virtual int32_t IncomingFrame(uint8_t* videoFrame, size_t videoFrameLength, const VideoCaptureCapability& frameInfo, int64_t captureTime = 0); + virtual int32_t IncomingI420VideoFrame(I420VideoFrame* video_frame, + int64_t captureTime = 0); + // Platform dependent virtual int32_t StartCapture(const VideoCaptureCapability& capability) { @@ -104,7 +107,8 @@ public: protected: VideoCaptureImpl(const int32_t id); virtual ~VideoCaptureImpl(); - int32_t DeliverCapturedFrame(I420VideoFrame& captureFrame); + int32_t DeliverCapturedFrame(I420VideoFrame& captureFrame, + int64_t capture_time); int32_t _id; // Module ID char* _deviceUniqueId; // current Device unique name; @@ -134,6 +138,12 @@ private: I420VideoFrame _captureFrame; + // Used to make sure incoming timestamp is increasing for every frame. + int64_t last_capture_time_; + + // Delta used for translating between NTP and internal timestamps. + const int64_t delta_ntp_internal_ms_; + // Indicate whether rotation should be applied before delivered externally. bool apply_rotation_; }; diff --git a/webrtc/test/frame_generator_capturer.cc b/webrtc/test/frame_generator_capturer.cc index f78a597f6..721c29af4 100644 --- a/webrtc/test/frame_generator_capturer.cc +++ b/webrtc/test/frame_generator_capturer.cc @@ -114,11 +114,11 @@ void FrameGeneratorCapturer::InsertFrame() { CriticalSectionScoped cs(lock_.get()); if (sending_) { I420VideoFrame* frame = frame_generator_->NextFrame(); - frame->set_ntp_time_ms(clock_->CurrentNtpInMilliseconds()); + frame->set_render_time_ms(clock_->CurrentNtpInMilliseconds()); if (first_frame_capture_time_ == -1) { - first_frame_capture_time_ = frame->ntp_time_ms(); + first_frame_capture_time_ = frame->render_time_ms(); } - input_->IncomingCapturedFrame(*frame); + input_->SwapFrame(frame); } } tick_->Wait(WEBRTC_EVENT_INFINITE); diff --git a/webrtc/test/vcm_capturer.cc b/webrtc/test/vcm_capturer.cc index f9976d21c..a5820bfe1 100644 --- a/webrtc/test/vcm_capturer.cc +++ b/webrtc/test/vcm_capturer.cc @@ -87,9 +87,9 @@ void VcmCapturer::Destroy() { VcmCapturer::~VcmCapturer() { Destroy(); } void VcmCapturer::OnIncomingCapturedFrame(const int32_t id, - const I420VideoFrame& frame) { + I420VideoFrame& frame) { if (started_) - input_->IncomingCapturedFrame(frame); + input_->SwapFrame(&frame); } void VcmCapturer::OnCaptureDelayChanged(const int32_t id, const int32_t delay) { diff --git a/webrtc/test/vcm_capturer.h b/webrtc/test/vcm_capturer.h index c73eeb1ce..1cb5b4e00 100644 --- a/webrtc/test/vcm_capturer.h +++ b/webrtc/test/vcm_capturer.h @@ -28,7 +28,7 @@ class VcmCapturer : public VideoCapturer, public VideoCaptureDataCallback { void Stop() override; void OnIncomingCapturedFrame(const int32_t id, - const I420VideoFrame& frame) override; // NOLINT + I420VideoFrame& frame) override; // NOLINT void OnCaptureDelayChanged(const int32_t id, const int32_t delay) override; private: diff --git a/webrtc/video/end_to_end_tests.cc b/webrtc/video/end_to_end_tests.cc index c3f3fd514..a8f6f0962 100644 --- a/webrtc/video/end_to_end_tests.cc +++ b/webrtc/video/end_to_end_tests.cc @@ -170,7 +170,7 @@ TEST_F(EndToEndTest, RendersSingleDelayedFrame) { // check that the callbacks are done after processing video. 
rtc::scoped_ptr frame_generator( test::FrameGenerator::CreateChromaGenerator(kWidth, kHeight)); - send_stream_->Input()->IncomingCapturedFrame(*frame_generator->NextFrame()); + send_stream_->Input()->SwapFrame(frame_generator->NextFrame()); EXPECT_EQ(kEventSignaled, pre_render_callback.Wait()) << "Timed out while waiting for pre-render callback."; EXPECT_EQ(kEventSignaled, renderer.Wait()) @@ -218,7 +218,7 @@ TEST_F(EndToEndTest, TransmitsFirstFrame) { rtc::scoped_ptr frame_generator( test::FrameGenerator::CreateChromaGenerator( encoder_config_.streams[0].width, encoder_config_.streams[0].height)); - send_stream_->Input()->IncomingCapturedFrame(*frame_generator->NextFrame()); + send_stream_->Input()->SwapFrame(frame_generator->NextFrame()); EXPECT_EQ(kEventSignaled, renderer.Wait()) << "Timed out while waiting for the frame to render."; @@ -833,7 +833,7 @@ TEST_F(EndToEndTest, UsesFrameCallbacks) { // check that the callbacks are done after processing video. rtc::scoped_ptr frame_generator( test::FrameGenerator::CreateChromaGenerator(kWidth / 2, kHeight / 2)); - send_stream_->Input()->IncomingCapturedFrame(*frame_generator->NextFrame()); + send_stream_->Input()->SwapFrame(frame_generator->NextFrame()); EXPECT_EQ(kEventSignaled, pre_encode_callback.Wait()) << "Timed out while waiting for pre-encode callback."; @@ -1263,7 +1263,7 @@ TEST_F(EndToEndTest, ObserversEncodedFrames) { rtc::scoped_ptr frame_generator( test::FrameGenerator::CreateChromaGenerator( encoder_config_.streams[0].width, encoder_config_.streams[0].height)); - send_stream_->Input()->IncomingCapturedFrame(*frame_generator->NextFrame()); + send_stream_->Input()->SwapFrame(frame_generator->NextFrame()); EXPECT_EQ(kEventSignaled, post_encode_observer.Wait()) << "Timed out while waiting for send-side encoded-frame callback."; diff --git a/webrtc/video/full_stack.cc b/webrtc/video/full_stack.cc index 5dfd8e5f4..033bf4fc5 100644 --- a/webrtc/video/full_stack.cc +++ b/webrtc/video/full_stack.cc @@ -144,7 +144,7 @@ class VideoAnalyzer : public PacketReceiver, return receiver_->DeliverPacket(packet, length); } - void IncomingCapturedFrame(const I420VideoFrame& video_frame) override { + void SwapFrame(I420VideoFrame* video_frame) override { I420VideoFrame* copy = NULL; { CriticalSectionScoped lock(crit_.get()); @@ -156,8 +156,8 @@ class VideoAnalyzer : public PacketReceiver, if (copy == NULL) copy = new I420VideoFrame(); - copy->CopyFrame(video_frame); - copy->set_timestamp(copy->ntp_time_ms() * 90); + copy->CopyFrame(*video_frame); + copy->set_timestamp(copy->render_time_ms() * 90); { CriticalSectionScoped lock(crit_.get()); @@ -167,7 +167,7 @@ class VideoAnalyzer : public PacketReceiver, frames_.push_back(copy); } - input_->IncomingCapturedFrame(video_frame); + input_->SwapFrame(video_frame); } bool SendRtp(const uint8_t* packet, size_t length) override { diff --git a/webrtc/video/send_statistics_proxy.cc b/webrtc/video/send_statistics_proxy.cc index 594cde5fa..33255f9b9 100644 --- a/webrtc/video/send_statistics_proxy.cc +++ b/webrtc/video/send_statistics_proxy.cc @@ -50,11 +50,15 @@ void SendStatisticsProxy::SuspendChange(int video_channel, bool is_suspended) { stats_.suspended = is_suspended; } +void SendStatisticsProxy::CapturedFrameRate(const int capture_id, + const unsigned char frame_rate) { + CriticalSectionScoped lock(crit_.get()); + stats_.input_frame_rate = frame_rate; +} + VideoSendStream::Stats SendStatisticsProxy::GetStats() { CriticalSectionScoped lock(crit_.get()); PurgeOldStats(); - stats_.input_frame_rate = - 
static_cast(input_frame_rate_tracker_.units_second()); return stats_; } @@ -118,11 +122,6 @@ void SendStatisticsProxy::OnSendEncodedImage( update_times_[ssrc].resolution_update_ms = clock_->TimeInMilliseconds(); } -void SendStatisticsProxy::OnIncomingFrame() { - CriticalSectionScoped lock(crit_.get()); - input_frame_rate_tracker_.Update(1); -} - void SendStatisticsProxy::RtcpPacketTypesCounterUpdated( uint32_t ssrc, const RtcpPacketTypeCounter& packet_counter) { diff --git a/webrtc/video/send_statistics_proxy.h b/webrtc/video/send_statistics_proxy.h index 0a048a59c..a60d495de 100644 --- a/webrtc/video/send_statistics_proxy.h +++ b/webrtc/video/send_statistics_proxy.h @@ -13,7 +13,6 @@ #include -#include "webrtc/base/ratetracker.h" #include "webrtc/base/scoped_ptr.h" #include "webrtc/base/thread_annotations.h" #include "webrtc/common_types.h" @@ -36,6 +35,7 @@ class SendStatisticsProxy : public CpuOveruseMetricsObserver, public BitrateStatisticsObserver, public FrameCountObserver, public ViEEncoderObserver, + public ViECaptureObserver, public VideoEncoderRateObserver, public SendSideDelayObserver { public: @@ -48,8 +48,6 @@ class SendStatisticsProxy : public CpuOveruseMetricsObserver, virtual void OnSendEncodedImage(const EncodedImage& encoded_image, const RTPVideoHeader* rtp_video_header); - // Used to update incoming frame rate. - void OnIncomingFrame(); // From VideoEncoderRateObserver. void OnSetRates(uint32_t bitrate_bps, int framerate) override; @@ -85,6 +83,16 @@ class SendStatisticsProxy : public CpuOveruseMetricsObserver, void SuspendChange(int video_channel, bool is_suspended) override; + // From ViECaptureObserver. + void BrightnessAlarm(const int capture_id, + const Brightness brightness) override {} + + void CapturedFrameRate(const int capture_id, + const unsigned char frame_rate) override; + + void NoPictureAlarm(const int capture_id, const CaptureAlarm alarm) override { + } + void SendSideDelayUpdated(int avg_delay_ms, int max_delay_ms, uint32_t ssrc) override; @@ -102,7 +110,6 @@ class SendStatisticsProxy : public CpuOveruseMetricsObserver, const VideoSendStream::Config config_; rtc::scoped_ptr crit_; VideoSendStream::Stats stats_ GUARDED_BY(crit_); - rtc::RateTracker input_frame_rate_tracker_ GUARDED_BY(crit_); std::map update_times_ GUARDED_BY(crit_); }; diff --git a/webrtc/video/send_statistics_proxy_unittest.cc b/webrtc/video/send_statistics_proxy_unittest.cc index 0243add3b..c2ebf5f40 100644 --- a/webrtc/video/send_statistics_proxy_unittest.cc +++ b/webrtc/video/send_statistics_proxy_unittest.cc @@ -128,6 +128,16 @@ TEST_F(SendStatisticsProxyTest, RtcpStatistics) { ExpectEqual(expected_, stats); } +TEST_F(SendStatisticsProxyTest, CaptureFramerate) { + const int capture_fps = 31; + + ViECaptureObserver* capture_observer = statistics_proxy_.get(); + capture_observer->CapturedFrameRate(0, capture_fps); + + VideoSendStream::Stats stats = statistics_proxy_->GetStats(); + EXPECT_EQ(capture_fps, stats.input_frame_rate); +} + TEST_F(SendStatisticsProxyTest, EncodedBitrateAndFramerate) { const int media_bitrate_bps = 500; const int encode_fps = 29; diff --git a/webrtc/video/video_send_stream.cc b/webrtc/video/video_send_stream.cc index b21b71322..881802afb 100644 --- a/webrtc/video/video_send_stream.cc +++ b/webrtc/video/video_send_stream.cc @@ -237,6 +237,7 @@ VideoSendStream::VideoSendStream( rtp_rtcp_->RegisterSendFrameCountObserver(channel_, &stats_proxy_); codec_->RegisterEncoderObserver(channel_, stats_proxy_); + capture_->RegisterObserver(capture_id_, stats_proxy_); } 
VideoSendStream::~VideoSendStream() { @@ -273,13 +274,12 @@ VideoSendStream::~VideoSendStream() { rtp_rtcp_->Release(); } -void VideoSendStream::IncomingCapturedFrame(const I420VideoFrame& frame) { +void VideoSendStream::SwapFrame(I420VideoFrame* frame) { // TODO(pbos): Local rendering should not be done on the capture thread. if (config_.local_renderer != NULL) - config_.local_renderer->RenderFrame(frame, 0); + config_.local_renderer->RenderFrame(*frame, 0); - stats_proxy_.OnIncomingFrame(); - external_capture_->IncomingFrame(frame); + external_capture_->SwapFrame(frame); } VideoSendStreamInput* VideoSendStream::Input() { return this; } diff --git a/webrtc/video/video_send_stream.h b/webrtc/video/video_send_stream.h index 898b81028..648a64ed7 100644 --- a/webrtc/video/video_send_stream.h +++ b/webrtc/video/video_send_stream.h @@ -63,7 +63,7 @@ class VideoSendStream : public webrtc::VideoSendStream, bool DeliverRtcp(const uint8_t* packet, size_t length); // From VideoSendStreamInput. - void IncomingCapturedFrame(const I420VideoFrame& frame) override; + void SwapFrame(I420VideoFrame* frame) override; // From webrtc::VideoSendStream. VideoSendStreamInput* Input() override; diff --git a/webrtc/video/video_send_stream_tests.cc b/webrtc/video/video_send_stream_tests.cc index 72fab4bc2..e21474dfb 100644 --- a/webrtc/video/video_send_stream_tests.cc +++ b/webrtc/video/video_send_stream_tests.cc @@ -266,6 +266,35 @@ class FakeReceiveStatistics : public NullReceiveStatistics { StatisticianMap stats_map_; }; +TEST_F(VideoSendStreamTest, SwapsI420VideoFrames) { + static const size_t kWidth = 320; + static const size_t kHeight = 240; + + test::NullTransport transport; + Call::Config call_config(&transport); + CreateSenderCall(call_config); + + CreateSendConfig(1); + CreateStreams(); + send_stream_->Start(); + + I420VideoFrame frame; + const int stride_uv = (kWidth + 1) / 2; + frame.CreateEmptyFrame(kWidth, kHeight, kWidth, stride_uv, stride_uv); + uint8_t* old_y_buffer = frame.buffer(kYPlane); + // Initialize memory to avoid DrMemory errors. + const int half_height = (kHeight + 1) / 2; + memset(frame.buffer(kYPlane), 0, kWidth * kHeight); + memset(frame.buffer(kUPlane), 0, stride_uv * half_height); + memset(frame.buffer(kVPlane), 0, stride_uv * half_height); + + send_stream_->Input()->SwapFrame(&frame); + + EXPECT_NE(frame.buffer(kYPlane), old_y_buffer); + + DestroyStreams(); +} + TEST_F(VideoSendStreamTest, SupportsFec) { class FecObserver : public test::SendTest { public: @@ -1015,13 +1044,15 @@ TEST_F(VideoSendStreamTest, CapturesTextureAndI420VideoFrames) { new webrtc::RefCountImpl(); input_frames.push_back(new I420VideoFrame(handle1, width, height, 1, 1)); input_frames.push_back(new I420VideoFrame(handle2, width, height, 2, 2)); - input_frames.push_back(CreateI420VideoFrame(width, height, 3)); - input_frames.push_back(CreateI420VideoFrame(width, height, 4)); - input_frames.push_back(new I420VideoFrame(handle3, width, height, 5, 5)); + input_frames.push_back(CreateI420VideoFrame(width, height, 1)); + input_frames.push_back(CreateI420VideoFrame(width, height, 2)); + input_frames.push_back(new I420VideoFrame(handle3, width, height, 3, 3)); send_stream_->Start(); for (size_t i = 0; i < input_frames.size(); i++) { - send_stream_->Input()->IncomingCapturedFrame(*input_frames[i]); + // Make a copy of the input frame because the buffer will be swapped. 
+ rtc::scoped_ptr frame(input_frames[i]->CloneFrame()); + send_stream_->Input()->SwapFrame(frame.get()); // Do not send the next frame too fast, so the frame dropper won't drop it. if (i < input_frames.size() - 1) SleepMs(1000 / encoder_config_.streams[0].max_framerate); @@ -1051,7 +1082,6 @@ void ExpectEqualTextureFrames(const I420VideoFrame& frame1, EXPECT_EQ(frame1.native_handle(), frame2.native_handle()); EXPECT_EQ(frame1.width(), frame2.width()); EXPECT_EQ(frame1.height(), frame2.height()); - EXPECT_EQ(frame1.render_time_ms(), frame2.render_time_ms()); } void ExpectEqualBufferFrames(const I420VideoFrame& frame1, @@ -1061,7 +1091,7 @@ void ExpectEqualBufferFrames(const I420VideoFrame& frame1, EXPECT_EQ(frame1.stride(kYPlane), frame2.stride(kYPlane)); EXPECT_EQ(frame1.stride(kUPlane), frame2.stride(kUPlane)); EXPECT_EQ(frame1.stride(kVPlane), frame2.stride(kVPlane)); - EXPECT_EQ(frame1.render_time_ms(), frame2.render_time_ms()); + EXPECT_EQ(frame1.ntp_time_ms(), frame2.ntp_time_ms()); ASSERT_EQ(frame1.allocated_size(kYPlane), frame2.allocated_size(kYPlane)); EXPECT_EQ(0, memcmp(frame1.buffer(kYPlane), @@ -1104,6 +1134,7 @@ I420VideoFrame* CreateI420VideoFrame(int width, int height, uint8_t data) { width / 2, width / 2); frame->set_timestamp(data); + frame->set_ntp_time_ms(data); frame->set_render_time_ms(data); return frame; } diff --git a/webrtc/video_engine/include/vie_capture.h b/webrtc/video_engine/include/vie_capture.h index 537e59612..caaeacef2 100644 --- a/webrtc/video_engine/include/vie_capture.h +++ b/webrtc/video_engine/include/vie_capture.h @@ -113,7 +113,7 @@ class WEBRTC_DLLEXPORT ViEExternalCapture { const ViEVideoFrameI420& video_frame, unsigned long long capture_time = 0) = 0; - virtual void IncomingFrame(const I420VideoFrame& frame) {} + virtual void SwapFrame(I420VideoFrame* frame) {} }; // This class declares an abstract interface for a user defined observer. 
It is diff --git a/webrtc/video_engine/vie_capturer.cc b/webrtc/video_engine/vie_capturer.cc index bd9d8a31e..6e69bb4f8 100644 --- a/webrtc/video_engine/vie_capturer.cc +++ b/webrtc/video_engine/vie_capturer.cc @@ -21,7 +21,6 @@ #include "webrtc/system_wrappers/interface/event_wrapper.h" #include "webrtc/system_wrappers/interface/logging.h" #include "webrtc/system_wrappers/interface/thread_wrapper.h" -#include "webrtc/system_wrappers/interface/tick_util.h" #include "webrtc/system_wrappers/interface/trace_event.h" #include "webrtc/video_engine/include/vie_image_process.h" #include "webrtc/video_engine/overuse_frame_detector.h" @@ -76,10 +75,6 @@ ViECapturer::ViECapturer(int capture_id, capture_event_(*EventWrapper::Create()), deliver_event_(*EventWrapper::Create()), stop_(0), - last_captured_timestamp_(0), - delta_ntp_internal_ms_( - Clock::GetRealTimeClock()->CurrentNtpInMilliseconds() - - TickTime::MillisecondTimestamp()), effect_filter_(NULL), image_proc_module_(NULL), image_proc_module_ref_counter_(0), @@ -312,6 +307,10 @@ int ViECapturer::IncomingFrame(unsigned char* video_frame, int ViECapturer::IncomingFrameI420(const ViEVideoFrameI420& video_frame, unsigned long long capture_time) { // NOLINT + if (!external_capture_module_) { + return -1; + } + int size_y = video_frame.height * video_frame.y_pitch; int size_u = video_frame.u_pitch * ((video_frame.height + 1) / 2); int size_v = video_frame.v_pitch * ((video_frame.height + 1) / 2); @@ -327,61 +326,46 @@ int ViECapturer::IncomingFrameI420(const ViEVideoFrameI420& video_frame, video_frame.y_pitch, video_frame.u_pitch, video_frame.v_pitch); + if (ret < 0) { LOG_F(LS_ERROR) << "Could not create I420Frame."; return -1; } - incoming_frame_.set_ntp_time_ms(capture_time); - OnIncomingCapturedFrame(-1, incoming_frame_); - return 0; + return external_capture_module_->IncomingI420VideoFrame(&incoming_frame_, + capture_time); } -void ViECapturer::IncomingFrame(const I420VideoFrame& frame) { - OnIncomingCapturedFrame(-1, frame); +void ViECapturer::SwapFrame(I420VideoFrame* frame) { + external_capture_module_->IncomingI420VideoFrame(frame, + frame->render_time_ms()); + frame->set_timestamp(0); + frame->set_ntp_time_ms(0); + frame->set_render_time_ms(0); } void ViECapturer::OnIncomingCapturedFrame(const int32_t capture_id, - const I420VideoFrame& video_frame) { + I420VideoFrame& video_frame) { CriticalSectionScoped cs(capture_cs_.get()); - captured_frame_.reset(new I420VideoFrame()); - captured_frame_->ShallowCopy(video_frame); + // Make sure we render this frame earlier since we know the render time set + // is slightly off since it's being set when the frame has been received from + // the camera, and not when the camera actually captured the frame. + video_frame.set_render_time_ms(video_frame.render_time_ms() - FrameDelay()); - if (captured_frame_->ntp_time_ms() != 0) { - // If a ntp time stamp is set, this is the time stamp we will use. - captured_frame_->set_render_time_ms( - captured_frame_->ntp_time_ms() - delta_ntp_internal_ms_); - } else { // ntp time stamp not set. - int64_t render_time = captured_frame_->render_time_ms() != 0 ? - captured_frame_->render_time_ms() : TickTime::MillisecondTimestamp(); - - // Make sure we render this frame earlier since we know the render time set - // is slightly off since it's being set when the frame was received - // from the camera, and not when the camera actually captured the frame. 
- render_time -= FrameDelay(); - captured_frame_->set_render_time_ms(render_time); - captured_frame_->set_ntp_time_ms( - render_time + delta_ntp_internal_ms_); - } - - if (captured_frame_->ntp_time_ms() <= last_captured_timestamp_) { - // We don't allow the same capture time for two frames, drop this one. - return; - } - last_captured_timestamp_ = captured_frame_->ntp_time_ms(); - - // Convert ntp time, in ms, to RTP timestamp. - const int kMsToRtpTimestamp = 90; - captured_frame_->set_timestamp(kMsToRtpTimestamp * - static_cast(captured_frame_->ntp_time_ms())); - - overuse_detector_->FrameCaptured(captured_frame_->width(), - captured_frame_->height(), - captured_frame_->render_time_ms()); + overuse_detector_->FrameCaptured(video_frame.width(), + video_frame.height(), + video_frame.render_time_ms()); TRACE_EVENT_ASYNC_BEGIN1("webrtc", "Video", video_frame.render_time_ms(), "render_time", video_frame.render_time_ms()); + if (video_frame.native_handle() != NULL) { + captured_frame_.reset(video_frame.CloneFrame()); + } else { + if (captured_frame_ == NULL || captured_frame_->native_handle() != NULL) + captured_frame_.reset(new I420VideoFrame()); + captured_frame_->SwapFrame(&video_frame); + } capture_event_.Set(); } diff --git a/webrtc/video_engine/vie_capturer.h b/webrtc/video_engine/vie_capturer.h index 79a305e4d..9f077e101 100644 --- a/webrtc/video_engine/vie_capturer.h +++ b/webrtc/video_engine/vie_capturer.h @@ -69,17 +69,17 @@ class ViECapturer int FrameCallbackChanged(); // Implements ExternalCapture. - int IncomingFrame(unsigned char* video_frame, - size_t video_frame_length, - uint16_t width, - uint16_t height, - RawVideoType video_type, - unsigned long long capture_time = 0) override; + virtual int IncomingFrame(unsigned char* video_frame, + size_t video_frame_length, + uint16_t width, + uint16_t height, + RawVideoType video_type, + unsigned long long capture_time = 0); // NOLINT - int IncomingFrameI420(const ViEVideoFrameI420& video_frame, - unsigned long long capture_time = 0) override; + virtual int IncomingFrameI420(const ViEVideoFrameI420& video_frame, + unsigned long long capture_time = 0); // NOLINT - void IncomingFrame(const I420VideoFrame& frame) override; + void SwapFrame(I420VideoFrame* frame) override; // Start/Stop. int32_t Start( @@ -123,7 +123,7 @@ class ViECapturer // Implements VideoCaptureDataCallback. virtual void OnIncomingCapturedFrame(const int32_t id, - const I420VideoFrame& video_frame); + I420VideoFrame& video_frame); virtual void OnCaptureDelayChanged(const int32_t id, const int32_t delay); @@ -172,10 +172,6 @@ class ViECapturer volatile int stop_; rtc::scoped_ptr captured_frame_; - // Used to make sure incoming time stamp is increasing for every frame. - int64_t last_captured_timestamp_; - // Delta used for translating between NTP and internal timestamps. - const int64_t delta_ntp_internal_ms_; rtc::scoped_ptr deliver_frame_; // Image processing. 
diff --git a/webrtc/video_engine/vie_capturer_unittest.cc b/webrtc/video_engine/vie_capturer_unittest.cc index 4ca95cb25..fa3a9d213 100644 --- a/webrtc/video_engine/vie_capturer_unittest.cc +++ b/webrtc/video_engine/vie_capturer_unittest.cc @@ -129,37 +129,13 @@ class ViECapturerTest : public ::testing::Test { std::vector output_frame_ybuffers_; }; -TEST_F(ViECapturerTest, TestNtpTimeStampSetIfRenderTimeSet) { - input_frames_.push_back(CreateI420VideoFrame(static_cast(0))); - input_frames_[0]->set_render_time_ms(5); - input_frames_[0]->set_ntp_time_ms(0); - - AddInputFrame(input_frames_[0]); - WaitOutputFrame(); - EXPECT_GT(output_frames_[0]->ntp_time_ms(), - input_frames_[0]->render_time_ms()); -} - -TEST_F(ViECapturerTest, TestRtpTimeStampSet) { - input_frames_.push_back(CreateI420VideoFrame(static_cast(0))); - input_frames_[0]->set_render_time_ms(0); - input_frames_[0]->set_ntp_time_ms(1); - input_frames_[0]->set_timestamp(0); - - AddInputFrame(input_frames_[0]); - WaitOutputFrame(); - EXPECT_EQ(output_frames_[0]->timestamp(), - input_frames_[0]->ntp_time_ms() * 90); -} - TEST_F(ViECapturerTest, TestTextureFrames) { const int kNumFrame = 3; for (int i = 0 ; i < kNumFrame; ++i) { webrtc::RefCountImpl* handle = new webrtc::RefCountImpl(); // Add one to |i| so that width/height > 0. - input_frames_.push_back( - new I420VideoFrame(handle, i + 1, i + 1, i + 1, i + 1)); + input_frames_.push_back(new I420VideoFrame(handle, i + 1, i + 1, i, i)); AddInputFrame(input_frames_[i]); WaitOutputFrame(); } @@ -169,17 +145,20 @@ TEST_F(ViECapturerTest, TestTextureFrames) { TEST_F(ViECapturerTest, TestI420Frames) { const int kNumFrame = 4; + ScopedVector copied_input_frames; std::vector ybuffer_pointers; for (int i = 0; i < kNumFrame; ++i) { input_frames_.push_back(CreateI420VideoFrame(static_cast(i + 1))); const I420VideoFrame* const_input_frame = input_frames_[i]; ybuffer_pointers.push_back(const_input_frame->buffer(kYPlane)); + // Copy input frames because the buffer data will be swapped. + copied_input_frames.push_back(input_frames_[i]->CloneFrame()); AddInputFrame(input_frames_[i]); WaitOutputFrame(); } - EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_)); - // Make sure the buffer is not copied. + EXPECT_TRUE(EqualFramesVector(copied_input_frames, output_frames_)); + // Make sure the buffer is swapped and not copied. 
for (int i = 0; i < kNumFrame; ++i) EXPECT_EQ(ybuffer_pointers[i], output_frame_ybuffers_[i]); } @@ -191,8 +170,10 @@ TEST_F(ViECapturerTest, TestI420FrameAfterTextureFrame) { AddInputFrame(input_frames_[0]); WaitOutputFrame(); - input_frames_.push_back(CreateI420VideoFrame(2)); - AddInputFrame(input_frames_[1]); + input_frames_.push_back(CreateI420VideoFrame(1)); + rtc::scoped_ptr copied_input_frame( + input_frames_[1]->CloneFrame()); + AddInputFrame(copied_input_frame.get()); WaitOutputFrame(); EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_)); @@ -200,12 +181,14 @@ TEST_F(ViECapturerTest, TestI420FrameAfterTextureFrame) { TEST_F(ViECapturerTest, TestTextureFrameAfterI420Frame) { input_frames_.push_back(CreateI420VideoFrame(1)); - AddInputFrame(input_frames_[0]); + rtc::scoped_ptr copied_input_frame( + input_frames_[0]->CloneFrame()); + AddInputFrame(copied_input_frame.get()); WaitOutputFrame(); webrtc::RefCountImpl* handle = new webrtc::RefCountImpl(); - input_frames_.push_back(new I420VideoFrame(handle, 1, 1, 2, 2)); + input_frames_.push_back(new I420VideoFrame(handle, 1, 1, 1, 1)); AddInputFrame(input_frames_[1]); WaitOutputFrame(); @@ -224,6 +207,7 @@ bool EqualTextureFrames(const I420VideoFrame& frame1, return ((frame1.native_handle() == frame2.native_handle()) && (frame1.width() == frame2.width()) && (frame1.height() == frame2.height()) && + (frame1.timestamp() == frame2.timestamp()) && (frame1.render_time_ms() == frame2.render_time_ms())); } @@ -234,6 +218,8 @@ bool EqualBufferFrames(const I420VideoFrame& frame1, (frame1.stride(kYPlane) == frame2.stride(kYPlane)) && (frame1.stride(kUPlane) == frame2.stride(kUPlane)) && (frame1.stride(kVPlane) == frame2.stride(kVPlane)) && + (frame1.timestamp() == frame2.timestamp()) && + (frame1.ntp_time_ms() == frame2.ntp_time_ms()) && (frame1.render_time_ms() == frame2.render_time_ms()) && (frame1.allocated_size(kYPlane) == frame2.allocated_size(kYPlane)) && (frame1.allocated_size(kUPlane) == frame2.allocated_size(kUPlane)) && @@ -268,6 +254,8 @@ I420VideoFrame* CreateI420VideoFrame(uint8_t data) { frame->CreateFrame( kSizeY, buffer, kSizeUV, buffer, kSizeUV, buffer, width, height, width, width / 2, width / 2); + frame->set_timestamp(data); + frame->set_ntp_time_ms(data); frame->set_render_time_ms(data); return frame; } diff --git a/webrtc/video_engine/vie_encoder.cc b/webrtc/video_engine/vie_encoder.cc index a3f63a9d6..f86beef4f 100644 --- a/webrtc/video_engine/vie_encoder.cc +++ b/webrtc/video_engine/vie_encoder.cc @@ -582,8 +582,16 @@ void ViEEncoder::DeliverFrame(int id, TraceFrameDropEnd(); } + // Convert render time, in ms, to RTP timestamp. + const int kMsToRtpTimestamp = 90; + const uint32_t time_stamp = + kMsToRtpTimestamp * + static_cast(video_frame->render_time_ms()); + TRACE_EVENT_ASYNC_STEP0("webrtc", "Video", video_frame->render_time_ms(), "Encode"); + video_frame->set_timestamp(time_stamp); + I420VideoFrame* decimated_frame = NULL; // TODO(wuchengli): support texture frames. if (video_frame->native_handle() == NULL) { diff --git a/webrtc/video_frame.h b/webrtc/video_frame.h index 98743e9b3..2fc8b9ec6 100644 --- a/webrtc/video_frame.h +++ b/webrtc/video_frame.h @@ -73,15 +73,11 @@ class I420VideoFrame { int stride_v, VideoRotation rotation); - // Deep copy frame: If required size is bigger than allocated one, new - // buffers of adequate size will be allocated. + // Copy frame: If required size is bigger than allocated one, new buffers of + // adequate size will be allocated. // Return value: 0 on success, -1 on error. 
   int CopyFrame(const I420VideoFrame& videoFrame);
 
-  // Creates a shallow copy of |videoFrame|, i.e, the this object will retain a
-  // reference to the video buffer also retained by |videoFrame|.
-  void ShallowCopy(const I420VideoFrame& videoFrame);
-
   // Make a copy of |this|. The caller owns the returned frame.
   // Return value: a new frame on success, NULL on error.
   I420VideoFrame* CloneFrame() const;
diff --git a/webrtc/video_send_stream.h b/webrtc/video_send_stream.h
index 4ae0b6b58..c55d9e3e5 100644
--- a/webrtc/video_send_stream.h
+++ b/webrtc/video_send_stream.h
@@ -29,7 +29,7 @@ class VideoSendStreamInput {
   // These methods do not lock internally and must be called sequentially.
   // If your application switches input sources synchronization must be done
   // externally to make sure that any old frames are not delivered concurrently.
-  virtual void IncomingCapturedFrame(const I420VideoFrame& video_frame) = 0;
+  virtual void SwapFrame(I420VideoFrame* video_frame) = 0;
 
  protected:
   virtual ~VideoSendStreamInput() {}