diff --git a/webrtc/common_video/i420_video_frame.cc b/webrtc/common_video/i420_video_frame.cc
index 0afdf10c6..9e87e40f6 100644
--- a/webrtc/common_video/i420_video_frame.cc
+++ b/webrtc/common_video/i420_video_frame.cc
@@ -18,11 +18,10 @@
 
 namespace webrtc {
 
-I420VideoFrame::I420VideoFrame()
-    : timestamp_(0),
-      ntp_time_ms_(0),
-      render_time_ms_(0),
-      rotation_(kVideoRotation_0) {
+I420VideoFrame::I420VideoFrame() {
+  // Intentionally using Reset instead of initializer list so that any missed
+  // fields in Reset will be caught by memory checkers.
+  Reset();
 }
 
 I420VideoFrame::I420VideoFrame(
@@ -156,6 +155,14 @@ void I420VideoFrame::SwapFrame(I420VideoFrame* videoFrame) {
   std::swap(rotation_, videoFrame->rotation_);
 }
 
+void I420VideoFrame::Reset() {
+  video_frame_buffer_ = nullptr;
+  timestamp_ = 0;
+  ntp_time_ms_ = 0;
+  render_time_ms_ = 0;
+  rotation_ = kVideoRotation_0;
+}
+
 uint8_t* I420VideoFrame::buffer(PlaneType type) {
   return video_frame_buffer_ ? video_frame_buffer_->data(type) : nullptr;
 }
diff --git a/webrtc/common_video/i420_video_frame_unittest.cc b/webrtc/common_video/i420_video_frame_unittest.cc
index 013382eab..849592405 100644
--- a/webrtc/common_video/i420_video_frame_unittest.cc
+++ b/webrtc/common_video/i420_video_frame_unittest.cc
@@ -130,6 +130,21 @@ TEST(TestI420VideoFrame, CopyFrame) {
   EXPECT_TRUE(EqualFrames(small_frame, big_frame));
 }
 
+TEST(TestI420VideoFrame, Reset) {
+  I420VideoFrame frame;
+  ASSERT_TRUE(frame.CreateEmptyFrame(5, 5, 5, 5, 5) == 0);
+  frame.set_ntp_time_ms(1);
+  frame.set_timestamp(2);
+  frame.set_render_time_ms(3);
+  ASSERT_TRUE(frame.video_frame_buffer() != NULL);
+
+  frame.Reset();
+  EXPECT_EQ(0u, frame.ntp_time_ms());
+  EXPECT_EQ(0u, frame.render_time_ms());
+  EXPECT_EQ(0u, frame.timestamp());
+  EXPECT_TRUE(frame.video_frame_buffer() == NULL);
+}
+
 TEST(TestI420VideoFrame, CloneFrame) {
   I420VideoFrame frame1;
   rtc::scoped_ptr<I420VideoFrame> frame2;
diff --git a/webrtc/common_video/interface/video_frame_buffer.h b/webrtc/common_video/interface/video_frame_buffer.h
index 9b6438a77..e340d8b35 100644
--- a/webrtc/common_video/interface/video_frame_buffer.h
+++ b/webrtc/common_video/interface/video_frame_buffer.h
@@ -69,10 +69,10 @@ class I420Buffer : public VideoFrameBuffer {
   int stride(PlaneType type) const override;
   rtc::scoped_refptr<NativeHandle> native_handle() const override;
 
- private:
-  friend class rtc::RefCountedObject<I420Buffer>;
+ protected:
   ~I420Buffer() override;
 
+ private:
   const int width_;
   const int height_;
   const int stride_y_;
diff --git a/webrtc/video/video_send_stream_tests.cc b/webrtc/video/video_send_stream_tests.cc
index e21474dfb..e13a78ced 100644
--- a/webrtc/video/video_send_stream_tests.cc
+++ b/webrtc/video/video_send_stream_tests.cc
@@ -266,35 +266,6 @@ class FakeReceiveStatistics : public NullReceiveStatistics {
   StatisticianMap stats_map_;
 };
 
-TEST_F(VideoSendStreamTest, SwapsI420VideoFrames) {
-  static const size_t kWidth = 320;
-  static const size_t kHeight = 240;
-
-  test::NullTransport transport;
-  Call::Config call_config(&transport);
-  CreateSenderCall(call_config);
-
-  CreateSendConfig(1);
-  CreateStreams();
-  send_stream_->Start();
-
-  I420VideoFrame frame;
-  const int stride_uv = (kWidth + 1) / 2;
-  frame.CreateEmptyFrame(kWidth, kHeight, kWidth, stride_uv, stride_uv);
-  uint8_t* old_y_buffer = frame.buffer(kYPlane);
-  // Initialize memory to avoid DrMemory errors.
-  const int half_height = (kHeight + 1) / 2;
-  memset(frame.buffer(kYPlane), 0, kWidth * kHeight);
-  memset(frame.buffer(kUPlane), 0, stride_uv * half_height);
-  memset(frame.buffer(kVPlane), 0, stride_uv * half_height);
-
-  send_stream_->Input()->SwapFrame(&frame);
-
-  EXPECT_NE(frame.buffer(kYPlane), old_y_buffer);
-
-  DestroyStreams();
-}
-
 TEST_F(VideoSendStreamTest, SupportsFec) {
   class FecObserver : public test::SendTest {
    public:
diff --git a/webrtc/video_engine/vie_capturer.cc b/webrtc/video_engine/vie_capturer.cc
index 6e69bb4f8..bc964250a 100644
--- a/webrtc/video_engine/vie_capturer.cc
+++ b/webrtc/video_engine/vie_capturer.cc
@@ -62,7 +62,7 @@ ViECapturer::ViECapturer(int capture_id,
                          ProcessThread& module_process_thread)
     : ViEFrameProviderBase(capture_id, engine_id),
       capture_cs_(CriticalSectionWrapper::CreateCriticalSection()),
-      deliver_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+      effects_and_stats_cs_(CriticalSectionWrapper::CreateCriticalSection()),
       capture_module_(NULL),
       external_capture_module_(NULL),
       module_process_thread_(module_process_thread),
@@ -359,13 +359,7 @@ void ViECapturer::OnIncomingCapturedFrame(const int32_t capture_id,
   TRACE_EVENT_ASYNC_BEGIN1("webrtc", "Video", video_frame.render_time_ms(),
                            "render_time", video_frame.render_time_ms());
 
-  if (video_frame.native_handle() != NULL) {
-    captured_frame_.reset(video_frame.CloneFrame());
-  } else {
-    if (captured_frame_ == NULL || captured_frame_->native_handle() != NULL)
-      captured_frame_.reset(new I420VideoFrame());
-    captured_frame_->SwapFrame(&video_frame);
-  }
+  captured_frame_ = video_frame;
   capture_event_.Set();
 }
 
@@ -380,7 +374,7 @@ void ViECapturer::OnCaptureDelayChanged(const int32_t id,
 
 int32_t ViECapturer::RegisterEffectFilter(
     ViEEffectFilter* effect_filter) {
-  CriticalSectionScoped cs(deliver_cs_.get());
+  CriticalSectionScoped cs(effects_and_stats_cs_.get());
 
   if (effect_filter != NULL && effect_filter_ != NULL) {
     LOG_F(LS_ERROR) << "Effect filter already registered.";
@@ -415,7 +409,7 @@ int32_t ViECapturer::DecImageProcRefCount() {
 }
 
 int32_t ViECapturer::EnableDeflickering(bool enable) {
-  CriticalSectionScoped cs(deliver_cs_.get());
+  CriticalSectionScoped cs(effects_and_stats_cs_.get());
   if (enable) {
     if (deflicker_frame_stats_) {
       return -1;
@@ -436,7 +430,7 @@
 }
 
 int32_t ViECapturer::EnableBrightnessAlarm(bool enable) {
-  CriticalSectionScoped cs(deliver_cs_.get());
+  CriticalSectionScoped cs(effects_and_stats_cs_.get());
   if (enable) {
     if (brightness_frame_stats_) {
       return -1;
     }
@@ -468,15 +462,19 @@ bool ViECapturer::ViECaptureProcess() {
     overuse_detector_->FrameProcessingStarted();
 
     int64_t encode_start_time = -1;
-    deliver_cs_->Enter();
-    if (SwapCapturedAndDeliverFrameIfAvailable()) {
-      capture_time = deliver_frame_->render_time_ms();
-      encode_start_time = Clock::GetRealTimeClock()->TimeInMilliseconds();
-      DeliverI420Frame(deliver_frame_.get());
-      if (deliver_frame_->native_handle() != NULL)
-        deliver_frame_.reset();  // Release the texture so it can be reused.
+    I420VideoFrame deliver_frame;
+    {
+      CriticalSectionScoped cs(capture_cs_.get());
+      if (!captured_frame_.IsZeroSize()) {
+        deliver_frame = captured_frame_;
+        captured_frame_.Reset();
+      }
+    }
+    if (!deliver_frame.IsZeroSize()) {
+      capture_time = deliver_frame.render_time_ms();
+      encode_start_time = Clock::GetRealTimeClock()->TimeInMilliseconds();
+      DeliverI420Frame(&deliver_frame);
     }
-    deliver_cs_->Leave();
 
     if (current_brightness_level_ != reported_brightness_level_) {
       CriticalSectionScoped cs(observer_cs_.get());
       if (observer_) {
@@ -504,46 +502,49 @@ void ViECapturer::DeliverI420Frame(I420VideoFrame* video_frame) {
   }
 
   // Apply image enhancement and effect filter.
-  if (deflicker_frame_stats_) {
-    if (image_proc_module_->GetFrameStats(deflicker_frame_stats_,
-                                          *video_frame) == 0) {
-      image_proc_module_->Deflickering(video_frame, deflicker_frame_stats_);
-    } else {
-      LOG_F(LS_ERROR) << "Could not get frame stats.";
-    }
-  }
-  if (brightness_frame_stats_) {
-    if (image_proc_module_->GetFrameStats(brightness_frame_stats_,
-                                          *video_frame) == 0) {
-      int32_t brightness = image_proc_module_->BrightnessDetection(
-          *video_frame, *brightness_frame_stats_);
-
-      switch (brightness) {
-        case VideoProcessingModule::kNoWarning:
-          current_brightness_level_ = Normal;
-          break;
-        case VideoProcessingModule::kDarkWarning:
-          current_brightness_level_ = Dark;
-          break;
-        case VideoProcessingModule::kBrightWarning:
-          current_brightness_level_ = Bright;
-          break;
-        default:
-          break;
+  {
+    CriticalSectionScoped cs(effects_and_stats_cs_.get());
+    if (deflicker_frame_stats_) {
+      if (image_proc_module_->GetFrameStats(deflicker_frame_stats_,
+                                            *video_frame) == 0) {
+        image_proc_module_->Deflickering(video_frame, deflicker_frame_stats_);
+      } else {
+        LOG_F(LS_ERROR) << "Could not get frame stats.";
       }
     }
-  }
-  if (effect_filter_) {
-    size_t length =
-        CalcBufferSize(kI420, video_frame->width(), video_frame->height());
-    rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[length]);
-    ExtractBuffer(*video_frame, length, video_buffer.get());
-    effect_filter_->Transform(length,
-                              video_buffer.get(),
-                              video_frame->ntp_time_ms(),
-                              video_frame->timestamp(),
-                              video_frame->width(),
-                              video_frame->height());
+    if (brightness_frame_stats_) {
+      if (image_proc_module_->GetFrameStats(brightness_frame_stats_,
+                                            *video_frame) == 0) {
+        int32_t brightness = image_proc_module_->BrightnessDetection(
+            *video_frame, *brightness_frame_stats_);
+
+        switch (brightness) {
+          case VideoProcessingModule::kNoWarning:
+            current_brightness_level_ = Normal;
+            break;
+          case VideoProcessingModule::kDarkWarning:
+            current_brightness_level_ = Dark;
+            break;
+          case VideoProcessingModule::kBrightWarning:
+            current_brightness_level_ = Bright;
+            break;
+          default:
+            break;
+        }
+      }
+    }
+    if (effect_filter_) {
+      size_t length =
+          CalcBufferSize(kI420, video_frame->width(), video_frame->height());
+      rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[length]);
+      ExtractBuffer(*video_frame, length, video_buffer.get());
+      effect_filter_->Transform(length,
+                                video_buffer.get(),
+                                video_frame->ntp_time_ms(),
+                                video_frame->timestamp(),
+                                video_frame->width(),
+                                video_frame->height());
+    }
   }
 
   // Deliver the captured frame to all observers (channels, renderer or file).
   ViEFrameProviderBase::DeliverFrame(video_frame, std::vector<uint32_t>());
@@ -600,13 +601,4 @@ void ViECapturer::OnNoPictureAlarm(const int32_t id,
   observer_->NoPictureAlarm(id, vie_alarm);
 }
 
-bool ViECapturer::SwapCapturedAndDeliverFrameIfAvailable() {
-  CriticalSectionScoped cs(capture_cs_.get());
-  if (captured_frame_ == NULL)
-    return false;
-
-  deliver_frame_.reset(captured_frame_.release());
-  return true;
-}
-
 }  // namespace webrtc
diff --git a/webrtc/video_engine/vie_capturer.h b/webrtc/video_engine/vie_capturer.h
index 9f077e101..681b7e09a 100644
--- a/webrtc/video_engine/vie_capturer.h
+++ b/webrtc/video_engine/vie_capturer.h
@@ -147,14 +147,12 @@ class ViECapturer
   static bool ViECaptureThreadFunction(void* obj);
   bool ViECaptureProcess();
 
+ private:
   void DeliverI420Frame(I420VideoFrame* video_frame);
 
- private:
-  bool SwapCapturedAndDeliverFrameIfAvailable();
-
-  // Never take capture_cs_ before deliver_cs_!
+  // Never take capture_cs_ before effects_and_stats_cs_!
   rtc::scoped_ptr<CriticalSectionWrapper> capture_cs_;
-  rtc::scoped_ptr<CriticalSectionWrapper> deliver_cs_;
+  rtc::scoped_ptr<CriticalSectionWrapper> effects_and_stats_cs_;
   VideoCaptureModule* capture_module_;
   VideoCaptureExternal* external_capture_module_;
   ProcessThread& module_process_thread_;
@@ -171,15 +169,16 @@ class ViECapturer
 
   volatile int stop_;
 
-  rtc::scoped_ptr<I420VideoFrame> captured_frame_;
-  rtc::scoped_ptr<I420VideoFrame> deliver_frame_;
+  I420VideoFrame captured_frame_ GUARDED_BY(capture_cs_.get());
 
   // Image processing.
-  ViEEffectFilter* effect_filter_;
+  ViEEffectFilter* effect_filter_ GUARDED_BY(effects_and_stats_cs_.get());
   VideoProcessingModule* image_proc_module_;
   int image_proc_module_ref_counter_;
-  VideoProcessingModule::FrameStats* deflicker_frame_stats_;
-  VideoProcessingModule::FrameStats* brightness_frame_stats_;
+  VideoProcessingModule::FrameStats* deflicker_frame_stats_
+      GUARDED_BY(effects_and_stats_cs_.get());
+  VideoProcessingModule::FrameStats* brightness_frame_stats_
+      GUARDED_BY(effects_and_stats_cs_.get());
   Brightness current_brightness_level_;
   Brightness reported_brightness_level_;
diff --git a/webrtc/video_frame.h b/webrtc/video_frame.h
index 2fc8b9ec6..6ba2971eb 100644
--- a/webrtc/video_frame.h
+++ b/webrtc/video_frame.h
@@ -85,6 +85,9 @@ class I420VideoFrame {
   // Swap Frame.
   void SwapFrame(I420VideoFrame* videoFrame);
 
+  // Release frame buffer and reset time stamps.
+  void Reset();
+
   // Get pointer to buffer per plane.
   uint8_t* buffer(PlaneType type);
   // Overloading with const.
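
Note on the locking change in vie_capturer.cc: captured_frame_ is now held by
value and handed off by copy under capture_cs_, so delivery runs without the
lock the capture callback takes, and only the effect-filter and frame-stats
state still needs effects_and_stats_cs_ inside DeliverI420Frame(). A minimal
standalone sketch of that handoff pattern, using std::mutex and a stand-in
Frame type rather than the real WebRTC classes:

  #include <cstdint>
  #include <cstdio>
  #include <mutex>

  // Stand-in for I420VideoFrame: zero size means "no pending frame".
  struct Frame {
    int width = 0;
    int height = 0;
    int64_t render_time_ms = 0;
    bool IsZeroSize() const { return width == 0 && height == 0; }
    void Reset() { *this = Frame(); }
  };

  std::mutex capture_mutex;  // plays the role of capture_cs_
  Frame captured_frame;      // most recent frame from the capture callback

  // Capture side: overwrite the pending frame while holding the lock.
  void OnIncomingCapturedFrame(const Frame& frame) {
    std::lock_guard<std::mutex> lock(capture_mutex);
    captured_frame = frame;
  }

  // Process side: copy the frame out and clear the slot under the lock,
  // then do the potentially slow delivery with the lock released.
  void CaptureProcess() {
    Frame deliver_frame;
    {
      std::lock_guard<std::mutex> lock(capture_mutex);
      if (!captured_frame.IsZeroSize()) {
        deliver_frame = captured_frame;
        captured_frame.Reset();
      }
    }
    if (!deliver_frame.IsZeroSize()) {
      std::printf("delivering frame captured at %lld ms\n",
                  static_cast<long long>(deliver_frame.render_time_ms));
    }
  }

  int main() {
    OnIncomingCapturedFrame(Frame{640, 480, 1234});
    CaptureProcess();  // copies the frame out, clears the slot, delivers
    CaptureProcess();  // no pending frame, so nothing is delivered
    return 0;
  }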