Remove the need for scoped_ptr<I420VideoFrame> in VieCapturer.

Remove the need for scoped_ptr<I420VideoFrame> in VieCapturer.
This adds the method I420VideoFrame::Reset and replaces the use of scoped_ptr in ViECapturer.
Also, a unit test is added to check that ViECapturer does not retain a frame after it has been delivered.

BUG=1128
R=mflodman@webrtc.org, pbos@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/43669004

Cr-Commit-Position: refs/heads/master@{#8678}
git-svn-id: http://webrtc.googlecode.com/svn/trunk@8678 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
magjed@webrtc.org 2015-03-11 10:06:58 +00:00
parent 9bfa5f0405
commit 0d9bb8e499
7 changed files with 99 additions and 112 deletions

View File

@ -18,11 +18,10 @@
namespace webrtc { namespace webrtc {
I420VideoFrame::I420VideoFrame() I420VideoFrame::I420VideoFrame() {
: timestamp_(0), // Intentionally using Reset instead of initializer list so that any missed
ntp_time_ms_(0), // fields in Reset will be caught by memory checkers.
render_time_ms_(0), Reset();
rotation_(kVideoRotation_0) {
} }
I420VideoFrame::I420VideoFrame( I420VideoFrame::I420VideoFrame(
@ -156,6 +155,14 @@ void I420VideoFrame::SwapFrame(I420VideoFrame* videoFrame) {
std::swap(rotation_, videoFrame->rotation_); std::swap(rotation_, videoFrame->rotation_);
} }
void I420VideoFrame::Reset() {
video_frame_buffer_ = nullptr;
timestamp_ = 0;
ntp_time_ms_ = 0;
render_time_ms_ = 0;
rotation_ = kVideoRotation_0;
}
uint8_t* I420VideoFrame::buffer(PlaneType type) { uint8_t* I420VideoFrame::buffer(PlaneType type) {
return video_frame_buffer_ ? video_frame_buffer_->data(type) : nullptr; return video_frame_buffer_ ? video_frame_buffer_->data(type) : nullptr;
} }

View File

@ -130,6 +130,21 @@ TEST(TestI420VideoFrame, CopyFrame) {
EXPECT_TRUE(EqualFrames(small_frame, big_frame)); EXPECT_TRUE(EqualFrames(small_frame, big_frame));
} }
TEST(TestI420VideoFrame, Reset) {
I420VideoFrame frame;
ASSERT_TRUE(frame.CreateEmptyFrame(5, 5, 5, 5, 5) == 0);
frame.set_ntp_time_ms(1);
frame.set_timestamp(2);
frame.set_render_time_ms(3);
ASSERT_TRUE(frame.video_frame_buffer() != NULL);
frame.Reset();
EXPECT_EQ(0u, frame.ntp_time_ms());
EXPECT_EQ(0u, frame.render_time_ms());
EXPECT_EQ(0u, frame.timestamp());
EXPECT_TRUE(frame.video_frame_buffer() == NULL);
}
TEST(TestI420VideoFrame, CloneFrame) { TEST(TestI420VideoFrame, CloneFrame) {
I420VideoFrame frame1; I420VideoFrame frame1;
rtc::scoped_ptr<I420VideoFrame> frame2; rtc::scoped_ptr<I420VideoFrame> frame2;

View File

@ -69,10 +69,10 @@ class I420Buffer : public VideoFrameBuffer {
int stride(PlaneType type) const override; int stride(PlaneType type) const override;
rtc::scoped_refptr<NativeHandle> native_handle() const override; rtc::scoped_refptr<NativeHandle> native_handle() const override;
private: protected:
friend class rtc::RefCountedObject<I420Buffer>;
~I420Buffer() override; ~I420Buffer() override;
private:
const int width_; const int width_;
const int height_; const int height_;
const int stride_y_; const int stride_y_;

View File

@ -266,35 +266,6 @@ class FakeReceiveStatistics : public NullReceiveStatistics {
StatisticianMap stats_map_; StatisticianMap stats_map_;
}; };
TEST_F(VideoSendStreamTest, SwapsI420VideoFrames) {
static const size_t kWidth = 320;
static const size_t kHeight = 240;
test::NullTransport transport;
Call::Config call_config(&transport);
CreateSenderCall(call_config);
CreateSendConfig(1);
CreateStreams();
send_stream_->Start();
I420VideoFrame frame;
const int stride_uv = (kWidth + 1) / 2;
frame.CreateEmptyFrame(kWidth, kHeight, kWidth, stride_uv, stride_uv);
uint8_t* old_y_buffer = frame.buffer(kYPlane);
// Initialize memory to avoid DrMemory errors.
const int half_height = (kHeight + 1) / 2;
memset(frame.buffer(kYPlane), 0, kWidth * kHeight);
memset(frame.buffer(kUPlane), 0, stride_uv * half_height);
memset(frame.buffer(kVPlane), 0, stride_uv * half_height);
send_stream_->Input()->SwapFrame(&frame);
EXPECT_NE(frame.buffer(kYPlane), old_y_buffer);
DestroyStreams();
}
TEST_F(VideoSendStreamTest, SupportsFec) { TEST_F(VideoSendStreamTest, SupportsFec) {
class FecObserver : public test::SendTest { class FecObserver : public test::SendTest {
public: public:

View File

@ -62,7 +62,7 @@ ViECapturer::ViECapturer(int capture_id,
ProcessThread& module_process_thread) ProcessThread& module_process_thread)
: ViEFrameProviderBase(capture_id, engine_id), : ViEFrameProviderBase(capture_id, engine_id),
capture_cs_(CriticalSectionWrapper::CreateCriticalSection()), capture_cs_(CriticalSectionWrapper::CreateCriticalSection()),
deliver_cs_(CriticalSectionWrapper::CreateCriticalSection()), effects_and_stats_cs_(CriticalSectionWrapper::CreateCriticalSection()),
capture_module_(NULL), capture_module_(NULL),
external_capture_module_(NULL), external_capture_module_(NULL),
module_process_thread_(module_process_thread), module_process_thread_(module_process_thread),
@ -359,13 +359,7 @@ void ViECapturer::OnIncomingCapturedFrame(const int32_t capture_id,
TRACE_EVENT_ASYNC_BEGIN1("webrtc", "Video", video_frame.render_time_ms(), TRACE_EVENT_ASYNC_BEGIN1("webrtc", "Video", video_frame.render_time_ms(),
"render_time", video_frame.render_time_ms()); "render_time", video_frame.render_time_ms());
if (video_frame.native_handle() != NULL) { captured_frame_ = video_frame;
captured_frame_.reset(video_frame.CloneFrame());
} else {
if (captured_frame_ == NULL || captured_frame_->native_handle() != NULL)
captured_frame_.reset(new I420VideoFrame());
captured_frame_->SwapFrame(&video_frame);
}
capture_event_.Set(); capture_event_.Set();
} }
@ -380,7 +374,7 @@ void ViECapturer::OnCaptureDelayChanged(const int32_t id,
int32_t ViECapturer::RegisterEffectFilter( int32_t ViECapturer::RegisterEffectFilter(
ViEEffectFilter* effect_filter) { ViEEffectFilter* effect_filter) {
CriticalSectionScoped cs(deliver_cs_.get()); CriticalSectionScoped cs(effects_and_stats_cs_.get());
if (effect_filter != NULL && effect_filter_ != NULL) { if (effect_filter != NULL && effect_filter_ != NULL) {
LOG_F(LS_ERROR) << "Effect filter already registered."; LOG_F(LS_ERROR) << "Effect filter already registered.";
@ -415,7 +409,7 @@ int32_t ViECapturer::DecImageProcRefCount() {
} }
int32_t ViECapturer::EnableDeflickering(bool enable) { int32_t ViECapturer::EnableDeflickering(bool enable) {
CriticalSectionScoped cs(deliver_cs_.get()); CriticalSectionScoped cs(effects_and_stats_cs_.get());
if (enable) { if (enable) {
if (deflicker_frame_stats_) { if (deflicker_frame_stats_) {
return -1; return -1;
@ -436,7 +430,7 @@ int32_t ViECapturer::EnableDeflickering(bool enable) {
} }
int32_t ViECapturer::EnableBrightnessAlarm(bool enable) { int32_t ViECapturer::EnableBrightnessAlarm(bool enable) {
CriticalSectionScoped cs(deliver_cs_.get()); CriticalSectionScoped cs(effects_and_stats_cs_.get());
if (enable) { if (enable) {
if (brightness_frame_stats_) { if (brightness_frame_stats_) {
return -1; return -1;
@ -468,15 +462,19 @@ bool ViECapturer::ViECaptureProcess() {
overuse_detector_->FrameProcessingStarted(); overuse_detector_->FrameProcessingStarted();
int64_t encode_start_time = -1; int64_t encode_start_time = -1;
deliver_cs_->Enter(); I420VideoFrame deliver_frame;
if (SwapCapturedAndDeliverFrameIfAvailable()) { {
capture_time = deliver_frame_->render_time_ms(); CriticalSectionScoped cs(capture_cs_.get());
encode_start_time = Clock::GetRealTimeClock()->TimeInMilliseconds(); if (!captured_frame_.IsZeroSize()) {
DeliverI420Frame(deliver_frame_.get()); deliver_frame = captured_frame_;
if (deliver_frame_->native_handle() != NULL) captured_frame_.Reset();
deliver_frame_.reset(); // Release the texture so it can be reused. }
}
if (!deliver_frame.IsZeroSize()) {
capture_time = deliver_frame.render_time_ms();
encode_start_time = Clock::GetRealTimeClock()->TimeInMilliseconds();
DeliverI420Frame(&deliver_frame);
} }
deliver_cs_->Leave();
if (current_brightness_level_ != reported_brightness_level_) { if (current_brightness_level_ != reported_brightness_level_) {
CriticalSectionScoped cs(observer_cs_.get()); CriticalSectionScoped cs(observer_cs_.get());
if (observer_) { if (observer_) {
@ -504,6 +502,8 @@ void ViECapturer::DeliverI420Frame(I420VideoFrame* video_frame) {
} }
// Apply image enhancement and effect filter. // Apply image enhancement and effect filter.
{
CriticalSectionScoped cs(effects_and_stats_cs_.get());
if (deflicker_frame_stats_) { if (deflicker_frame_stats_) {
if (image_proc_module_->GetFrameStats(deflicker_frame_stats_, if (image_proc_module_->GetFrameStats(deflicker_frame_stats_,
*video_frame) == 0) { *video_frame) == 0) {
@ -545,6 +545,7 @@ void ViECapturer::DeliverI420Frame(I420VideoFrame* video_frame) {
video_frame->width(), video_frame->width(),
video_frame->height()); video_frame->height());
} }
}
// Deliver the captured frame to all observers (channels, renderer or file). // Deliver the captured frame to all observers (channels, renderer or file).
ViEFrameProviderBase::DeliverFrame(video_frame, std::vector<uint32_t>()); ViEFrameProviderBase::DeliverFrame(video_frame, std::vector<uint32_t>());
} }
@ -600,13 +601,4 @@ void ViECapturer::OnNoPictureAlarm(const int32_t id,
observer_->NoPictureAlarm(id, vie_alarm); observer_->NoPictureAlarm(id, vie_alarm);
} }
bool ViECapturer::SwapCapturedAndDeliverFrameIfAvailable() {
CriticalSectionScoped cs(capture_cs_.get());
if (captured_frame_ == NULL)
return false;
deliver_frame_.reset(captured_frame_.release());
return true;
}
} // namespace webrtc } // namespace webrtc

View File

@ -147,14 +147,12 @@ class ViECapturer
static bool ViECaptureThreadFunction(void* obj); static bool ViECaptureThreadFunction(void* obj);
bool ViECaptureProcess(); bool ViECaptureProcess();
private:
void DeliverI420Frame(I420VideoFrame* video_frame); void DeliverI420Frame(I420VideoFrame* video_frame);
private: // Never take capture_cs_ before effects_and_stats_cs_!
bool SwapCapturedAndDeliverFrameIfAvailable();
// Never take capture_cs_ before deliver_cs_!
rtc::scoped_ptr<CriticalSectionWrapper> capture_cs_; rtc::scoped_ptr<CriticalSectionWrapper> capture_cs_;
rtc::scoped_ptr<CriticalSectionWrapper> deliver_cs_; rtc::scoped_ptr<CriticalSectionWrapper> effects_and_stats_cs_;
VideoCaptureModule* capture_module_; VideoCaptureModule* capture_module_;
VideoCaptureExternal* external_capture_module_; VideoCaptureExternal* external_capture_module_;
ProcessThread& module_process_thread_; ProcessThread& module_process_thread_;
@ -171,15 +169,16 @@ class ViECapturer
volatile int stop_; volatile int stop_;
rtc::scoped_ptr<I420VideoFrame> captured_frame_; I420VideoFrame captured_frame_ GUARDED_BY(capture_cs_.get());
rtc::scoped_ptr<I420VideoFrame> deliver_frame_;
// Image processing. // Image processing.
ViEEffectFilter* effect_filter_; ViEEffectFilter* effect_filter_ GUARDED_BY(effects_and_stats_cs_.get());
VideoProcessingModule* image_proc_module_; VideoProcessingModule* image_proc_module_;
int image_proc_module_ref_counter_; int image_proc_module_ref_counter_;
VideoProcessingModule::FrameStats* deflicker_frame_stats_; VideoProcessingModule::FrameStats* deflicker_frame_stats_
VideoProcessingModule::FrameStats* brightness_frame_stats_; GUARDED_BY(effects_and_stats_cs_.get());
VideoProcessingModule::FrameStats* brightness_frame_stats_
GUARDED_BY(effects_and_stats_cs_.get());
Brightness current_brightness_level_; Brightness current_brightness_level_;
Brightness reported_brightness_level_; Brightness reported_brightness_level_;

View File

@ -85,6 +85,9 @@ class I420VideoFrame {
// Swap Frame. // Swap Frame.
void SwapFrame(I420VideoFrame* videoFrame); void SwapFrame(I420VideoFrame* videoFrame);
// Release frame buffer and reset time stamps.
void Reset();
// Get pointer to buffer per plane. // Get pointer to buffer per plane.
uint8_t* buffer(PlaneType type); uint8_t* buffer(PlaneType type);
// Overloading with const. // Overloading with const.