Remove the need for scoped_ptr<I420VideoFrame> in ViECapturer.

This adds the method I420VideoFrame::Reset and replaces the use of
scoped_ptr in ViECapturer. Also, a unittest is added to check that
ViECapturer does not retain a frame after it has been delivered.

BUG=1128
R=mflodman@webrtc.org, pbos@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/43669004

Cr-Commit-Position: refs/heads/master@{#8678}
git-svn-id: http://webrtc.googlecode.com/svn/trunk@8678 4adac7df-926f-26a2-2b94-8c16560cd09d
commit 0d9bb8e499
parent 9bfa5f0405
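For context, here is a minimal, self-contained sketch of the ownership pattern this change moves to: the capturer holds the captured frame by value, hands it off under the capture lock, and calls Reset() so it does not retain the frame after delivery. The Frame, Buffer and Capturer types below are illustrative stand-ins (assumptions for the sketch), not the WebRTC classes touched in the diff; the actual change is in the hunks that follow.

// Illustrative sketch only; types here are stand-ins, not the WebRTC classes.
#include <cstdio>
#include <memory>
#include <mutex>

struct Buffer {};  // Stand-in for a ref-counted I420 buffer.

class Frame {
 public:
  bool IsZeroSize() const { return buffer_ == nullptr; }
  void Allocate() { buffer_ = std::make_shared<Buffer>(); }
  void Reset() { buffer_ = nullptr; }  // Drop the buffer reference.

 private:
  std::shared_ptr<Buffer> buffer_;  // Shared, so copying a Frame is cheap.
};

class Capturer {
 public:
  // Called on the capture thread.
  void OnIncomingCapturedFrame(const Frame& frame) {
    std::lock_guard<std::mutex> lock(capture_mutex_);
    captured_frame_ = frame;
  }

  // Called on the delivery thread.
  void Process() {
    Frame deliver_frame;
    {
      std::lock_guard<std::mutex> lock(capture_mutex_);
      if (!captured_frame_.IsZeroSize()) {
        deliver_frame = captured_frame_;
        captured_frame_.Reset();  // Capturer no longer retains the frame.
      }
    }
    if (!deliver_frame.IsZeroSize())
      Deliver(deliver_frame);
  }

 private:
  void Deliver(const Frame&) { std::puts("frame delivered"); }

  std::mutex capture_mutex_;
  Frame captured_frame_;  // Held by value; no scoped_ptr needed.
};

int main() {
  Capturer capturer;
  Frame frame;
  frame.Allocate();
  capturer.OnIncomingCapturedFrame(frame);
  capturer.Process();  // Prints "frame delivered".
}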
@@ -18,11 +18,10 @@
 
 namespace webrtc {
 
-I420VideoFrame::I420VideoFrame()
-    : timestamp_(0),
-      ntp_time_ms_(0),
-      render_time_ms_(0),
-      rotation_(kVideoRotation_0) {
+I420VideoFrame::I420VideoFrame() {
+  // Intentionally using Reset instead of initializer list so that any missed
+  // fields in Reset will be caught by memory checkers.
+  Reset();
 }
 
 I420VideoFrame::I420VideoFrame(
@@ -156,6 +155,14 @@ void I420VideoFrame::SwapFrame(I420VideoFrame* videoFrame) {
   std::swap(rotation_, videoFrame->rotation_);
 }
 
+void I420VideoFrame::Reset() {
+  video_frame_buffer_ = nullptr;
+  timestamp_ = 0;
+  ntp_time_ms_ = 0;
+  render_time_ms_ = 0;
+  rotation_ = kVideoRotation_0;
+}
+
 uint8_t* I420VideoFrame::buffer(PlaneType type) {
   return video_frame_buffer_ ? video_frame_buffer_->data(type) : nullptr;
 }
@@ -130,6 +130,21 @@ TEST(TestI420VideoFrame, CopyFrame) {
   EXPECT_TRUE(EqualFrames(small_frame, big_frame));
 }
 
+TEST(TestI420VideoFrame, Reset) {
+  I420VideoFrame frame;
+  ASSERT_TRUE(frame.CreateEmptyFrame(5, 5, 5, 5, 5) == 0);
+  frame.set_ntp_time_ms(1);
+  frame.set_timestamp(2);
+  frame.set_render_time_ms(3);
+  ASSERT_TRUE(frame.video_frame_buffer() != NULL);
+
+  frame.Reset();
+  EXPECT_EQ(0u, frame.ntp_time_ms());
+  EXPECT_EQ(0u, frame.render_time_ms());
+  EXPECT_EQ(0u, frame.timestamp());
+  EXPECT_TRUE(frame.video_frame_buffer() == NULL);
+}
+
 TEST(TestI420VideoFrame, CloneFrame) {
   I420VideoFrame frame1;
   rtc::scoped_ptr<I420VideoFrame> frame2;
@@ -69,10 +69,10 @@ class I420Buffer : public VideoFrameBuffer {
   int stride(PlaneType type) const override;
   rtc::scoped_refptr<NativeHandle> native_handle() const override;
 
- private:
-  friend class rtc::RefCountedObject<I420Buffer>;
+ protected:
   ~I420Buffer() override;
 
+ private:
   const int width_;
   const int height_;
   const int stride_y_;
@@ -266,35 +266,6 @@ class FakeReceiveStatistics : public NullReceiveStatistics {
   StatisticianMap stats_map_;
 };
 
-TEST_F(VideoSendStreamTest, SwapsI420VideoFrames) {
-  static const size_t kWidth = 320;
-  static const size_t kHeight = 240;
-
-  test::NullTransport transport;
-  Call::Config call_config(&transport);
-  CreateSenderCall(call_config);
-
-  CreateSendConfig(1);
-  CreateStreams();
-  send_stream_->Start();
-
-  I420VideoFrame frame;
-  const int stride_uv = (kWidth + 1) / 2;
-  frame.CreateEmptyFrame(kWidth, kHeight, kWidth, stride_uv, stride_uv);
-  uint8_t* old_y_buffer = frame.buffer(kYPlane);
-  // Initialize memory to avoid DrMemory errors.
-  const int half_height = (kHeight + 1) / 2;
-  memset(frame.buffer(kYPlane), 0, kWidth * kHeight);
-  memset(frame.buffer(kUPlane), 0, stride_uv * half_height);
-  memset(frame.buffer(kVPlane), 0, stride_uv * half_height);
-
-  send_stream_->Input()->SwapFrame(&frame);
-
-  EXPECT_NE(frame.buffer(kYPlane), old_y_buffer);
-
-  DestroyStreams();
-}
-
 TEST_F(VideoSendStreamTest, SupportsFec) {
   class FecObserver : public test::SendTest {
    public:
@@ -62,7 +62,7 @@ ViECapturer::ViECapturer(int capture_id,
                          ProcessThread& module_process_thread)
     : ViEFrameProviderBase(capture_id, engine_id),
       capture_cs_(CriticalSectionWrapper::CreateCriticalSection()),
-      deliver_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+      effects_and_stats_cs_(CriticalSectionWrapper::CreateCriticalSection()),
       capture_module_(NULL),
       external_capture_module_(NULL),
       module_process_thread_(module_process_thread),
@@ -359,13 +359,7 @@ void ViECapturer::OnIncomingCapturedFrame(const int32_t capture_id,
   TRACE_EVENT_ASYNC_BEGIN1("webrtc", "Video", video_frame.render_time_ms(),
                            "render_time", video_frame.render_time_ms());
 
-  if (video_frame.native_handle() != NULL) {
-    captured_frame_.reset(video_frame.CloneFrame());
-  } else {
-    if (captured_frame_ == NULL || captured_frame_->native_handle() != NULL)
-      captured_frame_.reset(new I420VideoFrame());
-    captured_frame_->SwapFrame(&video_frame);
-  }
+  captured_frame_ = video_frame;
   capture_event_.Set();
 }
 
@@ -380,7 +374,7 @@ void ViECapturer::OnCaptureDelayChanged(const int32_t id,
 
 int32_t ViECapturer::RegisterEffectFilter(
     ViEEffectFilter* effect_filter) {
-  CriticalSectionScoped cs(deliver_cs_.get());
+  CriticalSectionScoped cs(effects_and_stats_cs_.get());
 
   if (effect_filter != NULL && effect_filter_ != NULL) {
     LOG_F(LS_ERROR) << "Effect filter already registered.";
@@ -415,7 +409,7 @@ int32_t ViECapturer::DecImageProcRefCount() {
 }
 
 int32_t ViECapturer::EnableDeflickering(bool enable) {
-  CriticalSectionScoped cs(deliver_cs_.get());
+  CriticalSectionScoped cs(effects_and_stats_cs_.get());
   if (enable) {
     if (deflicker_frame_stats_) {
       return -1;
@@ -436,7 +430,7 @@ int32_t ViECapturer::EnableDeflickering(bool enable) {
 }
 
 int32_t ViECapturer::EnableBrightnessAlarm(bool enable) {
-  CriticalSectionScoped cs(deliver_cs_.get());
+  CriticalSectionScoped cs(effects_and_stats_cs_.get());
   if (enable) {
     if (brightness_frame_stats_) {
       return -1;
@@ -468,15 +462,19 @@ bool ViECapturer::ViECaptureProcess() {
 
     overuse_detector_->FrameProcessingStarted();
     int64_t encode_start_time = -1;
-    deliver_cs_->Enter();
-    if (SwapCapturedAndDeliverFrameIfAvailable()) {
-      capture_time = deliver_frame_->render_time_ms();
-      encode_start_time = Clock::GetRealTimeClock()->TimeInMilliseconds();
-      DeliverI420Frame(deliver_frame_.get());
-      if (deliver_frame_->native_handle() != NULL)
-        deliver_frame_.reset();  // Release the texture so it can be reused.
-    }
-    deliver_cs_->Leave();
+    I420VideoFrame deliver_frame;
+    {
+      CriticalSectionScoped cs(capture_cs_.get());
+      if (!captured_frame_.IsZeroSize()) {
+        deliver_frame = captured_frame_;
+        captured_frame_.Reset();
+      }
+    }
+    if (!deliver_frame.IsZeroSize()) {
+      capture_time = deliver_frame.render_time_ms();
+      encode_start_time = Clock::GetRealTimeClock()->TimeInMilliseconds();
+      DeliverI420Frame(&deliver_frame);
+    }
     if (current_brightness_level_ != reported_brightness_level_) {
       CriticalSectionScoped cs(observer_cs_.get());
       if (observer_) {
@@ -504,46 +502,49 @@ void ViECapturer::DeliverI420Frame(I420VideoFrame* video_frame) {
   }
 
   // Apply image enhancement and effect filter.
-  if (deflicker_frame_stats_) {
-    if (image_proc_module_->GetFrameStats(deflicker_frame_stats_,
-                                          *video_frame) == 0) {
-      image_proc_module_->Deflickering(video_frame, deflicker_frame_stats_);
-    } else {
-      LOG_F(LS_ERROR) << "Could not get frame stats.";
-    }
-  }
-  if (brightness_frame_stats_) {
-    if (image_proc_module_->GetFrameStats(brightness_frame_stats_,
-                                          *video_frame) == 0) {
-      int32_t brightness = image_proc_module_->BrightnessDetection(
-          *video_frame, *brightness_frame_stats_);
-
-      switch (brightness) {
-        case VideoProcessingModule::kNoWarning:
-          current_brightness_level_ = Normal;
-          break;
-        case VideoProcessingModule::kDarkWarning:
-          current_brightness_level_ = Dark;
-          break;
-        case VideoProcessingModule::kBrightWarning:
-          current_brightness_level_ = Bright;
-          break;
-        default:
-          break;
-      }
-    }
-  }
-  if (effect_filter_) {
-    size_t length =
-        CalcBufferSize(kI420, video_frame->width(), video_frame->height());
-    rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[length]);
-    ExtractBuffer(*video_frame, length, video_buffer.get());
-    effect_filter_->Transform(length,
-                              video_buffer.get(),
-                              video_frame->ntp_time_ms(),
-                              video_frame->timestamp(),
-                              video_frame->width(),
-                              video_frame->height());
-  }
+  {
+    CriticalSectionScoped cs(effects_and_stats_cs_.get());
+    if (deflicker_frame_stats_) {
+      if (image_proc_module_->GetFrameStats(deflicker_frame_stats_,
+                                            *video_frame) == 0) {
+        image_proc_module_->Deflickering(video_frame, deflicker_frame_stats_);
+      } else {
+        LOG_F(LS_ERROR) << "Could not get frame stats.";
+      }
+    }
+    if (brightness_frame_stats_) {
+      if (image_proc_module_->GetFrameStats(brightness_frame_stats_,
+                                            *video_frame) == 0) {
+        int32_t brightness = image_proc_module_->BrightnessDetection(
+            *video_frame, *brightness_frame_stats_);
+
+        switch (brightness) {
+          case VideoProcessingModule::kNoWarning:
+            current_brightness_level_ = Normal;
+            break;
+          case VideoProcessingModule::kDarkWarning:
+            current_brightness_level_ = Dark;
+            break;
+          case VideoProcessingModule::kBrightWarning:
+            current_brightness_level_ = Bright;
+            break;
+          default:
+            break;
+        }
+      }
+    }
+    if (effect_filter_) {
+      size_t length =
+          CalcBufferSize(kI420, video_frame->width(), video_frame->height());
+      rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[length]);
+      ExtractBuffer(*video_frame, length, video_buffer.get());
+      effect_filter_->Transform(length,
+                                video_buffer.get(),
+                                video_frame->ntp_time_ms(),
+                                video_frame->timestamp(),
+                                video_frame->width(),
+                                video_frame->height());
+    }
+  }
   // Deliver the captured frame to all observers (channels, renderer or file).
   ViEFrameProviderBase::DeliverFrame(video_frame, std::vector<uint32_t>());
@@ -600,13 +601,4 @@ void ViECapturer::OnNoPictureAlarm(const int32_t id,
     observer_->NoPictureAlarm(id, vie_alarm);
 }
 
-bool ViECapturer::SwapCapturedAndDeliverFrameIfAvailable() {
-  CriticalSectionScoped cs(capture_cs_.get());
-  if (captured_frame_ == NULL)
-    return false;
-
-  deliver_frame_.reset(captured_frame_.release());
-  return true;
-}
-
 }  // namespace webrtc
@@ -147,14 +147,12 @@ class ViECapturer
   static bool ViECaptureThreadFunction(void* obj);
   bool ViECaptureProcess();
 
+ private:
   void DeliverI420Frame(I420VideoFrame* video_frame);
 
- private:
-  bool SwapCapturedAndDeliverFrameIfAvailable();
-
-  // Never take capture_cs_ before deliver_cs_!
+  // Never take capture_cs_ before effects_and_stats_cs_!
   rtc::scoped_ptr<CriticalSectionWrapper> capture_cs_;
-  rtc::scoped_ptr<CriticalSectionWrapper> deliver_cs_;
+  rtc::scoped_ptr<CriticalSectionWrapper> effects_and_stats_cs_;
   VideoCaptureModule* capture_module_;
   VideoCaptureExternal* external_capture_module_;
   ProcessThread& module_process_thread_;
@@ -171,15 +169,16 @@ class ViECapturer
 
   volatile int stop_;
 
-  rtc::scoped_ptr<I420VideoFrame> captured_frame_;
-  rtc::scoped_ptr<I420VideoFrame> deliver_frame_;
+  I420VideoFrame captured_frame_ GUARDED_BY(capture_cs_.get());
 
   // Image processing.
-  ViEEffectFilter* effect_filter_;
+  ViEEffectFilter* effect_filter_ GUARDED_BY(effects_and_stats_cs_.get());
   VideoProcessingModule* image_proc_module_;
   int image_proc_module_ref_counter_;
-  VideoProcessingModule::FrameStats* deflicker_frame_stats_;
-  VideoProcessingModule::FrameStats* brightness_frame_stats_;
+  VideoProcessingModule::FrameStats* deflicker_frame_stats_
+      GUARDED_BY(effects_and_stats_cs_.get());
+  VideoProcessingModule::FrameStats* brightness_frame_stats_
+      GUARDED_BY(effects_and_stats_cs_.get());
   Brightness current_brightness_level_;
   Brightness reported_brightness_level_;
 
@@ -85,6 +85,9 @@ class I420VideoFrame {
   // Swap Frame.
   void SwapFrame(I420VideoFrame* videoFrame);
 
+  // Release frame buffer and reset time stamps.
+  void Reset();
+
   // Get pointer to buffer per plane.
   uint8_t* buffer(PlaneType type);
   // Overloading with const.