Revert 8136 "Remove frame copy in ViEExternalRendererImpl::Rende..."

> Remove frame copy in ViEExternalRendererImpl::RenderFrame
> 
> Add new interface for delivering frames to ExternalRenderer. The purpose is to avoid having to extract a packed buffer from I420VideoFrame, which will cause a deep frame copy.
> 
> BUG=1128
> R=mflodman@webrtc.org
> 
> Review URL: https://webrtc-codereview.appspot.com/36489004

TBR=magjed@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/37749004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@8144 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
tkchin@webrtc.org 2015-01-23 21:20:41 +00:00
parent 0f98844749
commit 7519de519e
8 changed files with 34 additions and 118 deletions

View File

@@ -417,40 +417,6 @@ class WebRtcRenderAdapter : public webrtc::ExternalRenderer {
}
}
virtual int DeliverI420Frame(const webrtc::I420VideoFrame* webrtc_frame) {
rtc::CritScope cs(&crit_);
DCHECK(webrtc_frame);
if (capture_start_rtp_time_stamp_ < 0)
capture_start_rtp_time_stamp_ = webrtc_frame->timestamp();
const int kVideoCodecClockratekHz = cricket::kVideoCodecClockrate / 1000;
const int64 elapsed_time_ms =
(rtp_ts_wraparound_handler_.Unwrap(webrtc_frame->timestamp()) -
capture_start_rtp_time_stamp_) /
kVideoCodecClockratekHz;
if (webrtc_frame->ntp_time_ms() > 0) {
capture_start_ntp_time_ms_ =
webrtc_frame->ntp_time_ms() - elapsed_time_ms;
}
frame_rate_tracker_.Update(1);
if (!renderer_)
return 0;
const int64 elapsed_time_ns =
elapsed_time_ms * rtc::kNumNanosecsPerMillisec;
const int64 render_time_ns =
webrtc_frame->render_time_ms() * rtc::kNumNanosecsPerMillisec;
if (!webrtc_frame->native_handle()) {
WebRtcVideoRenderFrame cricket_frame(webrtc_frame, render_time_ns,
elapsed_time_ns);
return renderer_->RenderFrame(&cricket_frame) ? 0 : -1;
} else {
return DeliverTextureFrame(webrtc_frame->native_handle(), render_time_ns,
elapsed_time_ns);
}
}
virtual bool IsTextureSupported() { return true; }
int DeliverBufferFrame(unsigned char* buffer, size_t buffer_size,

View File

@@ -361,18 +361,7 @@ void WebRtcVideoFrame::InitToEmptyBuffer(int w, int h, size_t pixel_width,
WebRtcVideoRenderFrame::WebRtcVideoRenderFrame(
const webrtc::I420VideoFrame* frame)
: frame_(frame),
// Convert millisecond render time to ns timestamp.
// Convert 90K rtp timestamp to ns timestamp.
timestamp_((frame_->timestamp() / 90) * rtc::kNumNanosecsPerMillisec),
elapsed_time_(frame_->render_time_ms() * rtc::kNumNanosecsPerMillisec) {
}
WebRtcVideoRenderFrame::WebRtcVideoRenderFrame(
const webrtc::I420VideoFrame* frame,
int64_t timestamp,
int64_t elapsed_time)
: frame_(frame), timestamp_(timestamp), elapsed_time_(elapsed_time) {
: frame_(frame) {
}
bool WebRtcVideoRenderFrame::InitToBlack(int w,
@@ -453,16 +442,18 @@ size_t WebRtcVideoRenderFrame::GetPixelHeight() const {
}
int64_t WebRtcVideoRenderFrame::GetElapsedTime() const {
return elapsed_time_;
// Convert millisecond render time to ns timestamp.
return frame_->render_time_ms() * rtc::kNumNanosecsPerMillisec;
}
int64_t WebRtcVideoRenderFrame::GetTimeStamp() const {
return timestamp_;
// Convert 90K rtp timestamp to ns timestamp.
return (frame_->timestamp() / 90) * rtc::kNumNanosecsPerMillisec;
}
void WebRtcVideoRenderFrame::SetElapsedTime(int64_t elapsed_time) {
elapsed_time_ = elapsed_time;
UNIMPLEMENTED;
}
void WebRtcVideoRenderFrame::SetTimeStamp(int64_t time_stamp) {
timestamp_ = time_stamp;
UNIMPLEMENTED;
}
int WebRtcVideoRenderFrame::GetRotation() const {

View File

@@ -140,9 +140,6 @@ class WebRtcVideoFrame : public VideoFrame {
class WebRtcVideoRenderFrame : public VideoFrame {
public:
explicit WebRtcVideoRenderFrame(const webrtc::I420VideoFrame* frame);
WebRtcVideoRenderFrame(const webrtc::I420VideoFrame* frame,
int64_t timestamp,
int64_t elapsed_time);
virtual bool InitToBlack(int w,
int h,
@@ -195,8 +192,6 @@ class WebRtcVideoRenderFrame : public VideoFrame {
private:
const webrtc::I420VideoFrame* const frame_;
int64_t timestamp_;
int64_t elapsed_time_;
};
} // namespace cricket

View File

@@ -20,7 +20,6 @@
namespace webrtc {
class I420VideoFrame;
class VideoEngine;
class VideoRender;
class VideoRenderCallback;
@@ -48,9 +47,6 @@ class ExternalRenderer {
// Handle of the underlying video frame.
void* handle) = 0;
// Alternative interface for I420 frames.
virtual int DeliverI420Frame(const I420VideoFrame* webrtc_frame) = 0;
// Returns true if the renderer supports textures. DeliverFrame can be called
// with NULL |buffer| and non-NULL |handle|.
virtual bool IsTextureSupported() = 0;

View File

@@ -71,13 +71,6 @@ public:
return 0;
}
virtual int DeliverI420Frame(const webrtc::I420VideoFrame* webrtc_frame) {
EXPECT_TRUE(webrtc_frame);
EXPECT_EQ(webrtc_frame->width(), _width);
EXPECT_EQ(webrtc_frame->height(), _height);
return 0;
}
virtual bool IsTextureSupported() { return false; }
public:

View File

@@ -12,7 +12,6 @@
#include <assert.h>
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/event_wrapper.h"
#include "webrtc/system_wrappers/interface/thread_wrapper.h"
@@ -20,7 +19,16 @@
namespace test {
struct Frame {
public:
Frame() : buffer(nullptr), buffer_size(0), timestamp(0), render_time(0) {}
Frame(unsigned char* buffer,
size_t buffer_size,
uint32_t timestamp,
int64_t render_time)
: buffer(new unsigned char[buffer_size]),
buffer_size(buffer_size),
timestamp(timestamp),
render_time(render_time) {
memcpy(this->buffer.get(), buffer, buffer_size);
}
webrtc::scoped_ptr<unsigned char[]> buffer;
size_t buffer_size;
@@ -121,53 +129,19 @@ int ViEToFileRenderer::DeliverFrame(unsigned char *buffer,
webrtc::CriticalSectionScoped lock(frame_queue_cs_.get());
test::Frame* frame;
if (free_frame_queue_.empty()) {
frame = new test::Frame();
frame = new test::Frame(buffer, buffer_size, time_stamp, render_time);
} else {
// Reuse an already allocated frame.
frame = free_frame_queue_.front();
free_frame_queue_.pop_front();
}
if (frame->buffer_size < buffer_size) {
frame->buffer.reset(new unsigned char[buffer_size]);
if (frame->buffer_size < buffer_size) {
frame->buffer.reset(new unsigned char[buffer_size]);
}
memcpy(frame->buffer.get(), buffer, buffer_size);
frame->buffer_size = buffer_size;
frame->timestamp = time_stamp;
frame->render_time = render_time;
}
memcpy(frame->buffer.get(), buffer, buffer_size);
frame->timestamp = time_stamp;
frame->render_time = render_time;
render_queue_.push_back(frame);
// Signal that a frame is ready to be written to file.
frame_render_event_->Set();
return 0;
}
int ViEToFileRenderer::DeliverI420Frame(
const webrtc::I420VideoFrame* input_frame) {
assert(input_frame);
const size_t buffer_size = CalcBufferSize(webrtc::kI420, input_frame->width(),
input_frame->height());
webrtc::CriticalSectionScoped lock(frame_queue_cs_.get());
test::Frame* frame;
if (free_frame_queue_.empty()) {
frame = new test::Frame();
} else {
// Reuse an already allocated frame.
frame = free_frame_queue_.front();
free_frame_queue_.pop_front();
}
if (frame->buffer_size < buffer_size) {
frame->buffer.reset(new unsigned char[buffer_size]);
frame->buffer_size = buffer_size;
}
const int length =
ExtractBuffer(*input_frame, frame->buffer_size, frame->buffer.get());
assert(static_cast<size_t>(length) == buffer_size);
if (length < 0)
return -1;
frame->timestamp = input_frame->timestamp();
frame->render_time = input_frame->render_time_ms();
render_queue_.push_back(frame);
// Signal that a frame is ready to be written to file.
frame_render_event_->Set();

View File

@@ -64,8 +64,6 @@ class ViEToFileRenderer: public webrtc::ExternalRenderer {
int64_t render_time,
void* handle) OVERRIDE;
int DeliverI420Frame(const webrtc::I420VideoFrame* webrtc_frame) OVERRIDE;
bool IsTextureSupported() OVERRIDE;
const std::string GetFullOutputPath() const;

View File

@@ -189,13 +189,6 @@ int32_t ViEExternalRendererImpl::RenderFrame(
return 0;
}
// Fast path for I420 without frame copy.
if (external_renderer_format_ == kVideoI420) {
NotifyFrameSizeChange(stream_id, video_frame);
external_renderer_->DeliverI420Frame(&video_frame);
return 0;
}
VideoFrame* out_frame = converted_frame_.get();
// Convert to requested format.
@@ -211,6 +204,16 @@ int32_t ViEExternalRendererImpl::RenderFrame(
converted_frame_->VerifyAndAllocate(buffer_size);
switch (external_renderer_format_) {
case kVideoI420: {
// TODO(mikhal): need to copy the buffer as is.
// can the output here be a I420 frame?
int length = ExtractBuffer(video_frame, out_frame->Size(),
out_frame->Buffer());
if (length < 0)
return -1;
out_frame->SetLength(length);
break;
}
case kVideoYV12:
case kVideoYUY2:
case kVideoUYVY: