Remove frame copy in ViEExternalRendererImpl::RenderFrame
Add new interface for delivering frames to ExternalRenderer. The purpose is to
avoid having to extract a packed buffer from I420VideoFrame, which will cause
a deep frame copy.

BUG=1128,4227
R=mflodman@webrtc.org

Committed: https://code.google.com/p/webrtc/source/detail?r=8136

Review URL: https://webrtc-codereview.appspot.com/36489004

Cr-Commit-Position: refs/heads/master@{#8199}
git-svn-id: http://webrtc.googlecode.com/svn/trunk@8199 4adac7df-926f-26a2-2b94-8c16560cd09d
parent a87c398a41
commit a26f511dd2
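For context, a receiving-side renderer would now override the added DeliverI420Frame() instead of relying only on the packed-buffer DeliverFrame(). The following is a minimal sketch, not code from this change: the class name MyI420Renderer and the RenderPlanes() sink are hypothetical, and the header path, the FrameSizeChange() signature, and the I420VideoFrame plane accessors (buffer()/stride() with kYPlane etc.) are assumptions based on the WebRTC API of this era.

// Minimal sketch of a consumer of the new interface (not part of this CL).
#include "webrtc/video_engine/include/vie_render.h"  // Assumed header path.

class MyI420Renderer : public webrtc::ExternalRenderer {  // Hypothetical name.
 public:
  // Assumed signature of the existing pure-virtual FrameSizeChange().
  virtual int FrameSizeChange(unsigned int width, unsigned int height,
                              unsigned int number_of_streams) {
    return 0;
  }

  // Legacy packed-buffer path; unused once the I420 fast path is taken.
  virtual int DeliverFrame(unsigned char* buffer, size_t buffer_size,
                           uint32_t time_stamp, int64_t ntp_time_ms,
                           int64_t render_time, void* handle) {
    return 0;
  }

  // New path: read the I420 planes in place, no ExtractBuffer() deep copy.
  virtual int DeliverI420Frame(const webrtc::I420VideoFrame* frame) {
    if (!frame || frame->native_handle())
      return -1;
    // Assumed accessors: buffer(PlaneType) and stride(PlaneType).
    RenderPlanes(frame->buffer(webrtc::kYPlane), frame->stride(webrtc::kYPlane),
                 frame->buffer(webrtc::kUPlane), frame->stride(webrtc::kUPlane),
                 frame->buffer(webrtc::kVPlane), frame->stride(webrtc::kVPlane),
                 frame->width(), frame->height());
    return 0;
  }

  virtual bool IsTextureSupported() { return false; }

 private:
  // Hypothetical sink; hands the planes to a display without repacking them.
  void RenderPlanes(const uint8_t* y, int y_stride,
                    const uint8_t* u, int u_stride,
                    const uint8_t* v, int v_stride,
                    int width, int height) {}
};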
@@ -383,19 +383,8 @@ class WebRtcRenderAdapter : public webrtc::ExternalRenderer {
                            int64_t render_time,
                            void* handle) {
     rtc::CritScope cs(&crit_);
-    if (capture_start_rtp_time_stamp_ < 0) {
-      capture_start_rtp_time_stamp_ = rtp_time_stamp;
-    }
-
-    const int kVideoCodecClockratekHz = cricket::kVideoCodecClockrate / 1000;
-
-    int64 elapsed_time_ms =
-        (rtp_ts_wraparound_handler_.Unwrap(rtp_time_stamp) -
-         capture_start_rtp_time_stamp_) / kVideoCodecClockratekHz;
-    if (ntp_time_ms > 0) {
-      capture_start_ntp_time_ms_ = ntp_time_ms - elapsed_time_ms;
-    }
-    frame_rate_tracker_.Update(1);
+    const int64_t elapsed_time_ms = ElapsedTimeMs(rtp_time_stamp);
+    UpdateFrameStats(elapsed_time_ms, ntp_time_ms);
     if (!renderer_) {
       return 0;
     }
@@ -417,6 +406,25 @@ class WebRtcRenderAdapter : public webrtc::ExternalRenderer {
     }
   }
 
+  virtual int DeliverI420Frame(const webrtc::I420VideoFrame* webrtc_frame) {
+    rtc::CritScope cs(&crit_);
+    DCHECK(webrtc_frame);
+    const int64_t elapsed_time_ms = ElapsedTimeMs(webrtc_frame->timestamp());
+    UpdateFrameStats(elapsed_time_ms, webrtc_frame->ntp_time_ms());
+    if (!renderer_) {
+      return 0;
+    }
+    if (!webrtc_frame->native_handle()) {
+      WebRtcVideoRenderFrame cricket_frame(webrtc_frame, elapsed_time_ms);
+      return renderer_->RenderFrame(&cricket_frame) ? 0 : -1;
+    } else {
+      return DeliverTextureFrame(
+          webrtc_frame->native_handle(),
+          webrtc_frame->render_time_ms() * rtc::kNumNanosecsPerMillisec,
+          elapsed_time_ms * rtc::kNumNanosecsPerMillisec);
+    }
+  }
+
   virtual bool IsTextureSupported() { return true; }
 
   int DeliverBufferFrame(unsigned char* buffer, size_t buffer_size,
@@ -469,6 +477,22 @@ class WebRtcRenderAdapter : public webrtc::ExternalRenderer {
   }
 
  private:
+  int64_t ElapsedTimeMs(uint32_t rtp_time_stamp) {
+    if (capture_start_rtp_time_stamp_ < 0) {
+      capture_start_rtp_time_stamp_ = rtp_time_stamp;
+    }
+    const int kVideoCodecClockratekHz = cricket::kVideoCodecClockrate / 1000;
+    return (rtp_ts_wraparound_handler_.Unwrap(rtp_time_stamp) -
+            capture_start_rtp_time_stamp_) / kVideoCodecClockratekHz;
+  }
+
+  void UpdateFrameStats(int64_t elapsed_time_ms, int64_t ntp_time_ms) {
+    if (ntp_time_ms > 0) {
+      capture_start_ntp_time_ms_ = ntp_time_ms - elapsed_time_ms;
+    }
+    frame_rate_tracker_.Update(1);
+  }
+
   rtc::CriticalSection crit_;
   VideoRenderer* renderer_;
   int channel_id_;
@@ -460,9 +460,19 @@ int WebRtcVideoRenderFrame::GetRotation() const {
   return ROTATION_0;
 }
 
+// TODO(magjed): Make this copy shallow instead of deep, BUG=1128. There is no
+// way to guarantee that the underlying webrtc::I420VideoFrame |frame_| will
+// outlive the returned object. The only safe option is to make a deep copy.
+// This can be fixed by making webrtc::I420VideoFrame reference counted, or
+// adding a similar shallow copy function to it.
 VideoFrame* WebRtcVideoRenderFrame::Copy() const {
-  UNIMPLEMENTED;
-  return NULL;
+  WebRtcVideoFrame* new_frame = new WebRtcVideoFrame();
+  new_frame->InitToEmptyBuffer(frame_->width(), frame_->height(), 1, 1,
+                               GetElapsedTime(), GetTimeStamp());
+  CopyToPlanes(new_frame->GetYPlane(), new_frame->GetUPlane(),
+               new_frame->GetVPlane(), new_frame->GetYPitch(),
+               new_frame->GetUPitch(), new_frame->GetVPitch());
+  return new_frame;
 }
 
 bool WebRtcVideoRenderFrame::MakeExclusive() {
@@ -58,6 +58,9 @@ class WebRtcVideoFrame : public VideoFrame {
 
   bool Init(const CapturedFrame* frame, int dw, int dh);
 
+  void InitToEmptyBuffer(int w, int h, size_t pixel_width, size_t pixel_height,
+                         int64_t elapsed_time, int64_t time_stamp);
+
   // Aliases this WebRtcVideoFrame to a CapturedFrame. |frame| must outlive
   // this WebRtcVideoFrame.
   bool Alias(const CapturedFrame* frame, int dw, int dh);
@@ -122,8 +125,6 @@ class WebRtcVideoFrame : public VideoFrame {
                     size_t pixel_height,
                     int64_t elapsed_time,
                     int64_t time_stamp) const;
-  void InitToEmptyBuffer(int w, int h, size_t pixel_width, size_t pixel_height,
-                         int64_t elapsed_time, int64_t time_stamp);
 
   rtc::scoped_refptr<RefCountedBuffer> video_buffer_;
   size_t pixel_width_;
@@ -20,6 +20,7 @@
 
 namespace webrtc {
 
+class I420VideoFrame;
 class VideoEngine;
 class VideoRender;
 class VideoRenderCallback;
@@ -47,6 +48,9 @@ class ExternalRenderer {
                            // Handle of the underlying video frame.
                            void* handle) = 0;
 
+  // Alternative interface for I420 frames.
+  virtual int DeliverI420Frame(const I420VideoFrame* webrtc_frame) = 0;
+
   // Returns true if the renderer supports textures. DeliverFrame can be called
   // with NULL |buffer| and non-NULL |handle|.
   virtual bool IsTextureSupported() = 0;
@@ -595,6 +595,15 @@ int FrameDropDetector::GetNumberOfFramesDroppedAt(State state) {
 int FrameDropMonitoringRemoteFileRenderer::DeliverFrame(
     unsigned char *buffer, size_t buffer_size, uint32_t time_stamp,
     int64_t ntp_time_ms, int64_t render_time, void* /*handle*/) {
+  ReportFrameStats(time_stamp, render_time);
+  return ViEToFileRenderer::DeliverFrame(buffer, buffer_size,
+                                         time_stamp, ntp_time_ms,
+                                         render_time, NULL);
+}
+
+void FrameDropMonitoringRemoteFileRenderer::ReportFrameStats(
+    uint32_t time_stamp,
+    int64_t render_time) {
   // |render_time| provides the ideal render time for this frame. If that time
   // has already passed we will render it immediately.
   int64_t report_render_time_us = render_time * 1000;
@@ -605,9 +614,12 @@ int FrameDropMonitoringRemoteFileRenderer::DeliverFrame(
   // Register that this frame has been rendered.
   frame_drop_detector_->ReportFrameState(FrameDropDetector::kRendered,
                                          time_stamp, report_render_time_us);
-  return ViEToFileRenderer::DeliverFrame(buffer, buffer_size,
-                                         time_stamp, ntp_time_ms,
-                                         render_time, NULL);
 }
 
+int FrameDropMonitoringRemoteFileRenderer::DeliverI420Frame(
+    const webrtc::I420VideoFrame* webrtc_frame) {
+  ReportFrameStats(webrtc_frame->timestamp(), webrtc_frame->render_time_ms());
+  return ViEToFileRenderer::DeliverI420Frame(webrtc_frame);
+}
+
 int FrameDropMonitoringRemoteFileRenderer::FrameSizeChange(
@@ -230,7 +230,11 @@ class FrameDropMonitoringRemoteFileRenderer : public ViEToFileRenderer {
                    int64_t ntp_time_ms,
                    int64_t render_time,
                    void* handle) OVERRIDE;
+  int DeliverI420Frame(const webrtc::I420VideoFrame* webrtc_frame) OVERRIDE;
+
  private:
+  void ReportFrameStats(uint32_t time_stamp, int64_t render_time);
+
   FrameDropDetector* frame_drop_detector_;
 };
 
@@ -71,6 +71,13 @@ public:
         return 0;
     }
 
+    virtual int DeliverI420Frame(const webrtc::I420VideoFrame* webrtc_frame) {
+        EXPECT_TRUE(webrtc_frame);
+        EXPECT_EQ(webrtc_frame->width(), _width);
+        EXPECT_EQ(webrtc_frame->height(), _height);
+        return 0;
+    }
+
     virtual bool IsTextureSupported() { return false; }
 
 public:
@@ -12,6 +12,7 @@
 
 #include <assert.h>
 
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
 #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
 #include "webrtc/system_wrappers/interface/event_wrapper.h"
 #include "webrtc/system_wrappers/interface/thread_wrapper.h"
@@ -19,16 +20,7 @@
 namespace test {
 struct Frame {
  public:
-  Frame(unsigned char* buffer,
-        size_t buffer_size,
-        uint32_t timestamp,
-        int64_t render_time)
-      : buffer(new unsigned char[buffer_size]),
-        buffer_size(buffer_size),
-        timestamp(timestamp),
-        render_time(render_time) {
-    memcpy(this->buffer.get(), buffer, buffer_size);
-  }
+  Frame() : buffer(nullptr), buffer_size(0), timestamp(0), render_time(0) {}
 
   webrtc::scoped_ptr<unsigned char[]> buffer;
   size_t buffer_size;
@@ -120,6 +112,22 @@ void ViEToFileRenderer::ForgetOutputFile() {
   output_path_ = "";
 }
 
+test::Frame* ViEToFileRenderer::NewFrame(size_t buffer_size) {
+  test::Frame* frame;
+  if (free_frame_queue_.empty()) {
+    frame = new test::Frame();
+  } else {
+    // Reuse an already allocated frame.
+    frame = free_frame_queue_.front();
+    free_frame_queue_.pop_front();
+  }
+  if (frame->buffer_size < buffer_size) {
+    frame->buffer.reset(new unsigned char[buffer_size]);
+    frame->buffer_size = buffer_size;
+  }
+  return frame;
+}
+
 int ViEToFileRenderer::DeliverFrame(unsigned char *buffer,
                                     size_t buffer_size,
                                     uint32_t time_stamp,
@@ -127,21 +135,32 @@ int ViEToFileRenderer::DeliverFrame(unsigned char *buffer,
                                     int64_t render_time,
                                     void* /*handle*/) {
   webrtc::CriticalSectionScoped lock(frame_queue_cs_.get());
-  test::Frame* frame;
-  if (free_frame_queue_.empty()) {
-    frame = new test::Frame(buffer, buffer_size, time_stamp, render_time);
-  } else {
-    // Reuse an already allocated frame.
-    frame = free_frame_queue_.front();
-    free_frame_queue_.pop_front();
-    if (frame->buffer_size < buffer_size) {
-      frame->buffer.reset(new unsigned char[buffer_size]);
-    }
-    memcpy(frame->buffer.get(), buffer, buffer_size);
-    frame->buffer_size = buffer_size;
-    frame->timestamp = time_stamp;
-    frame->render_time = render_time;
-  }
+  test::Frame* frame = NewFrame(buffer_size);
+  memcpy(frame->buffer.get(), buffer, buffer_size);
+  frame->timestamp = time_stamp;
+  frame->render_time = render_time;
+
+  render_queue_.push_back(frame);
+  // Signal that a frame is ready to be written to file.
+  frame_render_event_->Set();
+  return 0;
+}
+
+int ViEToFileRenderer::DeliverI420Frame(
+    const webrtc::I420VideoFrame* input_frame) {
+  assert(input_frame);
+  const size_t buffer_size = CalcBufferSize(webrtc::kI420, input_frame->width(),
+                                            input_frame->height());
+  webrtc::CriticalSectionScoped lock(frame_queue_cs_.get());
+  test::Frame* frame = NewFrame(buffer_size);
+  const int length =
+      ExtractBuffer(*input_frame, frame->buffer_size, frame->buffer.get());
+  assert(static_cast<size_t>(length) == buffer_size);
+  if (length < 0)
+    return -1;
+  frame->timestamp = input_frame->timestamp();
+  frame->render_time = input_frame->render_time_ms();
+
   render_queue_.push_back(frame);
   // Signal that a frame is ready to be written to file.
   frame_render_event_->Set();
@@ -64,6 +64,8 @@ class ViEToFileRenderer: public webrtc::ExternalRenderer {
                    int64_t render_time,
                    void* handle) OVERRIDE;
 
+  int DeliverI420Frame(const webrtc::I420VideoFrame* webrtc_frame) OVERRIDE;
+
   bool IsTextureSupported() OVERRIDE;
 
   const std::string GetFullOutputPath() const;
@@ -71,6 +73,9 @@ class ViEToFileRenderer: public webrtc::ExternalRenderer {
  private:
   typedef std::list<test::Frame*> FrameQueue;
 
+  // Returns a frame with the specified |buffer_size|. Tries to avoid allocating
+  // new frames by reusing frames from |free_frame_queue_|.
+  test::Frame* NewFrame(size_t buffer_size);
   static bool RunRenderThread(void* obj);
   void ForgetOutputFile();
   bool ProcessRenderQueue();
@@ -189,6 +189,13 @@ int32_t ViEExternalRendererImpl::RenderFrame(
     return 0;
   }
 
+  // Fast path for I420 without frame copy.
+  if (external_renderer_format_ == kVideoI420) {
+    NotifyFrameSizeChange(stream_id, video_frame);
+    external_renderer_->DeliverI420Frame(&video_frame);
+    return 0;
+  }
+
   VideoFrame* out_frame = converted_frame_.get();
 
   // Convert to requested format.
@@ -204,16 +211,6 @@ int32_t ViEExternalRendererImpl::RenderFrame(
   converted_frame_->VerifyAndAllocate(buffer_size);
 
   switch (external_renderer_format_) {
-    case kVideoI420: {
-      // TODO(mikhal): need to copy the buffer as is.
-      // can the output here be a I420 frame?
-      int length = ExtractBuffer(video_frame, out_frame->Size(),
-                                 out_frame->Buffer());
-      if (length < 0)
-        return -1;
-      out_frame->SetLength(length);
-      break;
-    }
     case kVideoYV12:
     case kVideoYUY2:
     case kVideoUYVY: