Always use DeliverI420Frame in WebRtcVideoEngine.
Moves the native_handle() path to DeliverI420Frame and CHECKs that DeliverFrame is no longer used.

R=magjed@webrtc.org, mflodman@webrtc.org
BUG=

Review URL: https://webrtc-codereview.appspot.com/38019004

Cr-Commit-Position: refs/heads/master@{#8312}
git-svn-id: http://webrtc.googlecode.com/svn/trunk@8312 4adac7df-926f-26a2-2b94-8c16560cd09d
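For code built on top of this interface, the visible change is that decoded frames are handed over only through the reference-taking DeliverI420Frame(); the old buffer-based DeliverFrame() path in WebRtcVideoEngine now trips a CHECK. A minimal sketch of the call-site migration (the helper function and the include path are assumptions for illustration, not part of this change):

#include "webrtc/video_engine/include/vie_render.h"  // assumed location of ExternalRenderer

// Hypothetical helper showing only the call-site change.
void DeliverDecodedFrame(webrtc::ExternalRenderer* renderer,
                         const webrtc::I420VideoFrame& frame) {
  // Before: renderer->DeliverI420Frame(&frame);  (pointer argument)
  // After:  the frame is passed by const reference.
  renderer->DeliverI420Frame(frame);
}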
@@ -382,68 +382,31 @@ class WebRtcRenderAdapter : public webrtc::ExternalRenderer {
                            int64_t ntp_time_ms,
                            int64_t render_time,
                            void* handle) {
-    rtc::CritScope cs(&crit_);
-    const int64_t elapsed_time_ms = ElapsedTimeMs(rtp_time_stamp);
-    UpdateFrameStats(elapsed_time_ms, ntp_time_ms);
-    if (!renderer_) {
-      return 0;
-    }
-    // Convert elapsed_time_ms to ns timestamp.
-    int64 elapsed_time_ns =
-        elapsed_time_ms * rtc::kNumNanosecsPerMillisec;
-    // Convert milisecond render time to ns timestamp.
-    int64 render_time_ns = render_time *
-        rtc::kNumNanosecsPerMillisec;
-    // Note that here we send the |elapsed_time_ns| to renderer as the
-    // cricket::VideoFrame's elapsed_time_ and the |render_time_ns| as the
-    // cricket::VideoFrame's time_stamp_.
-    if (!handle) {
-      return DeliverBufferFrame(buffer, buffer_size, render_time_ns,
-                                elapsed_time_ns);
-    } else {
-      return DeliverTextureFrame(handle, render_time_ns,
-                                 elapsed_time_ns);
-    }
+    CHECK(false) << "All frames should be delivered as I420 frames through "
+                    "DeliverI420Frame.";
+    return 0;
   }
 
-  virtual int DeliverI420Frame(const webrtc::I420VideoFrame* webrtc_frame) {
+  virtual int DeliverI420Frame(const webrtc::I420VideoFrame& webrtc_frame) {
     rtc::CritScope cs(&crit_);
-    DCHECK(webrtc_frame);
-    const int64_t elapsed_time_ms = ElapsedTimeMs(webrtc_frame->timestamp());
-    UpdateFrameStats(elapsed_time_ms, webrtc_frame->ntp_time_ms());
+    const int64_t elapsed_time_ms = ElapsedTimeMs(webrtc_frame.timestamp());
+    UpdateFrameStats(elapsed_time_ms, webrtc_frame.ntp_time_ms());
     if (!renderer_) {
       return 0;
     }
-    if (!webrtc_frame->native_handle()) {
-      WebRtcVideoRenderFrame cricket_frame(webrtc_frame, elapsed_time_ms);
+    if (webrtc_frame.native_handle() == NULL) {
+      WebRtcVideoRenderFrame cricket_frame(&webrtc_frame, elapsed_time_ms);
       return renderer_->RenderFrame(&cricket_frame) ? 0 : -1;
     } else {
       return DeliverTextureFrame(
-          webrtc_frame->native_handle(),
-          webrtc_frame->render_time_ms() * rtc::kNumNanosecsPerMillisec,
+          webrtc_frame.native_handle(),
+          webrtc_frame.render_time_ms() * rtc::kNumNanosecsPerMillisec,
           elapsed_time_ms * rtc::kNumNanosecsPerMillisec);
     }
   }
 
   virtual bool IsTextureSupported() { return true; }
 
-  int DeliverBufferFrame(unsigned char* buffer, size_t buffer_size,
-                         int64 time_stamp, int64 elapsed_time) {
-    WebRtcVideoFrame video_frame;
-    video_frame.Alias(buffer, buffer_size, width_, height_, 1, 1, elapsed_time,
-                      time_stamp, webrtc::kVideoRotation_0);
-
-    // Sanity check on decoded frame size.
-    if (buffer_size != VideoFrame::SizeOf(width_, height_)) {
-      LOG(LS_WARNING) << "WebRtcRenderAdapter (channel " << channel_id_
-                      << ") received a strange frame size: "
-                      << buffer_size;
-    }
-
-    int ret = renderer_->RenderFrame(&video_frame) ? 0 : -1;
-    return ret;
-  }
-
   int DeliverTextureFrame(void* handle, int64 time_stamp, int64 elapsed_time) {
     WebRtcTextureVideoFrame video_frame(
         static_cast<webrtc::NativeHandle*>(handle), width_, height_,

@@ -361,11 +361,11 @@ int VideoReceiveStream::DeliverFrame(unsigned char* buffer,
   return 0;
 }
 
-int VideoReceiveStream::DeliverI420Frame(const I420VideoFrame* video_frame) {
+int VideoReceiveStream::DeliverI420Frame(const I420VideoFrame& video_frame) {
   if (config_.renderer != NULL)
     config_.renderer->RenderFrame(
-        *video_frame,
-        video_frame->render_time_ms() - clock_->TimeInMilliseconds());
+        video_frame,
+        video_frame.render_time_ms() - clock_->TimeInMilliseconds());
 
   stats_proxy_->OnRenderedFrame();
 

@@ -66,7 +66,7 @@ class VideoReceiveStream : public webrtc::VideoReceiveStream,
                            int64_t ntp_time_ms,
                            int64_t render_time_ms,
                            void* handle) override;
-  virtual int DeliverI420Frame(const I420VideoFrame* webrtc_frame) override;
+  virtual int DeliverI420Frame(const I420VideoFrame& webrtc_frame) override;
   virtual bool IsTextureSupported() override;
 
   void SignalNetworkState(Call::NetworkState state);

@@ -49,7 +49,7 @@ class ExternalRenderer {
                            void* handle) = 0;
 
   // Alternative interface for I420 frames.
-  virtual int DeliverI420Frame(const I420VideoFrame* webrtc_frame) = 0;
+  virtual int DeliverI420Frame(const I420VideoFrame& webrtc_frame) = 0;
 
   // Returns true if the renderer supports textures. DeliverFrame can be called
   // with NULL |buffer| and non-NULL |handle|.

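For renderer implementations outside this CL, the updated pure virtual means every ExternalRenderer subclass has to accept the frame by const reference and should expect texture frames on the same path. A minimal sketch against the new interface (the class name is hypothetical, and the other pure virtuals of ExternalRenderer, such as FrameSizeChange() and the legacy DeliverFrame(), are omitted for brevity):

// Sketch only: "SinkRenderer" is illustrative and not part of this change.
class SinkRenderer : public webrtc::ExternalRenderer {
 public:
  int DeliverI420Frame(const webrtc::I420VideoFrame& frame) override {
    if (frame.native_handle() != NULL) {
      // Texture frames now arrive here as well; handle or ignore the handle.
      return 0;
    }
    last_timestamp_ = frame.timestamp();
    last_render_time_ms_ = frame.render_time_ms();
    return 0;
  }

  // Returning true allows texture frames to be forwarded through
  // DeliverI420Frame() instead of being dropped.
  bool IsTextureSupported() override { return true; }

 private:
  uint32_t last_timestamp_ = 0;
  int64_t last_render_time_ms_ = 0;
};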
@@ -617,8 +617,8 @@ void FrameDropMonitoringRemoteFileRenderer::ReportFrameStats(
 }
 
 int FrameDropMonitoringRemoteFileRenderer::DeliverI420Frame(
-    const webrtc::I420VideoFrame* webrtc_frame) {
-  ReportFrameStats(webrtc_frame->timestamp(), webrtc_frame->render_time_ms());
+    const webrtc::I420VideoFrame& webrtc_frame) {
+  ReportFrameStats(webrtc_frame.timestamp(), webrtc_frame.render_time_ms());
   return ViEToFileRenderer::DeliverI420Frame(webrtc_frame);
 }
 

@@ -230,7 +230,7 @@ class FrameDropMonitoringRemoteFileRenderer : public ViEToFileRenderer {
                    int64_t ntp_time_ms,
                    int64_t render_time,
                    void* handle) OVERRIDE;
-  int DeliverI420Frame(const webrtc::I420VideoFrame* webrtc_frame) OVERRIDE;
+  int DeliverI420Frame(const webrtc::I420VideoFrame& webrtc_frame) OVERRIDE;
 
  private:
  void ReportFrameStats(uint32_t time_stamp, int64_t render_time);

@@ -71,10 +71,9 @@ public:
     return 0;
   }
 
-  virtual int DeliverI420Frame(const webrtc::I420VideoFrame* webrtc_frame) {
-    EXPECT_TRUE(webrtc_frame);
-    EXPECT_EQ(webrtc_frame->width(), _width);
-    EXPECT_EQ(webrtc_frame->height(), _height);
+  virtual int DeliverI420Frame(const webrtc::I420VideoFrame& webrtc_frame) {
+    EXPECT_EQ(webrtc_frame.width(), _width);
+    EXPECT_EQ(webrtc_frame.height(), _height);
     return 0;
   }
 

@@ -146,19 +146,18 @@ int ViEToFileRenderer::DeliverFrame(unsigned char *buffer,
 }
 
 int ViEToFileRenderer::DeliverI420Frame(
-    const webrtc::I420VideoFrame* input_frame) {
-  assert(input_frame);
-  const size_t buffer_size = CalcBufferSize(webrtc::kI420, input_frame->width(),
-                                            input_frame->height());
+    const webrtc::I420VideoFrame& input_frame) {
+  const size_t buffer_size =
+      CalcBufferSize(webrtc::kI420, input_frame.width(), input_frame.height());
   webrtc::CriticalSectionScoped lock(frame_queue_cs_.get());
   test::Frame* frame = NewFrame(buffer_size);
   const int length =
-      ExtractBuffer(*input_frame, frame->buffer_size, frame->buffer.get());
+      ExtractBuffer(input_frame, frame->buffer_size, frame->buffer.get());
   assert(static_cast<size_t>(length) == buffer_size);
   if (length < 0)
     return -1;
-  frame->timestamp = input_frame->timestamp();
-  frame->render_time = input_frame->render_time_ms();
+  frame->timestamp = input_frame.timestamp();
+  frame->render_time = input_frame.render_time_ms();
 
   render_queue_.push_back(frame);
   // Signal that a frame is ready to be written to file.

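ViEToFileRenderer's new DeliverI420Frame() relies on CalcBufferSize() and ExtractBuffer() to flatten the incoming frame into one contiguous I420 buffer. A self-contained sketch of that pattern (the include path and the use of std::vector are assumptions made for the example):

#include <cstdint>
#include <vector>

#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"  // assumed header

// Copies an I420VideoFrame into a single contiguous buffer, mirroring what
// ViEToFileRenderer::DeliverI420Frame() does before queuing the frame.
bool CopyFrameToBuffer(const webrtc::I420VideoFrame& frame,
                       std::vector<uint8_t>* out) {
  const size_t buffer_size =
      webrtc::CalcBufferSize(webrtc::kI420, frame.width(), frame.height());
  out->resize(buffer_size);
  const int length = webrtc::ExtractBuffer(frame, out->size(), out->data());
  return length >= 0 && static_cast<size_t>(length) == buffer_size;
}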
@@ -64,7 +64,7 @@ class ViEToFileRenderer: public webrtc::ExternalRenderer {
                    int64_t render_time,
                    void* handle) OVERRIDE;
 
-  int DeliverI420Frame(const webrtc::I420VideoFrame* webrtc_frame) OVERRIDE;
+  int DeliverI420Frame(const webrtc::I420VideoFrame& webrtc_frame) OVERRIDE;
 
   bool IsTextureSupported() OVERRIDE;
 

@@ -170,9 +170,25 @@ int ViEExternalRendererImpl::SetViEExternalRenderer(
   return 0;
 }
 
-int32_t ViEExternalRendererImpl::RenderFrame(
-    const uint32_t stream_id,
-    I420VideoFrame& video_frame) {
+int32_t ViEExternalRendererImpl::RenderFrame(const uint32_t stream_id,
+                                             I420VideoFrame& video_frame) {
+  if (external_renderer_format_ != kVideoI420)
+    return ConvertAndRenderFrame(stream_id, video_frame);
+
+  // Fast path for I420 without frame copy.
+  NotifyFrameSizeChange(stream_id, video_frame);
+  if (video_frame.native_handle() == NULL ||
+      external_renderer_->IsTextureSupported()) {
+    external_renderer_->DeliverI420Frame(video_frame);
+  } else {
+    // TODO(wuchengli): readback the pixels and deliver the frame.
+  }
+  return 0;
+}
+
+int32_t ViEExternalRendererImpl::ConvertAndRenderFrame(
+    uint32_t stream_id,
+    I420VideoFrame& video_frame) {
   if (video_frame.native_handle() != NULL) {
     NotifyFrameSizeChange(stream_id, video_frame);
 

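One behavioral consequence of the new fast path above: when the external renderer has asked for I420 but a frame carries a native handle and IsTextureSupported() returns false, the frame is not delivered at all yet (hence the TODO). A restatement of that dispatch as a free function, purely for illustration (the function name and the omission of NotifyFrameSizeChange() are assumptions of this sketch, not part of the CL):

// Illustrative restatement of ViEExternalRendererImpl::RenderFrame()'s
// I420 fast path; not part of the CL.
int32_t DispatchI420FastPath(webrtc::ExternalRenderer* renderer,
                             webrtc::RawVideoType renderer_format,
                             webrtc::I420VideoFrame& frame) {
  if (renderer_format != webrtc::kVideoI420)
    return -1;  // The real code falls back to ConvertAndRenderFrame().
  if (frame.native_handle() == NULL || renderer->IsTextureSupported()) {
    renderer->DeliverI420Frame(frame);
  } else {
    // Texture frame, but the renderer cannot take textures: pixels are not
    // read back yet, so the frame is silently dropped.
  }
  return 0;
}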
@@ -189,13 +205,6 @@ int32_t ViEExternalRendererImpl::RenderFrame(
     return 0;
   }
 
-  // Fast path for I420 without frame copy.
-  if (external_renderer_format_ == kVideoI420) {
-    NotifyFrameSizeChange(stream_id, video_frame);
-    external_renderer_->DeliverI420Frame(&video_frame);
-    return 0;
-  }
-
   VideoFrame* out_frame = converted_frame_.get();
 
   // Convert to requested format.

@@ -37,6 +37,8 @@ class ViEExternalRendererImpl : public VideoRenderCallback {
  private:
  void NotifyFrameSizeChange(const uint32_t stream_id,
                             I420VideoFrame& video_frame);
+  int32_t ConvertAndRenderFrame(uint32_t stream_id,
+                                I420VideoFrame& video_frame);
  ExternalRenderer* external_renderer_;
  RawVideoType external_renderer_format_;
  int external_renderer_width_;