Use WebRtcVideoRenderFrame for texture frames.

Removes buffer/texture path separation inside WebRtcVideoEngine and
DeliverTextureFrame(). This unifies frame delivery with
WebRtcVideoEngine2 which is expected to automagically work with texture
frames after this change.

BUG=1788
R=magjed@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/38069005

Cr-Commit-Position: refs/heads/master@{#8326}
git-svn-id: http://webrtc.googlecode.com/svn/trunk@8326 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
pbos@webrtc.org 2015-02-11 09:03:15 +00:00
parent 62f6e75673
commit 7cc92aaf37
2 changed files with 12 additions and 17 deletions

View File

@ -394,26 +394,13 @@ class WebRtcRenderAdapter : public webrtc::ExternalRenderer {
if (!renderer_) {
return 0;
}
if (webrtc_frame.native_handle() == NULL) {
WebRtcVideoRenderFrame cricket_frame(&webrtc_frame, elapsed_time_ms);
return renderer_->RenderFrame(&cricket_frame) ? 0 : -1;
} else {
return DeliverTextureFrame(
webrtc_frame.native_handle(),
webrtc_frame.render_time_ms() * rtc::kNumNanosecsPerMillisec,
elapsed_time_ms * rtc::kNumNanosecsPerMillisec);
}
WebRtcVideoRenderFrame cricket_frame(&webrtc_frame, elapsed_time_ms);
return renderer_->RenderFrame(&cricket_frame) ? 0 : -1;
}
// Reports texture-frame support to the caller. Still true after this change:
// DeliverFrame() now wraps texture frames in WebRtcVideoRenderFrame instead
// of using a separate DeliverTextureFrame() path.
virtual bool IsTextureSupported() { return true; }
// Removed by this commit: wrapped a raw native texture handle in a
// WebRtcTextureVideoFrame and handed it to the renderer. Texture frames now
// flow through the unified WebRtcVideoRenderFrame path in DeliverFrame().
// NOTE(review): time_stamp and elapsed_time are in nanoseconds here — the
// caller multiplied by rtc::kNumNanosecsPerMillisec before calling.
int DeliverTextureFrame(void* handle, int64 time_stamp, int64 elapsed_time) {
WebRtcTextureVideoFrame video_frame(
static_cast<webrtc::NativeHandle*>(handle), width_, height_,
elapsed_time, time_stamp);
return renderer_->RenderFrame(&video_frame);
}
unsigned int width() {
rtc::CritScope cs(&crit_);
return width_;

View File

@ -32,6 +32,7 @@
#include "libyuv/planar_functions.h"
#include "talk/media/base/videocapturer.h"
#include "talk/media/base/videocommon.h"
#include "talk/media/webrtc/webrtctexturevideoframe.h"
#include "webrtc/base/logging.h"
#include "webrtc/video_frame.h"
@ -453,7 +454,7 @@ int32 WebRtcVideoRenderFrame::GetVPitch() const {
}
void* WebRtcVideoRenderFrame::GetNativeHandle() const {
// (diff view — old line, removed by this commit: the render frame previously
// never exposed a texture handle)
return NULL;
// (diff view — new line: forward the wrapped webrtc frame's native handle so
// texture frames are visible to callers such as Copy() below)
return frame_->native_handle();
}
size_t WebRtcVideoRenderFrame::GetPixelWidth() const {
@ -487,6 +488,13 @@ webrtc::VideoRotation WebRtcVideoRenderFrame::GetVideoRotation() const {
// This can be fixed by making webrtc::I420VideoFrame reference counted, or
// adding a similar shallow copy function to it.
VideoFrame* WebRtcVideoRenderFrame::Copy() const {
if (frame_->native_handle() != NULL) {
return new WebRtcTextureVideoFrame(
static_cast<webrtc::NativeHandle*>(frame_->native_handle()),
static_cast<size_t>(frame_->width()),
static_cast<size_t>(frame_->height()), GetElapsedTime(),
GetTimeStamp());
}
WebRtcVideoFrame* new_frame = new WebRtcVideoFrame();
new_frame->InitToEmptyBuffer(frame_->width(), frame_->height(), 1, 1,
GetElapsedTime(), GetTimeStamp());