Remove TextureVideoFrame

TextureVideoFrame is currently an empty shell whose only contribution is a convenience constructor that creates an I420VideoFrame backed by a texture buffer. This CL moves that constructor, along with all of TextureVideoFrame's unit tests, into the base class, which makes it possible to remove TextureVideoFrame and all of its files completely. With no subclasses left, there is also no point in keeping I420VideoFrame's methods virtual, so the virtual qualifiers are dropped.

R=pbos@webrtc.org, perkj@webrtc.org, stefan@webrtc.org
TBR=mflodman

Review URL: https://webrtc-codereview.appspot.com/40229004

Cr-Commit-Position: refs/heads/master@{#8629}
git-svn-id: http://webrtc.googlecode.com/svn/trunk@8629 4adac7df-926f-26a2-2b94-8c16560cd09d
magjed@webrtc.org 2015-03-06 10:41:00 +00:00
parent 7158ec1727
commit 45cdcce5f5
16 changed files with 134 additions and 213 deletions
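
For orientation before the per-file diffs, here is a minimal usage sketch (not part of the CL itself) of how a texture-backed frame is constructed after this change. The class name MyTextureHandle and the function DeliverTextureFrame are made up for illustration; the NativeHandle implementation mirrors the stub used in the unit tests below.

#include "webrtc/common_video/interface/i420_video_frame.h"
#include "webrtc/common_video/interface/native_handle.h"

// Illustrative-only handle; any implementation of webrtc::NativeHandle
// (AddRef/Release/GetHandle) can back a texture frame.
class MyTextureHandle : public webrtc::NativeHandle {
 public:
  MyTextureHandle() : ref_count_(0) {}
  virtual ~MyTextureHandle() {}
  virtual int32_t AddRef() { return ++ref_count_; }
  virtual int32_t Release() { return --ref_count_; }
  virtual void* GetHandle() { return NULL; }  // A real handle would return a texture object.

 private:
  int32_t ref_count_;
};

void DeliverTextureFrame(MyTextureHandle* handle, int width, int height,
                         uint32_t rtp_timestamp, int64_t render_time_ms) {
  // Before this CL:
  //   webrtc::TextureVideoFrame frame(handle, width, height,
  //                                   rtp_timestamp, render_time_ms);
  // After this CL, the base class provides the equivalent constructor directly:
  webrtc::I420VideoFrame frame(handle, width, height,
                               rtp_timestamp, render_time_ms);
  // The frame behaves like any other I420VideoFrame, except that
  // native_handle() is non-NULL and there are no I420 planes to read.
}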

View File

@@ -36,7 +36,6 @@
 #include "webrtc/base/checks.h"
 #include "webrtc/base/logging.h"
 #include "webrtc/base/thread.h"
-#include "webrtc/common_video/interface/texture_video_frame.h"
 #include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
 #include "webrtc/system_wrappers/interface/logcat_trace_context.h"
 #include "webrtc/system_wrappers/interface/tick_util.h"
@@ -54,7 +53,6 @@ using webrtc::DecodedImageCallback;
 using webrtc::EncodedImage;
 using webrtc::I420VideoFrame;
 using webrtc::RTPFragmentationHeader;
-using webrtc::TextureVideoFrame;
 using webrtc::TickTime;
 using webrtc::VideoCodec;
 using webrtc::VideoCodecType;
@@ -657,7 +655,7 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
   int32_t callback_status = WEBRTC_VIDEO_CODEC_OK;
   if (use_surface_) {
     native_handle_.SetTextureObject(surface_texture_, texture_id);
-    TextureVideoFrame texture_image(
+    I420VideoFrame texture_image(
         &native_handle_, width, height, output_timestamp_, 0);
     texture_image.set_ntp_time_ms(output_ntp_time_ms_);
     callback_status = callback_->Decoded(texture_image);

View File

@@ -29,11 +29,11 @@
 #ifndef TALK_APP_WEBRTC_JAVA_JNI_NATIVE_HANDLE_IMPL_H_
 #define TALK_APP_WEBRTC_JAVA_JNI_NATIVE_HANDLE_IMPL_H_

-#include "webrtc/common_video/interface/texture_video_frame.h"
+#include "webrtc/common_video/interface/native_handle.h"

 namespace webrtc_jni {

-// Wrapper for texture object in TextureVideoFrame.
+// Wrapper for texture object in TextureBuffer.
 class NativeHandleImpl : public webrtc::NativeHandle {
  public:
  NativeHandleImpl() :

View File

@@ -20,13 +20,11 @@ source_set("common_video") {
    "i420_video_frame.cc",
    "interface/i420_video_frame.h",
    "interface/native_handle.h",
-   "interface/texture_video_frame.h",
    "interface/video_frame_buffer.h",
    "libyuv/include/scaler.h",
    "libyuv/include/webrtc_libyuv.h",
    "libyuv/scaler.cc",
    "libyuv/webrtc_libyuv.cc",
-   "texture_video_frame.cc",
    "video_frame_buffer.cc",
  ]

View File

@@ -41,14 +41,12 @@
      'sources': [
        'interface/i420_video_frame.h',
        'interface/native_handle.h',
-       'interface/texture_video_frame.h',
        'interface/video_frame_buffer.h',
        'i420_video_frame.cc',
        'libyuv/include/webrtc_libyuv.h',
        'libyuv/include/scaler.h',
        'libyuv/webrtc_libyuv.cc',
        'libyuv/scaler.cc',
-       'texture_video_frame.cc',
        'video_frame_buffer.cc',
      ],
    },

View File

@@ -22,7 +22,6 @@
        'i420_video_frame_unittest.cc',
        'libyuv/libyuv_unittest.cc',
        'libyuv/scaler_unittest.cc',
-       'texture_video_frame_unittest.cc'
      ],
      # Disable warnings to enable Win64 build, issue 1323.
      'msvs_disabled_warnings': [

View File

@@ -37,7 +37,18 @@ I420VideoFrame::I420VideoFrame(
      rotation_(rotation) {
 }

-I420VideoFrame::~I420VideoFrame() {}
+I420VideoFrame::I420VideoFrame(NativeHandle* handle,
+                               int width,
+                               int height,
+                               uint32_t timestamp,
+                               int64_t render_time_ms)
+    : video_frame_buffer_(
+          new rtc::RefCountedObject<TextureBuffer>(handle, width, height)),
+      timestamp_(timestamp),
+      ntp_time_ms_(0),
+      render_time_ms_(render_time_ms),
+      rotation_(kVideoRotation_0) {
+}

 int I420VideoFrame::CreateEmptyFrame(int width, int height,
                                      int stride_y, int stride_u, int stride_v) {

View File

@@ -17,12 +17,27 @@
 namespace webrtc {

+class NativeHandleImpl : public NativeHandle {
+ public:
+  NativeHandleImpl() : ref_count_(0) {}
+  virtual ~NativeHandleImpl() {}
+  virtual int32_t AddRef() { return ++ref_count_; }
+  virtual int32_t Release() { return --ref_count_; }
+  virtual void* GetHandle() { return NULL; }
+  int32_t ref_count() { return ref_count_; }
+
+ private:
+  int32_t ref_count_;
+};
+
 bool EqualPlane(const uint8_t* data1,
                 const uint8_t* data2,
                 int stride,
                 int width,
                 int height);

 bool EqualFrames(const I420VideoFrame& frame1, const I420VideoFrame& frame2);
+bool EqualTextureFrames(const I420VideoFrame& frame1,
+                        const I420VideoFrame& frame2);

 int ExpectedSize(int plane_stride, int image_height, PlaneType type);

 TEST(TestI420VideoFrame, InitialValues) {
@@ -264,6 +279,38 @@ TEST(TestI420VideoFrame, FailToReuseAllocation) {
   EXPECT_NE(v, frame1.buffer(kVPlane));
 }

+TEST(TestI420VideoFrame, TextureInitialValues) {
+  NativeHandleImpl handle;
+  I420VideoFrame frame(&handle, 640, 480, 100, 10);
+  EXPECT_EQ(640, frame.width());
+  EXPECT_EQ(480, frame.height());
+  EXPECT_EQ(100u, frame.timestamp());
+  EXPECT_EQ(10, frame.render_time_ms());
+  EXPECT_EQ(&handle, frame.native_handle());
+
+  frame.set_timestamp(200);
+  EXPECT_EQ(200u, frame.timestamp());
+  frame.set_render_time_ms(20);
+  EXPECT_EQ(20, frame.render_time_ms());
+}
+
+TEST(TestI420VideoFrame, RefCount) {
+  NativeHandleImpl handle;
+  EXPECT_EQ(0, handle.ref_count());
+  I420VideoFrame *frame = new I420VideoFrame(&handle, 640, 480, 100, 200);
+  EXPECT_EQ(1, handle.ref_count());
+  delete frame;
+  EXPECT_EQ(0, handle.ref_count());
+}
+
+TEST(TestI420VideoFrame, CloneTextureFrame) {
+  NativeHandleImpl handle;
+  I420VideoFrame frame1(&handle, 640, 480, 100, 200);
+  rtc::scoped_ptr<I420VideoFrame> frame2(frame1.CloneFrame());
+  EXPECT_TRUE(frame2.get() != NULL);
+  EXPECT_TRUE(EqualTextureFrames(frame1, *frame2));
+}
+
 bool EqualPlane(const uint8_t* data1,
                 const uint8_t* data2,
                 int stride,
@@ -299,6 +346,15 @@ bool EqualFrames(const I420VideoFrame& frame1, const I420VideoFrame& frame2) {
                     frame1.stride(kVPlane), half_width, half_height);
 }

+bool EqualTextureFrames(const I420VideoFrame& frame1,
+                        const I420VideoFrame& frame2) {
+  return ((frame1.native_handle() == frame2.native_handle()) &&
+          (frame1.width() == frame2.width()) &&
+          (frame1.height() == frame2.height()) &&
+          (frame1.timestamp() == frame2.timestamp()) &&
+          (frame1.render_time_ms() == frame2.render_time_ms()));
+}
+
 int ExpectedSize(int plane_stride, int image_height, PlaneType type) {
   if (type == kYPlane) {
     return (plane_stride * image_height);

View File

@@ -17,7 +17,7 @@ namespace webrtc {
 // A class to store an opaque handle of the underlying video frame. This is used
 // when the frame is backed by a texture. WebRTC carries the handle in
-// TextureVideoFrame. This object keeps a reference to the handle. The reference
+// TextureBuffer. This object keeps a reference to the handle. The reference
 // is cleared when the object is destroyed. It is important to destroy the
 // object as soon as possible so the texture can be recycled.
 class NativeHandle {

View File

@@ -1,35 +0,0 @@
-/*
- *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef COMMON_VIDEO_INTERFACE_TEXTURE_VIDEO_FRAME_H
-#define COMMON_VIDEO_INTERFACE_TEXTURE_VIDEO_FRAME_H
-
-// TextureVideoFrame class
-//
-// Storing and handling of video frames backed by textures.
-
-#include "webrtc/common_video/interface/i420_video_frame.h"
-#include "webrtc/common_video/interface/native_handle.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-class TextureVideoFrame : public I420VideoFrame {
- public:
-  TextureVideoFrame(NativeHandle* handle,
-                    int width,
-                    int height,
-                    uint32_t timestamp,
-                    int64_t render_time_ms);
-};
-
-}  // namespace webrtc
-
-#endif  // COMMON_VIDEO_INTERFACE_TEXTURE_VIDEO_FRAME_H

View File

@@ -1,29 +0,0 @@
-/*
- *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/common_video/interface/texture_video_frame.h"
-
-#include "webrtc/base/refcount.h"
-
-namespace webrtc {
-
-TextureVideoFrame::TextureVideoFrame(NativeHandle* handle,
-                                     int width,
-                                     int height,
-                                     uint32_t timestamp,
-                                     int64_t render_time_ms)
-    : I420VideoFrame(
-          new rtc::RefCountedObject<TextureBuffer>(handle, width, height),
-          timestamp,
-          render_time_ms,
-          kVideoRotation_0) {
-}
-
-}  // namespace webrtc

View File

@@ -1,75 +0,0 @@
-/*
- *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/common_video/interface/texture_video_frame.h"
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/common_video/interface/native_handle.h"
-
-namespace webrtc {
-
-class NativeHandleImpl : public NativeHandle {
- public:
-  NativeHandleImpl() : ref_count_(0) {}
-  virtual ~NativeHandleImpl() {}
-  virtual int32_t AddRef() { return ++ref_count_; }
-  virtual int32_t Release() { return --ref_count_; }
-  virtual void* GetHandle() { return NULL; }
-  int32_t ref_count() { return ref_count_; }
-
- private:
-  int32_t ref_count_;
-};
-
-bool EqualTextureFrames(const I420VideoFrame& frame1,
-                        const I420VideoFrame& frame2);
-
-TEST(TestTextureVideoFrame, InitialValues) {
-  NativeHandleImpl handle;
-  TextureVideoFrame frame(&handle, 640, 480, 100, 10);
-  EXPECT_EQ(640, frame.width());
-  EXPECT_EQ(480, frame.height());
-  EXPECT_EQ(100u, frame.timestamp());
-  EXPECT_EQ(10, frame.render_time_ms());
-  EXPECT_EQ(&handle, frame.native_handle());
-
-  frame.set_timestamp(200);
-  EXPECT_EQ(200u, frame.timestamp());
-  frame.set_render_time_ms(20);
-  EXPECT_EQ(20, frame.render_time_ms());
-}
-
-TEST(TestTextureVideoFrame, RefCount) {
-  NativeHandleImpl handle;
-  EXPECT_EQ(0, handle.ref_count());
-  TextureVideoFrame *frame = new TextureVideoFrame(&handle, 640, 480, 100, 200);
-  EXPECT_EQ(1, handle.ref_count());
-  delete frame;
-  EXPECT_EQ(0, handle.ref_count());
-}
-
-TEST(TestTextureVideoFrame, CloneFrame) {
-  NativeHandleImpl handle;
-  TextureVideoFrame frame1(&handle, 640, 480, 100, 200);
-  rtc::scoped_ptr<I420VideoFrame> frame2(frame1.CloneFrame());
-  EXPECT_TRUE(frame2.get() != NULL);
-  EXPECT_TRUE(EqualTextureFrames(frame1, *frame2));
-}
-
-bool EqualTextureFrames(const I420VideoFrame& frame1,
-                        const I420VideoFrame& frame2) {
-  return ((frame1.native_handle() == frame2.native_handle()) &&
-          (frame1.width() == frame2.width()) &&
-          (frame1.height() == frame2.height()) &&
-          (frame1.timestamp() == frame2.timestamp()) &&
-          (frame1.render_time_ms() == frame2.render_time_ms()));
-}
-
-}  // namespace webrtc

View File

@@ -12,7 +12,6 @@
 #include <assert.h>

-#include "webrtc/common_video/interface/texture_video_frame.h"
 #include "webrtc/modules/interface/module_common_types.h"
 #include "webrtc/system_wrappers/interface/tick_util.h"
 #include "webrtc/system_wrappers/interface/trace.h"

View File

@@ -15,7 +15,6 @@
 #include "webrtc/call.h"
 #include "webrtc/common_video/interface/i420_video_frame.h"
 #include "webrtc/common_video/interface/native_handle.h"
-#include "webrtc/common_video/interface/texture_video_frame.h"
 #include "webrtc/frame_callback.h"
 #include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
 #include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
@@ -1032,7 +1031,7 @@ TEST_F(VideoSendStreamTest, CapturesTextureAndI420VideoFrames) {
   send_config_.pre_encode_callback = &observer;
   CreateStreams();

-  // Prepare five input frames. Send I420VideoFrame and TextureVideoFrame
+  // Prepare five input frames. Send ordinary I420VideoFrame and texture frames
   // alternatively.
   ScopedVector<I420VideoFrame> input_frames;
   int width = static_cast<int>(encoder_config_.streams[0].width);
@@ -1043,11 +1042,11 @@
       new webrtc::RefCountImpl<FakeNativeHandle>();
   webrtc::RefCountImpl<FakeNativeHandle>* handle3 =
       new webrtc::RefCountImpl<FakeNativeHandle>();
-  input_frames.push_back(new TextureVideoFrame(handle1, width, height, 1, 1));
-  input_frames.push_back(new TextureVideoFrame(handle2, width, height, 2, 2));
+  input_frames.push_back(new I420VideoFrame(handle1, width, height, 1, 1));
+  input_frames.push_back(new I420VideoFrame(handle2, width, height, 2, 2));
   input_frames.push_back(CreateI420VideoFrame(width, height, 1));
   input_frames.push_back(CreateI420VideoFrame(width, height, 2));
-  input_frames.push_back(new TextureVideoFrame(handle3, width, height, 3, 3));
+  input_frames.push_back(new I420VideoFrame(handle3, width, height, 3, 3));

   send_stream_->Start();
   for (size_t i = 0; i < input_frames.size(); i++) {

View File

@@ -10,7 +10,6 @@
 #include "webrtc/video_engine/vie_capturer.h"

-#include "webrtc/common_video/interface/texture_video_frame.h"
 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
 #include "webrtc/modules/interface/module_common_types.h"
 #include "webrtc/modules/utility/interface/process_thread.h"

View File

@@ -19,7 +19,6 @@
 #include "webrtc/base/scoped_ptr.h"
 #include "webrtc/common.h"
 #include "webrtc/common_video/interface/native_handle.h"
-#include "webrtc/common_video/interface/texture_video_frame.h"
 #include "webrtc/modules/utility/interface/mock/mock_process_thread.h"
 #include "webrtc/modules/video_capture/include/mock/mock_video_capture.h"
 #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
@@ -136,7 +135,7 @@ TEST_F(ViECapturerTest, TestTextureFrames) {
     webrtc::RefCountImpl<FakeNativeHandle>* handle =
         new webrtc::RefCountImpl<FakeNativeHandle>();
     // Add one to |i| so that width/height > 0.
-    input_frames_.push_back(new TextureVideoFrame(handle, i + 1, i + 1, i, i));
+    input_frames_.push_back(new I420VideoFrame(handle, i + 1, i + 1, i, i));
     AddInputFrame(input_frames_[i]);
     WaitOutputFrame();
   }
@@ -167,7 +166,7 @@ TEST_F(ViECapturerTest, TestI420Frames) {
 TEST_F(ViECapturerTest, TestI420FrameAfterTextureFrame) {
   webrtc::RefCountImpl<FakeNativeHandle>* handle =
       new webrtc::RefCountImpl<FakeNativeHandle>();
-  input_frames_.push_back(new TextureVideoFrame(handle, 1, 1, 1, 1));
+  input_frames_.push_back(new I420VideoFrame(handle, 1, 1, 1, 1));
   AddInputFrame(input_frames_[0]);
   WaitOutputFrame();

@@ -189,7 +188,7 @@ TEST_F(ViECapturerTest, TestTextureFrameAfterI420Frame) {
   webrtc::RefCountImpl<FakeNativeHandle>* handle =
       new webrtc::RefCountImpl<FakeNativeHandle>();
-  input_frames_.push_back(new TextureVideoFrame(handle, 1, 1, 1, 1));
+  input_frames_.push_back(new I420VideoFrame(handle, 1, 1, 1, 1));
   AddInputFrame(input_frames_[1]);
   WaitOutputFrame();

View File

@@ -12,9 +12,10 @@
 #define WEBRTC_VIDEO_FRAME_H_

 #include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/common_video/interface/native_handle.h"
 #include "webrtc/common_video/interface/video_frame_buffer.h"
-#include "webrtc/typedefs.h"
 #include "webrtc/common_video/rotation.h"
+#include "webrtc/typedefs.h"

 namespace webrtc {

@@ -25,91 +26,95 @@ class I420VideoFrame {
                  uint32_t timestamp,
                  int64_t render_time_ms,
                  VideoRotation rotation);

-  virtual ~I420VideoFrame();
+  I420VideoFrame(NativeHandle* handle,
+                 int width,
+                 int height,
+                 uint32_t timestamp,
+                 int64_t render_time_ms);

   // CreateEmptyFrame: Sets frame dimensions and allocates buffers based
   // on set dimensions - height and plane stride.
   // If required size is bigger than the allocated one, new buffers of adequate
   // size will be allocated.
   // Return value: 0 on success, -1 on error.
-  virtual int CreateEmptyFrame(int width,
+  int CreateEmptyFrame(int width,
                        int height,
                        int stride_y,
                        int stride_u,
                        int stride_v);

   // CreateFrame: Sets the frame's members and buffers. If required size is
   // bigger than allocated one, new buffers of adequate size will be allocated.
   // Return value: 0 on success, -1 on error.
   // TODO(magjed): Remove unnecessary buffer size arguments.
-  virtual int CreateFrame(int size_y,
+  int CreateFrame(int size_y,
                   const uint8_t* buffer_y,
                   int size_u,
                   const uint8_t* buffer_u,
                   int size_v,
                   const uint8_t* buffer_v,
                   int width,
                   int height,
                   int stride_y,
                   int stride_u,
                   int stride_v);

   // TODO(guoweis): remove the previous CreateFrame when chromium has this code.
-  virtual int CreateFrame(int size_y,
+  int CreateFrame(int size_y,
                   const uint8_t* buffer_y,
                   int size_u,
                   const uint8_t* buffer_u,
                   int size_v,
                   const uint8_t* buffer_v,
                   int width,
                   int height,
                   int stride_y,
                   int stride_u,
                   int stride_v,
                   VideoRotation rotation);

   // Copy frame: If required size is bigger than allocated one, new buffers of
   // adequate size will be allocated.
   // Return value: 0 on success, -1 on error.
-  virtual int CopyFrame(const I420VideoFrame& videoFrame);
+  int CopyFrame(const I420VideoFrame& videoFrame);

   // Make a copy of |this|. The caller owns the returned frame.
   // Return value: a new frame on success, NULL on error.
-  virtual I420VideoFrame* CloneFrame() const;
+  I420VideoFrame* CloneFrame() const;

   // Swap Frame.
-  virtual void SwapFrame(I420VideoFrame* videoFrame);
+  void SwapFrame(I420VideoFrame* videoFrame);

   // Get pointer to buffer per plane.
-  virtual uint8_t* buffer(PlaneType type);
+  uint8_t* buffer(PlaneType type);

   // Overloading with const.
-  virtual const uint8_t* buffer(PlaneType type) const;
+  const uint8_t* buffer(PlaneType type) const;

   // Get allocated size per plane.
-  virtual int allocated_size(PlaneType type) const;
+  int allocated_size(PlaneType type) const;

   // Get allocated stride per plane.
-  virtual int stride(PlaneType type) const;
+  int stride(PlaneType type) const;

   // Get frame width.
-  virtual int width() const;
+  int width() const;

   // Get frame height.
-  virtual int height() const;
+  int height() const;

   // Set frame timestamp (90kHz).
-  virtual void set_timestamp(uint32_t timestamp) { timestamp_ = timestamp; }
+  void set_timestamp(uint32_t timestamp) { timestamp_ = timestamp; }

   // Get frame timestamp (90kHz).
-  virtual uint32_t timestamp() const { return timestamp_; }
+  uint32_t timestamp() const { return timestamp_; }

   // Set capture ntp time in miliseconds.
-  virtual void set_ntp_time_ms(int64_t ntp_time_ms) {
+  void set_ntp_time_ms(int64_t ntp_time_ms) {
     ntp_time_ms_ = ntp_time_ms;
   }

   // Get capture ntp time in miliseconds.
-  virtual int64_t ntp_time_ms() const { return ntp_time_ms_; }
+  int64_t ntp_time_ms() const { return ntp_time_ms_; }

   // Naming convention for Coordination of Video Orientation. Please see
   // http://www.etsi.org/deliver/etsi_ts/126100_126199/126114/12.07.00_60/ts_126114v120700p.pdf
@@ -121,30 +126,29 @@ class I420VideoFrame {
   // "apply rotation" = modify a frame from being "pending" to being "not
   // pending" rotation (a no-op for "unrotated").
   //
-  virtual VideoRotation rotation() const { return rotation_; }
-  virtual void set_rotation(VideoRotation rotation) {
+  VideoRotation rotation() const { return rotation_; }
+  void set_rotation(VideoRotation rotation) {
     rotation_ = rotation;
   }

   // Set render time in miliseconds.
-  virtual void set_render_time_ms(int64_t render_time_ms) {
+  void set_render_time_ms(int64_t render_time_ms) {
     render_time_ms_ = render_time_ms;
   }

   // Get render time in miliseconds.
-  virtual int64_t render_time_ms() const { return render_time_ms_; }
+  int64_t render_time_ms() const { return render_time_ms_; }

   // Return true if underlying plane buffers are of zero size, false if not.
-  virtual bool IsZeroSize() const;
+  bool IsZeroSize() const;

   // Return the handle of the underlying video frame. This is used when the
   // frame is backed by a texture. The object should be destroyed when it is no
   // longer in use, so the underlying resource can be freed.
-  virtual void* native_handle() const;
+  void* native_handle() const;

   // Return the underlying buffer.
-  virtual rtc::scoped_refptr<webrtc::VideoFrameBuffer> video_frame_buffer()
-      const;
+  rtc::scoped_refptr<webrtc::VideoFrameBuffer> video_frame_buffer() const;

  private:
   // An opaque reference counted handle that stores the pixel data.