Revert 8599 "Revert 8580 "Unify underlying frame buffer in I420VideoFrame and...""

It's possible to build Chrome on Windows with this patch now.

BUG=1128

> This is unfortunately causing build problems in Chrome on Windows.

>> Unify underlying frame buffer in I420VideoFrame and WebRtcVideoFrame
>>
>> Currently, I420VideoFrame uses three webrtc::Plane objects to store pixel data, and WebRtcVideoFrame uses WebRtcVideoFrame::FrameBuffer/webrtc::VideoFrame. The two subclasses WebRtcTextureVideoFrame and TextureVideoFrame use a NativeHandle to store pixel data, and there is also a class WebRtcVideoRenderFrame that wraps an I420VideoFrame.
>>
>> This CL replaces these classes with a new interface VideoFrameBuffer that provides the common functionality. This makes it possible to remove deep frame copies between cricket::VideoFrame and I420VideoFrame.
>>
>> Some additional minor changes are:
>> * Disallow creation of 0x0 texture frames.
>> * Remove the half-implemented ref count functions in I420VideoFrame.
>> * Remove the Alias functionality in WebRtcVideoFrame
>>
>> The final goal is to eliminate all frame copies, but to limit the scope of this CL, some planned changes are postponed to follow-up CLs (see planned changes in https://webrtc-codereview.appspot.com/38879004, or https://docs.google.com/document/d/1bxoJZNmlo-Z9GnQwIaWpEG6hDlL_W-bzka8Zb_K2NbA/preview). Specifically, this CL:
>> * Keeps empty subclasses WebRtcTextureVideoFrame and TextureVideoFrame, and just delegates the construction to the superclass.
>> * Keeps the deep copies from cricket::VideoFrame to I420VideoFrame.
>>
>> BUG=1128
>> R=mflodman@webrtc.org, pbos@webrtc.org, perkj@webrtc.org, tommi@webrtc.org
>>
>> Review URL: https://webrtc-codereview.appspot.com/42469004

R=pbos@webrtc.org
TBR=mflodman, pbos, perkj, tommi

Review URL: https://webrtc-codereview.appspot.com/45489004

Cr-Commit-Position: refs/heads/master@{#8616}
git-svn-id: http://webrtc.googlecode.com/svn/trunk@8616 4adac7df-926f-26a2-2b94-8c16560cd09d
magjed@webrtc.org 2015-03-05 14:03:08 +00:00
parent 67a9e40286
commit 2386d6dd92
22 changed files with 531 additions and 1381 deletions
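
For orientation, here is a condensed sketch of the VideoFrameBuffer interface this CL introduces, taken from the new webrtc/common_video/interface/video_frame_buffer.h shown in full further down (comments abbreviated):

#include "webrtc/base/refcount.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/common_video/interface/native_handle.h"

namespace webrtc {

enum PlaneType { kYPlane = 0, kUPlane = 1, kVPlane = 2, kNumOfPlanes = 3 };

// Reference-counted pixel buffer without frame metadata (rotation,
// timestamps, etc. stay in the owning frame classes).
class VideoFrameBuffer : public rtc::RefCountInterface {
 public:
  virtual bool HasOneRef() const = 0;
  virtual int width() const = 0;
  virtual int height() const = 0;
  virtual const uint8_t* data(PlaneType type) const = 0;
  // Non-const data access is only allowed if HasOneRef() is true.
  virtual uint8_t* data(PlaneType type) = 0;
  virtual int stride(PlaneType type) const = 0;
  virtual rtc::scoped_refptr<NativeHandle> native_handle() const = 0;

 protected:
  virtual ~VideoFrameBuffer();
};

}  // namespace webrtc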

View File

@ -45,8 +45,7 @@ class VideoFrameFactory {
// The returned frame aliases the aliased_frame if the input color
// space allows for aliasing, otherwise a color conversion will
// occur. For safety, |input_frame| must outlive the returned
// frame. Returns NULL if conversion fails.
// occur. Returns NULL if conversion fails.
// The returned frame will be a center crop of |input_frame| with
// size |cropped_width| x |cropped_height|.
@ -56,9 +55,7 @@ class VideoFrameFactory {
// The returned frame will be a center crop of |input_frame| with size
// |cropped_width| x |cropped_height|, scaled to |output_width| x
// |output_height|. If scaling has taken place, i.e. cropped input
// resolution != output resolution, the returned frame will remain valid
// until this function is called again.
// |output_height|.
virtual VideoFrame* CreateAliasedFrame(const CapturedFrame* input_frame,
int cropped_input_width,
int cropped_input_height,

View File

@ -27,165 +27,20 @@
#include "talk/media/webrtc/webrtctexturevideoframe.h"
#include "webrtc/base/common.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/stream.h"
#define UNIMPLEMENTED \
LOG(LS_ERROR) << "Call to unimplemented function " << __FUNCTION__; \
ASSERT(false)
#include "webrtc/base/refcount.h"
namespace cricket {
WebRtcTextureVideoFrame::WebRtcTextureVideoFrame(
webrtc::NativeHandle* handle, int width, int height, int64_t elapsed_time,
int64_t time_stamp)
: handle_(handle), width_(width), height_(height),
elapsed_time_(elapsed_time), time_stamp_(time_stamp) {}
WebRtcTextureVideoFrame::~WebRtcTextureVideoFrame() {}
bool WebRtcTextureVideoFrame::InitToBlack(
int w, int h, size_t pixel_width, size_t pixel_height, int64_t elapsed_time,
int64_t time_stamp) {
UNIMPLEMENTED;
return false;
}
bool WebRtcTextureVideoFrame::Reset(uint32 fourcc,
int w,
int h,
int dw,
int dh,
uint8* sample,
size_t sample_size,
size_t pixel_width,
size_t pixel_height,
int64_t elapsed_time,
int64_t time_stamp,
webrtc::VideoRotation rotation) {
UNIMPLEMENTED;
return false;
}
const uint8* WebRtcTextureVideoFrame::GetYPlane() const {
UNIMPLEMENTED;
return NULL;
}
const uint8* WebRtcTextureVideoFrame::GetUPlane() const {
UNIMPLEMENTED;
return NULL;
}
const uint8* WebRtcTextureVideoFrame::GetVPlane() const {
UNIMPLEMENTED;
return NULL;
}
uint8* WebRtcTextureVideoFrame::GetYPlane() {
UNIMPLEMENTED;
return NULL;
}
uint8* WebRtcTextureVideoFrame::GetUPlane() {
UNIMPLEMENTED;
return NULL;
}
uint8* WebRtcTextureVideoFrame::GetVPlane() {
UNIMPLEMENTED;
return NULL;
}
int32 WebRtcTextureVideoFrame::GetYPitch() const {
UNIMPLEMENTED;
return width_;
}
int32 WebRtcTextureVideoFrame::GetUPitch() const {
UNIMPLEMENTED;
return (width_ + 1) / 2;
}
int32 WebRtcTextureVideoFrame::GetVPitch() const {
UNIMPLEMENTED;
return (width_ + 1) / 2;
}
VideoFrame* WebRtcTextureVideoFrame::Copy() const {
return new WebRtcTextureVideoFrame(
handle_, width_, height_, elapsed_time_, time_stamp_);
}
bool WebRtcTextureVideoFrame::MakeExclusive() {
UNIMPLEMENTED;
return false;
}
size_t WebRtcTextureVideoFrame::CopyToBuffer(uint8* buffer, size_t size) const {
UNIMPLEMENTED;
return 0;
}
size_t WebRtcTextureVideoFrame::ConvertToRgbBuffer(
uint32 to_fourcc, uint8* buffer, size_t size, int stride_rgb) const {
UNIMPLEMENTED;
return 0;
}
bool WebRtcTextureVideoFrame::CopyToPlanes(
uint8* dst_y, uint8* dst_u, uint8* dst_v, int32 dst_pitch_y,
int32 dst_pitch_u, int32 dst_pitch_v) const {
UNIMPLEMENTED;
return false;
}
void WebRtcTextureVideoFrame::CopyToFrame(VideoFrame* dst) const {
UNIMPLEMENTED;
}
rtc::StreamResult WebRtcTextureVideoFrame::Write(
rtc::StreamInterface* stream, int* error) {
UNIMPLEMENTED;
return rtc::SR_ERROR;
}
void WebRtcTextureVideoFrame::StretchToPlanes(
uint8* dst_y, uint8* dst_u, uint8* dst_v, int32 dst_pitch_y,
int32 dst_pitch_u, int32 dst_pitch_v, size_t width, size_t height,
bool interpolate, bool vert_crop) const {
UNIMPLEMENTED;
}
size_t WebRtcTextureVideoFrame::StretchToBuffer(
size_t dst_width, size_t dst_height, uint8* dst_buffer, size_t size,
bool interpolate, bool vert_crop) const {
UNIMPLEMENTED;
return 0;
}
void WebRtcTextureVideoFrame::StretchToFrame(
VideoFrame* dst, bool interpolate, bool vert_crop) const {
UNIMPLEMENTED;
}
VideoFrame* WebRtcTextureVideoFrame::Stretch(
size_t dst_width, size_t dst_height, bool interpolate,
bool vert_crop) const {
UNIMPLEMENTED;
return NULL;
}
bool WebRtcTextureVideoFrame::SetToBlack() {
UNIMPLEMENTED;
return false;
}
VideoFrame* WebRtcTextureVideoFrame::CreateEmptyFrame(
int w, int h, size_t pixel_width, size_t pixel_height, int64_t elapsed_time,
int64_t time_stamp) const {
UNIMPLEMENTED;
return NULL;
WebRtcTextureVideoFrame::WebRtcTextureVideoFrame(webrtc::NativeHandle* handle,
int width,
int height,
int64_t elapsed_time,
int64_t time_stamp)
: WebRtcVideoFrame(new rtc::RefCountedObject<webrtc::TextureBuffer>(handle,
width,
height),
elapsed_time,
time_stamp) {
}
} // namespace cricket
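
With the delegation above, constructing a texture frame is just wrapping a NativeHandle in a TextureBuffer. A usage sketch (the handle source is hypothetical; sizes and timestamps are placeholder values):

// |GetPlatformTextureHandle()| is a hypothetical source of a texture handle.
webrtc::NativeHandle* handle = GetPlatformTextureHandle();
cricket::WebRtcTextureVideoFrame frame(handle, /*width=*/640, /*height=*/480,
                                       /*elapsed_time=*/0, /*time_stamp=*/0);
// No pixel data is available; consumers reach the texture via the handle.
void* native = frame.GetNativeHandle();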

View File

@ -28,95 +28,15 @@
#ifndef TALK_MEDIA_WEBRTC_WEBRTCTEXTUREVIDEOFRAME_H_
#define TALK_MEDIA_WEBRTC_WEBRTCTEXTUREVIDEOFRAME_H_
#include "talk/media/base/videoframe.h"
#include "webrtc/base/refcount.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/common_video/interface/native_handle.h"
#include "talk/media/webrtc/webrtcvideoframe.h"
namespace cricket {
// A video frame backed by the texture via a native handle.
class WebRtcTextureVideoFrame : public VideoFrame {
class WebRtcTextureVideoFrame : public WebRtcVideoFrame {
public:
WebRtcTextureVideoFrame(webrtc::NativeHandle* handle, int width, int height,
int64_t elapsed_time, int64_t time_stamp);
virtual ~WebRtcTextureVideoFrame();
// From base class VideoFrame.
virtual bool InitToBlack(int w, int h, size_t pixel_width,
size_t pixel_height, int64_t elapsed_time,
int64_t time_stamp);
virtual bool Reset(uint32 fourcc,
int w,
int h,
int dw,
int dh,
uint8* sample,
size_t sample_size,
size_t pixel_width,
size_t pixel_height,
int64_t elapsed_time,
int64_t time_stamp,
webrtc::VideoRotation rotation);
virtual size_t GetWidth() const { return width_; }
virtual size_t GetHeight() const { return height_; }
virtual const uint8* GetYPlane() const;
virtual const uint8* GetUPlane() const;
virtual const uint8* GetVPlane() const;
virtual uint8* GetYPlane();
virtual uint8* GetUPlane();
virtual uint8* GetVPlane();
virtual int32 GetYPitch() const;
virtual int32 GetUPitch() const;
virtual int32 GetVPitch() const;
virtual size_t GetPixelWidth() const { return 1; }
virtual size_t GetPixelHeight() const { return 1; }
virtual int64_t GetElapsedTime() const { return elapsed_time_; }
virtual int64_t GetTimeStamp() const { return time_stamp_; }
virtual void SetElapsedTime(int64_t elapsed_time) {
elapsed_time_ = elapsed_time;
}
virtual void SetTimeStamp(int64_t time_stamp) { time_stamp_ = time_stamp; }
virtual webrtc::VideoRotation GetVideoRotation() const {
return webrtc::kVideoRotation_0;
}
virtual VideoFrame* Copy() const;
virtual bool MakeExclusive();
virtual size_t CopyToBuffer(uint8* buffer, size_t size) const;
virtual size_t ConvertToRgbBuffer(uint32 to_fourcc, uint8* buffer,
size_t size, int stride_rgb) const;
virtual void* GetNativeHandle() const { return handle_.get(); }
virtual bool CopyToPlanes(
uint8* dst_y, uint8* dst_u, uint8* dst_v,
int32 dst_pitch_y, int32 dst_pitch_u, int32 dst_pitch_v) const;
virtual void CopyToFrame(VideoFrame* target) const;
virtual rtc::StreamResult Write(rtc::StreamInterface* stream,
int* error);
virtual void StretchToPlanes(
uint8* y, uint8* u, uint8* v, int32 pitchY, int32 pitchU, int32 pitchV,
size_t width, size_t height, bool interpolate, bool crop) const;
virtual size_t StretchToBuffer(size_t w, size_t h, uint8* buffer, size_t size,
bool interpolate, bool crop) const;
virtual void StretchToFrame(VideoFrame* target, bool interpolate,
bool crop) const;
virtual VideoFrame* Stretch(size_t w, size_t h, bool interpolate,
bool crop) const;
virtual bool SetToBlack();
protected:
virtual VideoFrame* CreateEmptyFrame(int w, int h, size_t pixel_width,
size_t pixel_height,
int64_t elapsed_time,
int64_t time_stamp) const;
private:
// The handle of the underlying video frame.
rtc::scoped_refptr<webrtc::NativeHandle> handle_;
int width_;
int height_;
int64_t elapsed_time_;
int64_t time_stamp_;
};
} // namespace cricket

View File

@ -393,7 +393,10 @@ class WebRtcRenderAdapter : public webrtc::ExternalRenderer {
return 0;
}
WebRtcVideoRenderFrame cricket_frame(&webrtc_frame, elapsed_time_ms);
WebRtcVideoFrame cricket_frame(
webrtc_frame.video_frame_buffer(),
elapsed_time_ms * rtc::kNumNanosecsPerMillisec,
webrtc_frame.render_time_ms() * rtc::kNumNanosecsPerMillisec);
return renderer_->RenderFrame(&cricket_frame) ? 0 : -1;
}

View File

@ -1993,7 +1993,10 @@ void WebRtcVideoChannel2::WebRtcVideoReceiveStream::RenderFrame(
LOG(LS_VERBOSE) << "RenderFrame: (" << frame.width() << "x" << frame.height()
<< ")";
const WebRtcVideoRenderFrame render_frame(&frame, elapsed_time_ms);
const WebRtcVideoFrame render_frame(
frame.video_frame_buffer(),
elapsed_time_ms * rtc::kNumNanosecsPerMillisec,
frame.render_time_ms() * rtc::kNumNanosecsPerMillisec);
renderer_->RenderFrame(&render_frame);
}

View File

@ -28,93 +28,31 @@
#include "talk/media/webrtc/webrtcvideoframe.h"
#include "libyuv/convert.h"
#include "libyuv/convert_from.h"
#include "libyuv/planar_functions.h"
#include "talk/media/base/videocapturer.h"
#include "talk/media/base/videocommon.h"
#include "talk/media/webrtc/webrtctexturevideoframe.h"
#include "webrtc/base/logging.h"
#include "webrtc/video_frame.h"
#define UNIMPLEMENTED \
LOG(LS_ERROR) << "Call to unimplemented function " << __FUNCTION__; \
ASSERT(false)
using webrtc::kYPlane;
using webrtc::kUPlane;
using webrtc::kVPlane;
namespace cricket {
// Class that wraps ownership semantics of a buffer passed to it.
// * Buffers passed using Attach() become owned by this FrameBuffer and will be
// destroyed on FrameBuffer destruction.
// * Buffers passed using Alias() are not owned and will not be destroyed on
// FrameBuffer destruction. The buffer must then outlive the FrameBuffer.
class WebRtcVideoFrame::FrameBuffer {
public:
FrameBuffer();
explicit FrameBuffer(size_t length);
~FrameBuffer();
WebRtcVideoFrame::WebRtcVideoFrame() {}
void Attach(uint8* data, size_t length);
void Alias(uint8* data, size_t length);
uint8* data();
size_t length() const;
webrtc::VideoFrame* frame();
const webrtc::VideoFrame* frame() const;
private:
rtc::scoped_ptr<uint8[]> owned_data_;
webrtc::VideoFrame video_frame_;
};
WebRtcVideoFrame::FrameBuffer::FrameBuffer() {}
WebRtcVideoFrame::FrameBuffer::FrameBuffer(size_t length) {
uint8* buffer = new uint8[length];
Attach(buffer, length);
WebRtcVideoFrame::WebRtcVideoFrame(
const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
int64_t elapsed_time_ns,
int64_t time_stamp_ns)
: video_frame_buffer_(buffer),
pixel_width_(1),
pixel_height_(1),
elapsed_time_ns_(elapsed_time_ns),
time_stamp_ns_(time_stamp_ns),
rotation_(webrtc::kVideoRotation_0) {
}
WebRtcVideoFrame::FrameBuffer::~FrameBuffer() {
// Make sure that |video_frame_| doesn't delete the buffer, as |owned_data_|
// will release the buffer if this FrameBuffer owns it.
uint8_t* new_memory = NULL;
size_t new_length = 0;
size_t new_size = 0;
video_frame_.Swap(new_memory, new_length, new_size);
}
void WebRtcVideoFrame::FrameBuffer::Attach(uint8* data, size_t length) {
Alias(data, length);
owned_data_.reset(data);
}
void WebRtcVideoFrame::FrameBuffer::Alias(uint8* data, size_t length) {
owned_data_.reset();
uint8_t* new_memory = reinterpret_cast<uint8_t*>(data);
size_t new_length = length;
size_t new_size = length;
video_frame_.Swap(new_memory, new_length, new_size);
}
uint8* WebRtcVideoFrame::FrameBuffer::data() {
return video_frame_.Buffer();
}
size_t WebRtcVideoFrame::FrameBuffer::length() const {
return video_frame_.Length();
}
webrtc::VideoFrame* WebRtcVideoFrame::FrameBuffer::frame() {
return &video_frame_;
}
const webrtc::VideoFrame* WebRtcVideoFrame::FrameBuffer::frame() const {
return &video_frame_;
}
WebRtcVideoFrame::WebRtcVideoFrame()
: video_buffer_(new RefCountedBuffer()),
rotation_(webrtc::kVideoRotation_0) {}
WebRtcVideoFrame::~WebRtcVideoFrame() {}
bool WebRtcVideoFrame::Init(uint32 format,
@ -126,11 +64,11 @@ bool WebRtcVideoFrame::Init(uint32 format,
size_t sample_size,
size_t pixel_width,
size_t pixel_height,
int64_t elapsed_time,
int64_t time_stamp,
int64_t elapsed_time_ns,
int64_t time_stamp_ns,
webrtc::VideoRotation rotation) {
return Reset(format, w, h, dw, dh, sample, sample_size, pixel_width,
pixel_height, elapsed_time, time_stamp, rotation,
pixel_height, elapsed_time_ns, time_stamp_ns, rotation,
true /*apply_rotation*/);
}
@ -144,166 +82,111 @@ bool WebRtcVideoFrame::Init(const CapturedFrame* frame, int dw, int dh,
apply_rotation);
}
bool WebRtcVideoFrame::Alias(const CapturedFrame* frame,
int dw,
int dh,
bool apply_rotation) {
if (CanonicalFourCC(frame->fourcc) != FOURCC_I420 ||
(apply_rotation &&
frame->GetRotation() != webrtc::kVideoRotation_0) ||
frame->width != dw || frame->height != dh) {
// TODO(fbarchard): Enable aliasing of more formats.
return Init(frame, dw, dh, apply_rotation);
} else {
Alias(static_cast<uint8*>(frame->data), frame->data_size, frame->width,
frame->height, frame->pixel_width, frame->pixel_height,
frame->elapsed_time, frame->time_stamp, frame->GetRotation());
return true;
}
}
bool WebRtcVideoFrame::InitToBlack(int w, int h, size_t pixel_width,
size_t pixel_height, int64_t elapsed_time,
int64_t time_stamp) {
InitToEmptyBuffer(w, h, pixel_width, pixel_height, elapsed_time, time_stamp);
size_t pixel_height, int64_t elapsed_time_ns,
int64_t time_stamp_ns) {
InitToEmptyBuffer(w, h, pixel_width, pixel_height, elapsed_time_ns,
time_stamp_ns);
return SetToBlack();
}
void WebRtcVideoFrame::Alias(uint8* buffer,
size_t buffer_size,
int w,
int h,
size_t pixel_width,
size_t pixel_height,
int64_t elapsed_time,
int64_t time_stamp,
webrtc::VideoRotation rotation) {
rtc::scoped_refptr<RefCountedBuffer> video_buffer(
new RefCountedBuffer());
video_buffer->Alias(buffer, buffer_size);
Attach(video_buffer.get(), buffer_size, w, h, pixel_width, pixel_height,
elapsed_time, time_stamp, rotation);
size_t WebRtcVideoFrame::GetWidth() const {
return video_frame_buffer_ ? video_frame_buffer_->width() : 0;
}
size_t WebRtcVideoFrame::GetWidth() const { return frame()->Width(); }
size_t WebRtcVideoFrame::GetHeight() const { return frame()->Height(); }
size_t WebRtcVideoFrame::GetHeight() const {
return video_frame_buffer_ ? video_frame_buffer_->height() : 0;
}
const uint8* WebRtcVideoFrame::GetYPlane() const {
uint8_t* buffer = frame()->Buffer();
return buffer;
// Const cast to call the correct const-version of data.
const webrtc::VideoFrameBuffer* const_ptr = video_frame_buffer_.get();
return const_ptr ? const_ptr->data(kYPlane) : nullptr;
}
const uint8* WebRtcVideoFrame::GetUPlane() const {
uint8_t* buffer = frame()->Buffer();
if (buffer) {
buffer += (frame()->Width() * frame()->Height());
}
return buffer;
// Const cast to call the correct const-version of data.
const webrtc::VideoFrameBuffer* const_ptr = video_frame_buffer_.get();
return const_ptr ? const_ptr->data(kUPlane) : nullptr;
}
const uint8* WebRtcVideoFrame::GetVPlane() const {
uint8_t* buffer = frame()->Buffer();
if (buffer) {
int uv_size = static_cast<int>(GetChromaSize());
buffer += frame()->Width() * frame()->Height() + uv_size;
}
return buffer;
// Const cast to call the correct const-version of data.
const webrtc::VideoFrameBuffer* const_ptr = video_frame_buffer_.get();
return const_ptr ? const_ptr->data(kVPlane) : nullptr;
}
uint8* WebRtcVideoFrame::GetYPlane() {
DCHECK(video_buffer_->HasOneRef());
uint8_t* buffer = frame()->Buffer();
return buffer;
return video_frame_buffer_ ? video_frame_buffer_->data(kYPlane) : nullptr;
}
uint8* WebRtcVideoFrame::GetUPlane() {
DCHECK(video_buffer_->HasOneRef());
uint8_t* buffer = frame()->Buffer();
if (buffer) {
buffer += (frame()->Width() * frame()->Height());
}
return buffer;
return video_frame_buffer_ ? video_frame_buffer_->data(kUPlane) : nullptr;
}
uint8* WebRtcVideoFrame::GetVPlane() {
DCHECK(video_buffer_->HasOneRef());
uint8_t* buffer = frame()->Buffer();
if (buffer) {
int uv_size = static_cast<int>(GetChromaSize());
buffer += frame()->Width() * frame()->Height() + uv_size;
}
return buffer;
return video_frame_buffer_ ? video_frame_buffer_->data(kVPlane) : nullptr;
}
VideoFrame* WebRtcVideoFrame::Copy() const {
uint8* old_buffer = video_buffer_->data();
if (!old_buffer)
return NULL;
size_t new_buffer_size = video_buffer_->length();
int32 WebRtcVideoFrame::GetYPitch() const {
return video_frame_buffer_ ? video_frame_buffer_->stride(kYPlane) : 0;
}
WebRtcVideoFrame* ret_val = new WebRtcVideoFrame();
ret_val->Attach(video_buffer_.get(), new_buffer_size, frame()->Width(),
frame()->Height(), pixel_width_, pixel_height_, elapsed_time_,
time_stamp_, rotation_);
return ret_val;
int32 WebRtcVideoFrame::GetUPitch() const {
return video_frame_buffer_ ? video_frame_buffer_->stride(kUPlane) : 0;
}
int32 WebRtcVideoFrame::GetVPitch() const {
return video_frame_buffer_ ? video_frame_buffer_->stride(kVPlane) : 0;
}
bool WebRtcVideoFrame::IsExclusive() const {
return video_buffer_->HasOneRef();
return video_frame_buffer_->HasOneRef();
}
void* WebRtcVideoFrame::GetNativeHandle() const {
return video_frame_buffer_ ? video_frame_buffer_->native_handle() : nullptr;
}
VideoFrame* WebRtcVideoFrame::Copy() const {
WebRtcVideoFrame* new_frame = new WebRtcVideoFrame(
video_frame_buffer_, elapsed_time_ns_, time_stamp_ns_);
new_frame->pixel_width_ = pixel_width_;
new_frame->pixel_height_ = pixel_height_;
new_frame->rotation_ = rotation_;
return new_frame;
}
bool WebRtcVideoFrame::MakeExclusive() {
DCHECK(video_frame_buffer_->native_handle() == nullptr);
if (IsExclusive())
return true;
// Not exclusive already, need to copy.
const size_t length = video_buffer_->length();
RefCountedBuffer* exclusive_buffer = new RefCountedBuffer(length);
memcpy(exclusive_buffer->data(), video_buffer_->data(), length);
Attach(exclusive_buffer, length, frame()->Width(), frame()->Height(),
pixel_width_, pixel_height_, elapsed_time_, time_stamp_, rotation_);
// Not exclusive already, need to copy buffer.
rtc::scoped_refptr<webrtc::VideoFrameBuffer> new_buffer =
new rtc::RefCountedObject<webrtc::I420Buffer>(
video_frame_buffer_->width(), video_frame_buffer_->height(),
video_frame_buffer_->stride(kYPlane),
video_frame_buffer_->stride(kUPlane),
video_frame_buffer_->stride(kVPlane));
if (!CopyToPlanes(new_buffer->data(kYPlane), new_buffer->data(kUPlane),
new_buffer->data(kVPlane), new_buffer->stride(kYPlane),
new_buffer->stride(kUPlane), new_buffer->stride(kVPlane))) {
return false;
}
video_frame_buffer_ = new_buffer;
return true;
}
size_t WebRtcVideoFrame::ConvertToRgbBuffer(uint32 to_fourcc, uint8* buffer,
size_t size, int stride_rgb) const {
if (!frame()->Buffer()) {
return 0;
}
CHECK(video_frame_buffer_);
CHECK(video_frame_buffer_->native_handle() == nullptr);
return VideoFrame::ConvertToRgbBuffer(to_fourcc, buffer, size, stride_rgb);
}
void WebRtcVideoFrame::Attach(RefCountedBuffer* video_buffer,
size_t buffer_size,
int w,
int h,
size_t pixel_width,
size_t pixel_height,
int64_t elapsed_time,
int64_t time_stamp,
webrtc::VideoRotation rotation) {
if (video_buffer_.get() == video_buffer) {
return;
}
video_buffer_ = video_buffer;
frame()->SetWidth(w);
frame()->SetHeight(h);
pixel_width_ = pixel_width;
pixel_height_ = pixel_height;
elapsed_time_ = elapsed_time;
time_stamp_ = time_stamp;
rotation_ = rotation;
}
webrtc::VideoFrame* WebRtcVideoFrame::frame() {
return video_buffer_->frame();
}
const webrtc::VideoFrame* WebRtcVideoFrame::frame() const {
return video_buffer_->frame();
}
bool WebRtcVideoFrame::Reset(uint32 format,
int w,
int h,
@ -313,8 +196,8 @@ bool WebRtcVideoFrame::Reset(uint32 format,
size_t sample_size,
size_t pixel_width,
size_t pixel_height,
int64_t elapsed_time,
int64_t time_stamp,
int64_t elapsed_time_ns,
int64_t time_stamp_ns,
webrtc::VideoRotation rotation,
bool apply_rotation) {
if (!Validate(format, w, h, sample, sample_size)) {
@ -339,18 +222,9 @@ bool WebRtcVideoFrame::Reset(uint32 format,
new_height = dw;
}
// Release reference in |video_buffer| at the end of this scope, so that
// |video_buffer_| becomes the sole owner.
{
size_t desired_size = SizeOf(new_width, new_height);
rtc::scoped_refptr<RefCountedBuffer> video_buffer(
new RefCountedBuffer(desired_size));
// Since libyuv::ConvertToI420 handles the rotation, the new frame's
// rotation should always be 0.
Attach(video_buffer.get(), desired_size, new_width, new_height, pixel_width,
pixel_height, elapsed_time, time_stamp,
apply_rotation ? webrtc::kVideoRotation_0 : rotation);
}
InitToEmptyBuffer(new_width, new_height, pixel_width, pixel_height,
elapsed_time_ns, time_stamp_ns);
rotation_ = apply_rotation ? webrtc::kVideoRotation_0 : rotation;
int horiz_crop = ((w - dw) / 2) & ~1;
// ARGB on Windows has negative height.
@ -358,15 +232,14 @@ bool WebRtcVideoFrame::Reset(uint32 format,
int vert_crop = ((abs(h) - dh) / 2) & ~1;
// Conversion functions expect negative height to flip the image.
int idh = (h < 0) ? -dh : dh;
uint8* y = GetYPlane();
int y_stride = GetYPitch();
uint8* u = GetUPlane();
int u_stride = GetUPitch();
uint8* v = GetVPlane();
int v_stride = GetVPitch();
int r = libyuv::ConvertToI420(
sample, sample_size, y, y_stride, u, u_stride, v, v_stride, horiz_crop,
vert_crop, w, h, dw, idh,
sample, sample_size,
GetYPlane(), GetYPitch(),
GetUPlane(), GetUPitch(),
GetVPlane(), GetVPitch(),
horiz_crop, vert_crop,
w, h,
dw, idh,
static_cast<libyuv::RotationMode>(
apply_rotation ? rotation : webrtc::kVideoRotation_0),
format);
@ -379,168 +252,24 @@ bool WebRtcVideoFrame::Reset(uint32 format,
}
VideoFrame* WebRtcVideoFrame::CreateEmptyFrame(
int w, int h, size_t pixel_width, size_t pixel_height, int64_t elapsed_time,
int64_t time_stamp) const {
int w, int h, size_t pixel_width, size_t pixel_height,
int64_t elapsed_time_ns, int64_t time_stamp_ns) const {
WebRtcVideoFrame* frame = new WebRtcVideoFrame();
frame->InitToEmptyBuffer(w, h, pixel_width, pixel_height, elapsed_time,
time_stamp);
frame->InitToEmptyBuffer(w, h, pixel_width, pixel_height, elapsed_time_ns,
time_stamp_ns);
return frame;
}
void WebRtcVideoFrame::InitToEmptyBuffer(int w, int h, size_t pixel_width,
size_t pixel_height,
int64_t elapsed_time,
int64_t time_stamp) {
size_t buffer_size = VideoFrame::SizeOf(w, h);
rtc::scoped_refptr<RefCountedBuffer> video_buffer(
new RefCountedBuffer(buffer_size));
Attach(video_buffer.get(), buffer_size, w, h, pixel_width, pixel_height,
elapsed_time, time_stamp, webrtc::kVideoRotation_0);
}
WebRtcVideoRenderFrame::WebRtcVideoRenderFrame(
const webrtc::I420VideoFrame* frame,
int64_t elapsed_time_ms)
: frame_(frame), elapsed_time_ms_(elapsed_time_ms) {
}
bool WebRtcVideoRenderFrame::InitToBlack(int w,
int h,
size_t pixel_width,
size_t pixel_height,
int64_t elapsed_time,
int64_t time_stamp) {
UNIMPLEMENTED;
return false;
}
bool WebRtcVideoRenderFrame::Reset(uint32 fourcc,
int w,
int h,
int dw,
int dh,
uint8* sample,
size_t sample_size,
size_t pixel_width,
size_t pixel_height,
int64_t elapsed_time,
int64_t time_stamp,
webrtc::VideoRotation rotation,
bool apply_rotation) {
UNIMPLEMENTED;
return false;
}
size_t WebRtcVideoRenderFrame::GetWidth() const {
return static_cast<size_t>(frame_->width());
}
size_t WebRtcVideoRenderFrame::GetHeight() const {
return static_cast<size_t>(frame_->height());
}
const uint8* WebRtcVideoRenderFrame::GetYPlane() const {
return frame_->buffer(webrtc::kYPlane);
}
const uint8* WebRtcVideoRenderFrame::GetUPlane() const {
return frame_->buffer(webrtc::kUPlane);
}
const uint8* WebRtcVideoRenderFrame::GetVPlane() const {
return frame_->buffer(webrtc::kVPlane);
}
uint8* WebRtcVideoRenderFrame::GetYPlane() {
UNIMPLEMENTED;
return NULL;
}
uint8* WebRtcVideoRenderFrame::GetUPlane() {
UNIMPLEMENTED;
return NULL;
}
uint8* WebRtcVideoRenderFrame::GetVPlane() {
UNIMPLEMENTED;
return NULL;
}
int32 WebRtcVideoRenderFrame::GetYPitch() const {
return frame_->stride(webrtc::kYPlane);
}
int32 WebRtcVideoRenderFrame::GetUPitch() const {
return frame_->stride(webrtc::kUPlane);
}
int32 WebRtcVideoRenderFrame::GetVPitch() const {
return frame_->stride(webrtc::kVPlane);
}
void* WebRtcVideoRenderFrame::GetNativeHandle() const {
return frame_->native_handle();
}
size_t WebRtcVideoRenderFrame::GetPixelWidth() const {
return 1;
}
size_t WebRtcVideoRenderFrame::GetPixelHeight() const {
return 1;
}
int64_t WebRtcVideoRenderFrame::GetElapsedTime() const {
return elapsed_time_ms_ * rtc::kNumNanosecsPerMillisec;
}
int64_t WebRtcVideoRenderFrame::GetTimeStamp() const {
return frame_->render_time_ms() * rtc::kNumNanosecsPerMillisec;
}
void WebRtcVideoRenderFrame::SetElapsedTime(int64_t elapsed_time) {
UNIMPLEMENTED;
}
void WebRtcVideoRenderFrame::SetTimeStamp(int64_t time_stamp) {
UNIMPLEMENTED;
}
webrtc::VideoRotation WebRtcVideoRenderFrame::GetVideoRotation() const {
UNIMPLEMENTED;
return webrtc::kVideoRotation_0;
}
// TODO(magjed): Make this copy shallow instead of deep, BUG=1128. There is no
// way to guarantee that the underlying webrtc::I420VideoFrame |frame_| will
// outlive the returned object. The only safe option is to make a deep copy.
// This can be fixed by making webrtc::I420VideoFrame reference counted, or
// adding a similar shallow copy function to it.
VideoFrame* WebRtcVideoRenderFrame::Copy() const {
if (frame_->native_handle() != NULL) {
return new WebRtcTextureVideoFrame(
static_cast<webrtc::NativeHandle*>(frame_->native_handle()),
static_cast<size_t>(frame_->width()),
static_cast<size_t>(frame_->height()), GetElapsedTime(),
GetTimeStamp());
}
WebRtcVideoFrame* new_frame = new WebRtcVideoFrame();
new_frame->InitToEmptyBuffer(frame_->width(), frame_->height(), 1, 1,
GetElapsedTime(), GetTimeStamp());
CopyToPlanes(new_frame->GetYPlane(), new_frame->GetUPlane(),
new_frame->GetVPlane(), new_frame->GetYPitch(),
new_frame->GetUPitch(), new_frame->GetVPitch());
return new_frame;
}
bool WebRtcVideoRenderFrame::MakeExclusive() {
UNIMPLEMENTED;
return false;
}
size_t WebRtcVideoRenderFrame::CopyToBuffer(uint8* buffer, size_t size) const {
UNIMPLEMENTED;
return 0;
}
VideoFrame* WebRtcVideoRenderFrame::CreateEmptyFrame(int w,
int h,
size_t pixel_width,
size_t pixel_height,
int64_t elapsed_time,
int64_t time_stamp) const {
WebRtcVideoFrame* frame = new WebRtcVideoFrame();
frame->InitToBlack(w, h, pixel_width, pixel_height, elapsed_time, time_stamp);
return frame;
int64_t elapsed_time_ns,
int64_t time_stamp_ns) {
video_frame_buffer_ = new rtc::RefCountedObject<webrtc::I420Buffer>(w, h);
pixel_width_ = pixel_width;
pixel_height_ = pixel_height;
elapsed_time_ns_ = elapsed_time_ns;
time_stamp_ns_ = time_stamp_ns;
rotation_ = webrtc::kVideoRotation_0;
}
} // namespace cricket
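
The net effect of the new implementation is that Copy() is now shallow (it shares the ref-counted buffer) and MakeExclusive() is the deep-copy point. A sketch of the resulting semantics, with placeholder sizes and timestamps:

cricket::WebRtcVideoFrame frame;
frame.InitToBlack(640, 480, 1, 1, /*elapsed_time_ns=*/0, /*time_stamp_ns=*/0);

// Copy() shares the underlying VideoFrameBuffer; no pixels are copied.
rtc::scoped_ptr<cricket::VideoFrame> shared(frame.Copy());
// frame.IsExclusive() is now false: the buffer has two owners.

// MakeExclusive() deep-copies the planes into a fresh I420Buffer, after
// which non-const plane access (GetYPlane() etc.) is allowed again.
shared->MakeExclusive();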

View File

@ -33,11 +33,7 @@
#include "webrtc/base/refcount.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/common_types.h"
#include "webrtc/modules/interface/module_common_types.h"
namespace webrtc {
class I420VideoFrame;
};
#include "webrtc/common_video/interface/video_frame_buffer.h"
namespace cricket {
@ -46,6 +42,9 @@ struct CapturedFrame;
class WebRtcVideoFrame : public VideoFrame {
public:
WebRtcVideoFrame();
WebRtcVideoFrame(const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
int64_t elapsed_time_ns,
int64_t time_stamp_ns);
~WebRtcVideoFrame();
// Creates a frame from a raw sample with FourCC "format" and size "w" x "h".
@ -61,39 +60,17 @@ class WebRtcVideoFrame : public VideoFrame {
size_t sample_size,
size_t pixel_width,
size_t pixel_height,
int64_t elapsed_time,
int64_t time_stamp,
int64_t elapsed_time_ns,
int64_t time_stamp_ns,
webrtc::VideoRotation rotation);
bool Init(const CapturedFrame* frame, int dw, int dh, bool apply_rotation);
void InitToEmptyBuffer(int w, int h, size_t pixel_width, size_t pixel_height,
int64_t elapsed_time, int64_t time_stamp);
// Aliases this WebRtcVideoFrame to a CapturedFrame. |frame| must outlive
// this WebRtcVideoFrame.
bool Alias(const CapturedFrame* frame,
int dw,
int dh,
bool apply_rotation);
int64_t elapsed_time_ns, int64_t time_stamp_ns);
bool InitToBlack(int w, int h, size_t pixel_width, size_t pixel_height,
int64_t elapsed_time, int64_t time_stamp);
// Aliases this WebRtcVideoFrame to a memory buffer. |buffer| must outlive
// this WebRtcVideoFrame.
void Alias(uint8* buffer,
size_t buffer_size,
int w,
int h,
size_t pixel_width,
size_t pixel_height,
int64_t elapsed_time,
int64_t time_stamp,
webrtc::VideoRotation rotation);
webrtc::VideoFrame* frame();
const webrtc::VideoFrame* frame() const;
int64_t elapsed_time_ns, int64_t time_stamp_ns);
// From base class VideoFrame.
virtual bool Reset(uint32 format,
@ -105,8 +82,8 @@ class WebRtcVideoFrame : public VideoFrame {
size_t sample_size,
size_t pixel_width,
size_t pixel_height,
int64_t elapsed_time,
int64_t time_stamp,
int64_t elapsed_time_ns,
int64_t time_stamp_ns,
webrtc::VideoRotation rotation,
bool apply_rotation);
@ -118,19 +95,21 @@ class WebRtcVideoFrame : public VideoFrame {
virtual uint8* GetYPlane();
virtual uint8* GetUPlane();
virtual uint8* GetVPlane();
virtual int32 GetYPitch() const { return frame()->Width(); }
virtual int32 GetUPitch() const { return (frame()->Width() + 1) / 2; }
virtual int32 GetVPitch() const { return (frame()->Width() + 1) / 2; }
virtual void* GetNativeHandle() const { return NULL; }
virtual int32 GetYPitch() const;
virtual int32 GetUPitch() const;
virtual int32 GetVPitch() const;
virtual void* GetNativeHandle() const;
virtual size_t GetPixelWidth() const { return pixel_width_; }
virtual size_t GetPixelHeight() const { return pixel_height_; }
virtual int64_t GetElapsedTime() const { return elapsed_time_; }
virtual int64_t GetTimeStamp() const { return time_stamp_; }
virtual void SetElapsedTime(int64_t elapsed_time) {
elapsed_time_ = elapsed_time;
virtual int64_t GetElapsedTime() const { return elapsed_time_ns_; }
virtual int64_t GetTimeStamp() const { return time_stamp_ns_; }
virtual void SetElapsedTime(int64_t elapsed_time_ns) {
elapsed_time_ns_ = elapsed_time_ns;
}
virtual void SetTimeStamp(int64_t time_stamp_ns) {
time_stamp_ns_ = time_stamp_ns;
}
virtual void SetTimeStamp(int64_t time_stamp) { time_stamp_ = time_stamp; }
virtual webrtc::VideoRotation GetVideoRotation() const { return rotation_; }
@ -141,96 +120,20 @@ class WebRtcVideoFrame : public VideoFrame {
size_t size, int stride_rgb) const;
private:
class FrameBuffer;
typedef rtc::RefCountedObject<FrameBuffer> RefCountedBuffer;
void Attach(RefCountedBuffer* video_buffer,
size_t buffer_size,
int w,
int h,
size_t pixel_width,
size_t pixel_height,
int64_t elapsed_time,
int64_t time_stamp,
webrtc::VideoRotation rotation);
virtual VideoFrame* CreateEmptyFrame(int w, int h, size_t pixel_width,
size_t pixel_height,
int64_t elapsed_time,
int64_t time_stamp) const;
int64_t elapsed_time_ns,
int64_t time_stamp_ns) const;
rtc::scoped_refptr<RefCountedBuffer> video_buffer_;
// An opaque reference counted handle that stores the pixel data.
rtc::scoped_refptr<webrtc::VideoFrameBuffer> video_frame_buffer_;
size_t pixel_width_;
size_t pixel_height_;
int64_t elapsed_time_;
int64_t time_stamp_;
int64_t elapsed_time_ns_;
int64_t time_stamp_ns_;
webrtc::VideoRotation rotation_;
};
// Thin map between VideoFrame and an existing webrtc::I420VideoFrame
// to avoid having to copy the rendered VideoFrame prematurely.
// This implementation is only safe to use in a const context and should never
// be written to.
class WebRtcVideoRenderFrame : public VideoFrame {
public:
WebRtcVideoRenderFrame(const webrtc::I420VideoFrame* frame,
int64_t elapsed_time_ms);
virtual bool InitToBlack(int w,
int h,
size_t pixel_width,
size_t pixel_height,
int64_t elapsed_time,
int64_t time_stamp) override;
virtual bool Reset(uint32 fourcc,
int w,
int h,
int dw,
int dh,
uint8* sample,
size_t sample_size,
size_t pixel_width,
size_t pixel_height,
int64_t elapsed_time,
int64_t time_stamp,
webrtc::VideoRotation rotation,
bool apply_rotation) override;
virtual size_t GetWidth() const override;
virtual size_t GetHeight() const override;
virtual const uint8* GetYPlane() const override;
virtual const uint8* GetUPlane() const override;
virtual const uint8* GetVPlane() const override;
virtual uint8* GetYPlane() override;
virtual uint8* GetUPlane() override;
virtual uint8* GetVPlane() override;
virtual int32 GetYPitch() const override;
virtual int32 GetUPitch() const override;
virtual int32 GetVPitch() const override;
virtual void* GetNativeHandle() const override;
virtual size_t GetPixelWidth() const override;
virtual size_t GetPixelHeight() const override;
virtual int64_t GetElapsedTime() const override;
virtual int64_t GetTimeStamp() const override;
virtual void SetElapsedTime(int64_t elapsed_time) override;
virtual void SetTimeStamp(int64_t time_stamp) override;
virtual webrtc::VideoRotation GetVideoRotation() const override;
virtual VideoFrame* Copy() const override;
virtual bool MakeExclusive() override;
virtual size_t CopyToBuffer(uint8* buffer, size_t size) const override;
protected:
virtual VideoFrame* CreateEmptyFrame(int w,
int h,
size_t pixel_width,
size_t pixel_height,
int64_t elapsed_time,
int64_t time_stamp) const override;
private:
const webrtc::I420VideoFrame* const frame_;
const int64_t elapsed_time_ms_;
};
} // namespace cricket
#endif // TALK_MEDIA_WEBRTCVIDEOFRAME_H_

View File

@ -280,18 +280,6 @@ TEST_WEBRTCVIDEOFRAME(CopyIsRef)
TEST_WEBRTCVIDEOFRAME(MakeExclusive)
// These functions test implementation-specific details.
TEST_F(WebRtcVideoFrameTest, Alias) {
cricket::WebRtcVideoFrame frame1, frame2;
ASSERT_TRUE(LoadFrameNoRepeat(&frame1));
const int64 time_stamp = INT64_C(0x7FFFFFFFFFFFFFF0);
frame1.SetTimeStamp(time_stamp);
EXPECT_EQ(time_stamp, frame1.GetTimeStamp());
frame2.Alias(frame1.frame()->Buffer(), frame1.frame()->Size(), kWidth,
kHeight, 1, 1, frame1.GetElapsedTime(), frame1.GetTimeStamp(),
webrtc::kVideoRotation_0);
EXPECT_TRUE(IsEqual(frame1, frame2, 0));
}
// Tests the Init function with different cropped size.
TEST_F(WebRtcVideoFrameTest, InitEvenSize) {
TestInit(640, 360, webrtc::kVideoRotation_0, true);

View File

@ -33,10 +33,8 @@ namespace cricket {
VideoFrame* WebRtcVideoFrameFactory::CreateAliasedFrame(
const CapturedFrame* aliased_frame, int width, int height) const {
// TODO(pthatcher): Move Alias logic into the VideoFrameFactory and
// out of the VideoFrame.
rtc::scoped_ptr<WebRtcVideoFrame> frame(new WebRtcVideoFrame());
if (!frame->Alias(aliased_frame, width, height, apply_rotation_)) {
if (!frame->Init(aliased_frame, width, height, apply_rotation_)) {
LOG(LS_ERROR) <<
"Failed to create WebRtcVideoFrame in CreateAliasedFrame.";
return NULL;

View File

@ -21,13 +21,13 @@ source_set("common_video") {
"interface/i420_video_frame.h",
"interface/native_handle.h",
"interface/texture_video_frame.h",
"interface/video_frame_buffer.h",
"libyuv/include/scaler.h",
"libyuv/include/webrtc_libyuv.h",
"libyuv/scaler.cc",
"libyuv/webrtc_libyuv.cc",
"plane.cc",
"plane.h",
"texture_video_frame.cc"
"texture_video_frame.cc",
"video_frame_buffer.cc",
]
include_dirs = [ "../modules/interface" ]

View File

@ -42,14 +42,14 @@
'interface/i420_video_frame.h',
'interface/native_handle.h',
'interface/texture_video_frame.h',
'interface/video_frame_buffer.h',
'i420_video_frame.cc',
'libyuv/include/webrtc_libyuv.h',
'libyuv/include/scaler.h',
'libyuv/webrtc_libyuv.cc',
'libyuv/scaler.cc',
'plane.h',
'plane.cc',
'texture_video_frame.cc'
'texture_video_frame.cc',
'video_frame_buffer.cc',
],
},
], # targets

View File

@ -22,7 +22,6 @@
'i420_video_frame_unittest.cc',
'libyuv/libyuv_unittest.cc',
'libyuv/scaler_unittest.cc',
'plane_unittest.cc',
'texture_video_frame_unittest.cc'
],
# Disable warnings to enable Win64 build, issue 1323.

View File

@ -14,37 +14,59 @@
#include <algorithm> // swap
#include "webrtc/base/checks.h"
namespace webrtc {
I420VideoFrame::I420VideoFrame()
: width_(0),
height_(0),
timestamp_(0),
: timestamp_(0),
ntp_time_ms_(0),
render_time_ms_(0),
rotation_(kVideoRotation_0) {
}
I420VideoFrame::I420VideoFrame(
const rtc::scoped_refptr<VideoFrameBuffer>& buffer,
uint32_t timestamp,
int64_t render_time_ms,
VideoRotation rotation)
: video_frame_buffer_(buffer),
timestamp_(timestamp),
ntp_time_ms_(0),
render_time_ms_(render_time_ms),
rotation_(rotation) {
}
I420VideoFrame::~I420VideoFrame() {}
int I420VideoFrame::CreateEmptyFrame(int width, int height,
int stride_y, int stride_u, int stride_v) {
if (CheckDimensions(width, height, stride_y, stride_u, stride_v) < 0)
const int half_width = (width + 1) / 2;
if (width <= 0 || height <= 0 || stride_y < width || stride_u < half_width ||
stride_v < half_width) {
return -1;
int size_y = stride_y * height;
int half_height = (height + 1) / 2;
int size_u = stride_u * half_height;
int size_v = stride_v * half_height;
width_ = width;
height_ = height;
y_plane_.CreateEmptyPlane(size_y, stride_y, size_y);
u_plane_.CreateEmptyPlane(size_u, stride_u, size_u);
v_plane_.CreateEmptyPlane(size_v, stride_v, size_v);
}
// Creating empty frame - reset all values.
timestamp_ = 0;
ntp_time_ms_ = 0;
render_time_ms_ = 0;
rotation_ = kVideoRotation_0;
// Check if it's safe to reuse allocation.
if (video_frame_buffer_ &&
video_frame_buffer_->HasOneRef() &&
!video_frame_buffer_->native_handle() &&
width == video_frame_buffer_->width() &&
height == video_frame_buffer_->height() &&
stride_y == stride(kYPlane) &&
stride_u == stride(kUPlane) &&
stride_v == stride(kVPlane)) {
return 0;
}
// Need to allocate new buffer.
video_frame_buffer_ = new rtc::RefCountedObject<I420Buffer>(
width, height, stride_y, stride_u, stride_v);
return 0;
}
@ -70,31 +92,35 @@ int I420VideoFrame::CreateFrame(int size_y,
int stride_u,
int stride_v,
VideoRotation rotation) {
if (size_y < 1 || size_u < 1 || size_v < 1)
const int half_height = (height + 1) / 2;
const int expected_size_y = height * stride_y;
const int expected_size_u = half_height * stride_u;
const int expected_size_v = half_height * stride_v;
CHECK_GE(size_y, expected_size_y);
CHECK_GE(size_u, expected_size_u);
CHECK_GE(size_v, expected_size_v);
if (CreateEmptyFrame(width, height, stride_y, stride_u, stride_v) < 0)
return -1;
if (CheckDimensions(width, height, stride_y, stride_u, stride_v) < 0)
return -1;
y_plane_.Copy(size_y, stride_y, buffer_y);
u_plane_.Copy(size_u, stride_u, buffer_u);
v_plane_.Copy(size_v, stride_v, buffer_v);
width_ = width;
height_ = height;
memcpy(buffer(kYPlane), buffer_y, expected_size_y);
memcpy(buffer(kUPlane), buffer_u, expected_size_u);
memcpy(buffer(kVPlane), buffer_v, expected_size_v);
rotation_ = rotation;
return 0;
}
int I420VideoFrame::CopyFrame(const I420VideoFrame& videoFrame) {
int ret = CreateFrame(videoFrame.allocated_size(kYPlane),
videoFrame.buffer(kYPlane),
videoFrame.allocated_size(kUPlane),
videoFrame.buffer(kUPlane),
videoFrame.allocated_size(kVPlane),
videoFrame.buffer(kVPlane),
videoFrame.width_, videoFrame.height_,
videoFrame.stride(kYPlane), videoFrame.stride(kUPlane),
videoFrame.stride(kVPlane));
if (ret < 0)
return ret;
if (videoFrame.native_handle()) {
video_frame_buffer_ = videoFrame.video_frame_buffer();
} else {
int ret = CreateFrame(
videoFrame.allocated_size(kYPlane), videoFrame.buffer(kYPlane),
videoFrame.allocated_size(kUPlane), videoFrame.buffer(kUPlane),
videoFrame.allocated_size(kVPlane), videoFrame.buffer(kVPlane),
videoFrame.width(), videoFrame.height(), videoFrame.stride(kYPlane),
videoFrame.stride(kUPlane), videoFrame.stride(kVPlane));
if (ret < 0)
return ret;
}
timestamp_ = videoFrame.timestamp_;
ntp_time_ms_ = videoFrame.ntp_time_ms_;
render_time_ms_ = videoFrame.render_time_ms_;
@ -112,11 +138,7 @@ I420VideoFrame* I420VideoFrame::CloneFrame() const {
}
void I420VideoFrame::SwapFrame(I420VideoFrame* videoFrame) {
y_plane_.Swap(videoFrame->y_plane_);
u_plane_.Swap(videoFrame->u_plane_);
v_plane_.Swap(videoFrame->v_plane_);
std::swap(width_, videoFrame->width_);
std::swap(height_, videoFrame->height_);
video_frame_buffer_.swap(videoFrame->video_frame_buffer_);
std::swap(timestamp_, videoFrame->timestamp_);
std::swap(ntp_time_ms_, videoFrame->ntp_time_ms_);
std::swap(render_time_ms_, videoFrame->render_time_ms_);
@ -124,75 +146,43 @@ void I420VideoFrame::SwapFrame(I420VideoFrame* videoFrame) {
}
uint8_t* I420VideoFrame::buffer(PlaneType type) {
Plane* plane_ptr = GetPlane(type);
if (plane_ptr)
return plane_ptr->buffer();
return NULL;
return video_frame_buffer_ ? video_frame_buffer_->data(type) : nullptr;
}
const uint8_t* I420VideoFrame::buffer(PlaneType type) const {
const Plane* plane_ptr = GetPlane(type);
if (plane_ptr)
return plane_ptr->buffer();
return NULL;
// Const cast to call the correct const-version of data.
const VideoFrameBuffer* const_buffer = video_frame_buffer_.get();
return const_buffer ? const_buffer->data(type) : nullptr;
}
int I420VideoFrame::allocated_size(PlaneType type) const {
const Plane* plane_ptr = GetPlane(type);
if (plane_ptr)
return plane_ptr->allocated_size();
return -1;
const int plane_height = (type == kYPlane) ? height() : (height() + 1) / 2;
return plane_height * stride(type);
}
int I420VideoFrame::stride(PlaneType type) const {
const Plane* plane_ptr = GetPlane(type);
if (plane_ptr)
return plane_ptr->stride();
return -1;
return video_frame_buffer_ ? video_frame_buffer_->stride(type) : 0;
}
int I420VideoFrame::width() const {
return video_frame_buffer_ ? video_frame_buffer_->width() : 0;
}
int I420VideoFrame::height() const {
return video_frame_buffer_ ? video_frame_buffer_->height() : 0;
}
bool I420VideoFrame::IsZeroSize() const {
return (y_plane_.IsZeroSize() && u_plane_.IsZeroSize() &&
v_plane_.IsZeroSize());
return !video_frame_buffer_;
}
void* I420VideoFrame::native_handle() const { return NULL; }
int I420VideoFrame::CheckDimensions(int width, int height,
int stride_y, int stride_u, int stride_v) {
int half_width = (width + 1) / 2;
if (width < 1 || height < 1 ||
stride_y < width || stride_u < half_width || stride_v < half_width)
return -1;
return 0;
void* I420VideoFrame::native_handle() const {
return video_frame_buffer_ ? video_frame_buffer_->native_handle() : nullptr;
}
const Plane* I420VideoFrame::GetPlane(PlaneType type) const {
switch (type) {
case kYPlane :
return &y_plane_;
case kUPlane :
return &u_plane_;
case kVPlane :
return &v_plane_;
default:
assert(false);
}
return NULL;
}
Plane* I420VideoFrame::GetPlane(PlaneType type) {
switch (type) {
case kYPlane :
return &y_plane_;
case kUPlane :
return &u_plane_;
case kVPlane :
return &v_plane_;
default:
assert(false);
}
return NULL;
rtc::scoped_refptr<VideoFrameBuffer> I420VideoFrame::video_frame_buffer()
const {
return video_frame_buffer_;
}
} // namespace webrtc
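
CopyFrame() now distinguishes texture frames from raw frames: texture-backed frames share the buffer, while raw I420 frames are still deep-copied. A sketch with placeholder dimensions:

webrtc::I420VideoFrame src;
src.CreateEmptyFrame(640, 480, /*stride_y=*/640, /*stride_u=*/320,
                     /*stride_v=*/320);

webrtc::I420VideoFrame dst;
// For raw I420 frames, the pixel data is deep-copied into |dst|'s buffer.
dst.CopyFrame(src);

// For texture frames (native_handle() != nullptr) there is nothing to
// memcpy, so CopyFrame() shares the VideoFrameBuffer instead.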

View File

@ -14,8 +14,6 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/common_video/interface/i420_video_frame.h"
#include "webrtc/system_wrappers/interface/ref_count.h"
#include "webrtc/system_wrappers/interface/scoped_refptr.h"
namespace webrtc {
@ -240,15 +238,30 @@ TEST(TestI420VideoFrame, FrameSwap) {
EXPECT_TRUE(EqualFrames(frame2_copy, frame1));
}
TEST(TestI420VideoFrame, RefCountedInstantiation) {
// Refcounted instantiation - ref_count should correspond to the number of
// instances.
scoped_refptr<I420VideoFrame> ref_count_frame(
new RefCountImpl<I420VideoFrame>());
EXPECT_EQ(2, ref_count_frame->AddRef());
EXPECT_EQ(3, ref_count_frame->AddRef());
EXPECT_EQ(2, ref_count_frame->Release());
EXPECT_EQ(1, ref_count_frame->Release());
TEST(TestI420VideoFrame, ReuseAllocation) {
I420VideoFrame frame;
frame.CreateEmptyFrame(640, 320, 640, 320, 320);
const uint8_t* y = frame.buffer(kYPlane);
const uint8_t* u = frame.buffer(kUPlane);
const uint8_t* v = frame.buffer(kVPlane);
frame.CreateEmptyFrame(640, 320, 640, 320, 320);
EXPECT_EQ(y, frame.buffer(kYPlane));
EXPECT_EQ(u, frame.buffer(kUPlane));
EXPECT_EQ(v, frame.buffer(kVPlane));
}
TEST(TestI420VideoFrame, FailToReuseAllocation) {
I420VideoFrame frame1;
frame1.CreateEmptyFrame(640, 320, 640, 320, 320);
const uint8_t* y = frame1.buffer(kYPlane);
const uint8_t* u = frame1.buffer(kUPlane);
const uint8_t* v = frame1.buffer(kVPlane);
// Make a shallow copy of |frame1|.
I420VideoFrame frame2(frame1.video_frame_buffer(), 0, 0, kVideoRotation_0);
frame1.CreateEmptyFrame(640, 320, 640, 320, 320);
EXPECT_NE(y, frame1.buffer(kYPlane));
EXPECT_NE(u, frame1.buffer(kUPlane));
EXPECT_NE(v, frame1.buffer(kVPlane));
}
bool EqualPlane(const uint8_t* data1,

View File

@ -17,7 +17,6 @@
#include "webrtc/common_video/interface/i420_video_frame.h"
#include "webrtc/common_video/interface/native_handle.h"
#include "webrtc/system_wrappers/interface/scoped_refptr.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@ -29,54 +28,6 @@ class TextureVideoFrame : public I420VideoFrame {
int height,
uint32_t timestamp,
int64_t render_time_ms);
virtual ~TextureVideoFrame();
// I420VideoFrame implementation
virtual int CreateEmptyFrame(int width,
int height,
int stride_y,
int stride_u,
int stride_v) override;
virtual int CreateFrame(int size_y,
const uint8_t* buffer_y,
int size_u,
const uint8_t* buffer_u,
int size_v,
const uint8_t* buffer_v,
int width,
int height,
int stride_y,
int stride_u,
int stride_v) override;
virtual int CreateFrame(int size_y,
const uint8_t* buffer_y,
int size_u,
const uint8_t* buffer_u,
int size_v,
const uint8_t* buffer_v,
int width,
int height,
int stride_y,
int stride_u,
int stride_v,
webrtc::VideoRotation rotation) override;
virtual int CopyFrame(const I420VideoFrame& videoFrame) override;
virtual I420VideoFrame* CloneFrame() const override;
virtual void SwapFrame(I420VideoFrame* videoFrame) override;
virtual uint8_t* buffer(PlaneType type) override;
virtual const uint8_t* buffer(PlaneType type) const override;
virtual int allocated_size(PlaneType type) const override;
virtual int stride(PlaneType type) const override;
virtual bool IsZeroSize() const override;
virtual void* native_handle() const override;
protected:
virtual int CheckDimensions(
int width, int height, int stride_y, int stride_u, int stride_v) override;
private:
// An opaque handle that stores the underlying video frame.
scoped_refptr<NativeHandle> handle_;
};
} // namespace webrtc

View File

@ -0,0 +1,109 @@
/*
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_VIDEO_FRAME_BUFFER_H_
#define WEBRTC_VIDEO_FRAME_BUFFER_H_
#include "webrtc/base/refcount.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/common_video/interface/native_handle.h"
#include "webrtc/system_wrappers/interface/aligned_malloc.h"
namespace webrtc {
enum PlaneType {
kYPlane = 0,
kUPlane = 1,
kVPlane = 2,
kNumOfPlanes = 3,
};
// Interface of a simple frame buffer containing pixel data. This interface does
// not contain any frame metadata such as rotation, timestamp, pixel_width, etc.
class VideoFrameBuffer : public rtc::RefCountInterface {
public:
// Returns true if this buffer has a single exclusive owner.
virtual bool HasOneRef() const = 0;
// The resolution of the frame in pixels. For formats where some planes are
// subsampled, this is the highest-resolution plane.
virtual int width() const = 0;
virtual int height() const = 0;
// Returns pointer to the pixel data for a given plane. The memory is owned by
// the VideoFrameBuffer object and must not be freed by the caller.
virtual const uint8_t* data(PlaneType type) const = 0;
// Non-const data access is only allowed if |HasOneRef| is true.
virtual uint8_t* data(PlaneType type) = 0;
// Returns the number of bytes between successive rows for a given plane.
virtual int stride(PlaneType type) const = 0;
// Returns the handle of the underlying video frame. This is used when the
// frame is backed by a texture.
virtual rtc::scoped_refptr<NativeHandle> native_handle() const = 0;
protected:
virtual ~VideoFrameBuffer();
};
// Plain I420 buffer in standard memory.
class I420Buffer : public VideoFrameBuffer {
public:
I420Buffer(int width, int height);
I420Buffer(int width, int height, int stride_y, int stride_u, int stride_v);
int width() const override;
int height() const override;
const uint8_t* data(PlaneType type) const override;
uint8_t* data(PlaneType type) override;
int stride(PlaneType type) const override;
rtc::scoped_refptr<NativeHandle> native_handle() const override;
private:
friend class rtc::RefCountedObject<I420Buffer>;
~I420Buffer() override;
const int width_;
const int height_;
const int stride_y_;
const int stride_u_;
const int stride_v_;
const rtc::scoped_ptr<uint8_t, AlignedFreeDeleter> data_;
};
// Texture buffer around a NativeHandle.
class TextureBuffer : public VideoFrameBuffer {
public:
TextureBuffer(const rtc::scoped_refptr<NativeHandle>& native_handle,
int width,
int height);
int width() const override;
int height() const override;
const uint8_t* data(PlaneType type) const override;
uint8_t* data(PlaneType type) override;
int stride(PlaneType type) const override;
rtc::scoped_refptr<NativeHandle> native_handle() const override;
private:
friend class rtc::RefCountedObject<TextureBuffer>;
~TextureBuffer() override;
const rtc::scoped_refptr<NativeHandle> native_handle_;
const int width_;
const int height_;
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_FRAME_BUFFER_H_
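
A usage sketch for the two concrete buffer types declared above (|some_handle| is a hypothetical, already-acquired NativeHandle; sizes are placeholders):

// Plain I420 allocation; default strides are width and (width + 1) / 2.
rtc::scoped_refptr<webrtc::VideoFrameBuffer> i420 =
    new rtc::RefCountedObject<webrtc::I420Buffer>(640, 480);
// Non-const data() is allowed here because the buffer has a single owner.
uint8_t* y = i420->data(webrtc::kYPlane);

// Texture buffer: only carries the handle; data()/stride() must not be
// called on it.
rtc::scoped_refptr<webrtc::VideoFrameBuffer> texture =
    new rtc::RefCountedObject<webrtc::TextureBuffer>(some_handle, 640, 480);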

View File

@ -1,97 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/common_video/plane.h"
#include <math.h>
#include <string.h>
#include "testing/gtest/include/gtest/gtest.h"
namespace webrtc {
TEST(TestPlane, CreateEmptyPlaneValues) {
Plane plane;
int size, stride;
EXPECT_EQ(0, plane.allocated_size());
EXPECT_EQ(0, plane.stride());
EXPECT_TRUE(plane.IsZeroSize());
size = 0;
stride = 20;
EXPECT_EQ(-1, plane.CreateEmptyPlane(size, stride, 1));
EXPECT_EQ(-1, plane.CreateEmptyPlane(10, stride, size));
size = 20;
stride = 0;
EXPECT_EQ(-1, plane.CreateEmptyPlane(size, stride, size));
stride = 20;
EXPECT_EQ(0, plane.CreateEmptyPlane(size, stride, size));
EXPECT_EQ(size, plane.allocated_size());
EXPECT_EQ(stride, plane.stride());
EXPECT_FALSE(plane.IsZeroSize());
}
TEST(TestPlane, ResetSize) {
Plane plane;
EXPECT_TRUE(plane.IsZeroSize());
int allocated_size, plane_size, stride;
EXPECT_EQ(0, plane.allocated_size());
allocated_size = 30;
plane_size = 20;
stride = 10;
EXPECT_EQ(0, plane.CreateEmptyPlane(allocated_size, stride, plane_size));
EXPECT_EQ(allocated_size, plane.allocated_size());
EXPECT_FALSE(plane.IsZeroSize());
plane.ResetSize();
EXPECT_TRUE(plane.IsZeroSize());
}
TEST(TestPlane, PlaneCopy) {
Plane plane1, plane2;
// Copy entire plane.
plane1.CreateEmptyPlane(100, 10, 100);
int size1 = plane1.allocated_size();
int size2 = 30;
plane2.CreateEmptyPlane(50, 15, size2);
int stride1 = plane1.stride();
int stride2 = plane2.stride();
plane1.Copy(plane2);
// Smaller size - keep buffer size as is.
EXPECT_EQ(size1, plane1.allocated_size());
EXPECT_EQ(stride2, plane1.stride());
plane2.Copy(plane1);
// Verify increment of allocated size.
EXPECT_EQ(plane1.allocated_size(), plane2.allocated_size());
EXPECT_EQ(stride2, plane2.stride());
// Copy buffer.
uint8_t buffer1[100];
size1 = 80;
memset(&buffer1, 0, size1);
plane2.Copy(size1, stride1, buffer1);
EXPECT_GE(plane2.allocated_size(), size1);
EXPECT_EQ(0, memcmp(buffer1, plane2.buffer(), size1));
}
TEST(TestPlane, PlaneSwap) {
Plane plane1, plane2;
int size1, size2, stride1, stride2;
plane1.CreateEmptyPlane(100, 10, 100);
plane2.CreateEmptyPlane(50, 15, 50);
size1 = plane1.allocated_size();
stride1 = plane1.stride();
stride2 = plane2.stride();
size2 = plane2.allocated_size();
plane1.Swap(plane2);
EXPECT_EQ(size1, plane2.allocated_size());
EXPECT_EQ(size2, plane1.allocated_size());
EXPECT_EQ(stride2, plane1.stride());
EXPECT_EQ(stride1, plane2.stride());
}
} // namespace webrtc

View File

@ -10,7 +10,7 @@
#include "webrtc/common_video/interface/texture_video_frame.h"
#include <assert.h>
#include "webrtc/base/refcount.h"
namespace webrtc {
@ -19,99 +19,11 @@ TextureVideoFrame::TextureVideoFrame(NativeHandle* handle,
int height,
uint32_t timestamp,
int64_t render_time_ms)
: handle_(handle) {
width_ = width;
height_ = height;
set_timestamp(timestamp);
set_render_time_ms(render_time_ms);
}
TextureVideoFrame::~TextureVideoFrame() {}
int TextureVideoFrame::CreateEmptyFrame(int width,
int height,
int stride_y,
int stride_u,
int stride_v) {
assert(false); // Should not be called.
return -1;
}
int TextureVideoFrame::CreateFrame(int size_y,
const uint8_t* buffer_y,
int size_u,
const uint8_t* buffer_u,
int size_v,
const uint8_t* buffer_v,
int width,
int height,
int stride_y,
int stride_u,
int stride_v) {
assert(false); // Should not be called.
return -1;
}
int TextureVideoFrame::CreateFrame(int size_y,
const uint8_t* buffer_y,
int size_u,
const uint8_t* buffer_u,
int size_v,
const uint8_t* buffer_v,
int width,
int height,
int stride_y,
int stride_u,
int stride_v,
webrtc::VideoRotation rotation) {
assert(false); // Should not be called.
return -1;
}
int TextureVideoFrame::CopyFrame(const I420VideoFrame& videoFrame) {
assert(false); // Should not be called.
return -1;
}
I420VideoFrame* TextureVideoFrame::CloneFrame() const {
return new TextureVideoFrame(
handle_, width(), height(), timestamp(), render_time_ms());
}
void TextureVideoFrame::SwapFrame(I420VideoFrame* videoFrame) {
assert(false); // Should not be called.
}
uint8_t* TextureVideoFrame::buffer(PlaneType type) {
assert(false); // Should not be called.
return NULL;
}
const uint8_t* TextureVideoFrame::buffer(PlaneType type) const {
assert(false); // Should not be called.
return NULL;
}
int TextureVideoFrame::allocated_size(PlaneType type) const {
assert(false); // Should not be called.
return -1;
}
int TextureVideoFrame::stride(PlaneType type) const {
assert(false); // Should not be called.
return -1;
}
bool TextureVideoFrame::IsZeroSize() const {
assert(false); // Should not be called.
return true;
}
void* TextureVideoFrame::native_handle() const { return handle_.get(); }
int TextureVideoFrame::CheckDimensions(
int width, int height, int stride_y, int stride_u, int stride_v) {
return 0;
: I420VideoFrame(
new rtc::RefCountedObject<TextureBuffer>(handle, width, height),
timestamp,
render_time_ms,
kVideoRotation_0) {
}
} // namespace webrtc
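With construction delegated to I420VideoFrame, a texture frame is simply a frame whose buffer is a TextureBuffer wrapping the native handle. A sketch of typical construction (FakeNativeHandle is the test stand-in used in the capturer unit test further down, not an API introduced by this patch):

webrtc::RefCountImpl<FakeNativeHandle>* handle =
    new webrtc::RefCountImpl<FakeNativeHandle>();
webrtc::TextureVideoFrame frame(handle, 1280, 720, 90000, 0);
// native_handle() returns the wrapped handle; the plane accessors hit
// RTC_NOTREACHED() for texture-backed frames.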

View File

@ -0,0 +1,136 @@
/*
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/common_video/interface/video_frame_buffer.h"
#include "webrtc/base/checks.h"
// Aligning pointer to 64 bytes for improved performance, e.g. to use SIMD.
static const int kBufferAlignment = 64;
namespace webrtc {
VideoFrameBuffer::~VideoFrameBuffer() {}
I420Buffer::I420Buffer(int width, int height)
: I420Buffer(width, height, width, (width + 1) / 2, (width + 1) / 2) {
}
I420Buffer::I420Buffer(int width,
int height,
int stride_y,
int stride_u,
int stride_v)
: width_(width),
height_(height),
stride_y_(stride_y),
stride_u_(stride_u),
stride_v_(stride_v),
data_(static_cast<uint8_t*>(AlignedMalloc(
stride_y * height + (stride_u + stride_v) * ((height + 1) / 2),
kBufferAlignment))) {
DCHECK_GT(width, 0);
DCHECK_GT(height, 0);
DCHECK_GE(stride_y, width);
DCHECK_GE(stride_u, (width + 1) / 2);
DCHECK_GE(stride_v, (width + 1) / 2);
}
I420Buffer::~I420Buffer() {
}
int I420Buffer::width() const {
return width_;
}
int I420Buffer::height() const {
return height_;
}
const uint8_t* I420Buffer::data(PlaneType type) const {
switch (type) {
case kYPlane:
return data_.get();
case kUPlane:
return data_.get() + stride_y_ * height_;
case kVPlane:
return data_.get() + stride_y_ * height_ +
stride_u_ * ((height_ + 1) / 2);
default:
RTC_NOTREACHED();
return nullptr;
}
}
uint8_t* I420Buffer::data(PlaneType type) {
DCHECK(HasOneRef());
return const_cast<uint8_t*>(
static_cast<const VideoFrameBuffer*>(this)->data(type));
}
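As a worked example of the layout above (illustrative numbers): a 640x360 buffer created through the delegating constructor is a single contiguous allocation, with the chroma strides rounded up via (width + 1) / 2, and the planes located at

// stride_y = 640, stride_u = stride_v = (640 + 1) / 2 = 320
// data(kYPlane) = data_
// data(kUPlane) = data_ + 640 * 360           = data_ + 230400
// data(kVPlane) = data_ + 230400 + 320 * 180  = data_ + 288000
// Total allocation: 640 * 360 + (320 + 320) * 180 = 345600 bytes.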
int I420Buffer::stride(PlaneType type) const {
switch (type) {
case kYPlane:
return stride_y_;
case kUPlane:
return stride_u_;
case kVPlane:
return stride_v_;
default:
RTC_NOTREACHED();
return 0;
}
}
rtc::scoped_refptr<NativeHandle> I420Buffer::native_handle() const {
return nullptr;
}
TextureBuffer::TextureBuffer(
const rtc::scoped_refptr<NativeHandle>& native_handle,
int width,
int height)
: native_handle_(native_handle), width_(width), height_(height) {
DCHECK(native_handle.get());
DCHECK_GT(width, 0);
DCHECK_GT(height, 0);
}
TextureBuffer::~TextureBuffer() {
}
int TextureBuffer::width() const {
return width_;
}
int TextureBuffer::height() const {
return height_;
}
const uint8_t* TextureBuffer::data(PlaneType type) const {
RTC_NOTREACHED(); // Should not be called.
return nullptr;
}
uint8_t* TextureBuffer::data(PlaneType type) {
RTC_NOTREACHED(); // Should not be called.
return nullptr;
}
int TextureBuffer::stride(PlaneType type) const {
RTC_NOTREACHED(); // Should not be called.
return 0;
}
rtc::scoped_refptr<NativeHandle> TextureBuffer::native_handle() const {
return native_handle_;
}
} // namespace webrtc
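A minimal usage sketch for the new buffer class (assumes <string.h> for memset; everything else is declared in the headers included above):

rtc::scoped_refptr<webrtc::I420Buffer> buffer(
    new rtc::RefCountedObject<webrtc::I420Buffer>(640, 360));
// Writable access DCHECKs HasOneRef(), so write before sharing the buffer.
memset(buffer->data(webrtc::kYPlane), 0,
       buffer->stride(webrtc::kYPlane) * buffer->height());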

View File

@ -398,232 +398,6 @@ struct VideoContentMetrics {
float spatial_pred_err_v;
};
/*************************************************
*
* VideoFrame class
*
* The VideoFrame class stores a raw frame buffer
* together with its dimensions, timestamp and
* render time.
*
*************************************************/
class VideoFrame {
public:
VideoFrame();
~VideoFrame();
/**
* Verifies that the currently allocated buffer size is larger than or equal
* to the input size. If the current buffer is smaller, a new allocation is
* made and the old buffer data is copied to the new buffer. The buffer size
* is updated to minimumSize.
*/
int32_t VerifyAndAllocate(const size_t minimumSize);
/**
* Updates the length of the data buffer in the frame. Verifies that the new
* length is less than or equal to the allocated size.
*/
int32_t SetLength(const size_t newLength);
/*
* Swap buffer and size data
*/
int32_t Swap(uint8_t*& newMemory, size_t& newLength, size_t& newSize);
/*
* Swap buffer and size data, as well as frame metadata (timestamp,
* dimensions and render time)
*/
int32_t SwapFrame(VideoFrame& videoFrame);
/**
* Copy buffer: if the source length is bigger than the allocated size, a new
* buffer of sufficient size is allocated
*/
int32_t CopyFrame(const VideoFrame& videoFrame);
/**
* Copy buffer: if length is bigger than the allocated size, a new buffer of
* sufficient size is allocated
*/
int32_t CopyFrame(size_t length, const uint8_t* sourceBuffer);
/**
* Frees the frame buffer and resets all members to zero
*/
void Free();
/**
* Set frame timestamp (90kHz)
*/
void SetTimeStamp(const uint32_t timeStamp) { _timeStamp = timeStamp; }
/**
* Get pointer to frame buffer
*/
uint8_t* Buffer() const { return _buffer; }
uint8_t*& Buffer() { return _buffer; }
/**
* Get allocated buffer size
*/
size_t Size() const { return _bufferSize; }
/**
* Get frame length
*/
size_t Length() const { return _bufferLength; }
/**
* Get frame timestamp (90kHz)
*/
uint32_t TimeStamp() const { return _timeStamp; }
/**
* Get frame width
*/
uint32_t Width() const { return _width; }
/**
* Get frame height
*/
uint32_t Height() const { return _height; }
/**
* Set frame width
*/
void SetWidth(const uint32_t width) { _width = width; }
/**
* Set frame height
*/
void SetHeight(const uint32_t height) { _height = height; }
/**
* Set render time in milliseconds
*/
void SetRenderTime(const int64_t renderTimeMs) {
_renderTimeMs = renderTimeMs;
}
/**
* Get render time in milliseconds
*/
int64_t RenderTimeMs() const { return _renderTimeMs; }
private:
void Set(uint8_t* buffer, uint32_t size, uint32_t length, uint32_t timeStamp);
uint8_t* _buffer; // Pointer to frame buffer
size_t _bufferSize; // Allocated buffer size
size_t _bufferLength; // Length (in bytes) of buffer
uint32_t _timeStamp; // Timestamp of frame (90kHz)
uint32_t _width;
uint32_t _height;
int64_t _renderTimeMs;
}; // end of VideoFrame class declaration
// inline implementation of VideoFrame class:
inline VideoFrame::VideoFrame()
: _buffer(0),
_bufferSize(0),
_bufferLength(0),
_timeStamp(0),
_width(0),
_height(0),
_renderTimeMs(0) {
//
}
inline VideoFrame::~VideoFrame() {
if (_buffer) {
delete[] _buffer;
_buffer = NULL;
}
}
inline int32_t VideoFrame::VerifyAndAllocate(const size_t minimumSize) {
if (minimumSize < 1) {
return -1;
}
if (minimumSize > _bufferSize) {
// create buffer of sufficient size
uint8_t* newBufferBuffer = new uint8_t[minimumSize];
if (_buffer) {
// copy old data
memcpy(newBufferBuffer, _buffer, _bufferSize);
delete[] _buffer;
} else {
memset(newBufferBuffer, 0, minimumSize * sizeof(uint8_t));
}
_buffer = newBufferBuffer;
_bufferSize = minimumSize;
}
return 0;
}
inline int32_t VideoFrame::SetLength(const size_t newLength) {
if (newLength > _bufferSize) { // can't accommodate new value
return -1;
}
_bufferLength = newLength;
return 0;
}
inline int32_t VideoFrame::SwapFrame(VideoFrame& videoFrame) {
uint32_t tmpTimeStamp = _timeStamp;
uint32_t tmpWidth = _width;
uint32_t tmpHeight = _height;
int64_t tmpRenderTime = _renderTimeMs;
_timeStamp = videoFrame._timeStamp;
_width = videoFrame._width;
_height = videoFrame._height;
_renderTimeMs = videoFrame._renderTimeMs;
videoFrame._timeStamp = tmpTimeStamp;
videoFrame._width = tmpWidth;
videoFrame._height = tmpHeight;
videoFrame._renderTimeMs = tmpRenderTime;
return Swap(videoFrame._buffer, videoFrame._bufferLength,
videoFrame._bufferSize);
}
inline int32_t VideoFrame::Swap(uint8_t*& newMemory, size_t& newLength,
size_t& newSize) {
std::swap(_buffer, newMemory);
std::swap(_bufferLength, newLength);
std::swap(_bufferSize, newSize);
return 0;
}
inline int32_t VideoFrame::CopyFrame(size_t length,
const uint8_t* sourceBuffer) {
if (length > _bufferSize) {
int32_t ret = VerifyAndAllocate(length);
if (ret < 0) {
return ret;
}
}
memcpy(_buffer, sourceBuffer, length);
_bufferLength = length;
return 0;
}
inline int32_t VideoFrame::CopyFrame(const VideoFrame& videoFrame) {
if (CopyFrame(videoFrame.Length(), videoFrame.Buffer()) != 0) {
return -1;
}
_timeStamp = videoFrame._timeStamp;
_width = videoFrame._width;
_height = videoFrame._height;
_renderTimeMs = videoFrame._renderTimeMs;
return 0;
}
inline void VideoFrame::Free() {
_timeStamp = 0;
_bufferLength = 0;
_bufferSize = 0;
_height = 0;
_width = 0;
_renderTimeMs = 0;
if (_buffer) {
delete[] _buffer;
_buffer = NULL;
}
}
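For reference while reading the removal, the typical call pattern for this class was along these lines (a sketch; payload and payload_size are hypothetical names):

webrtc::VideoFrame frame;
frame.VerifyAndAllocate(payload_size);   // Grow-only; never shrinks.
frame.CopyFrame(payload_size, payload);  // Reallocates if needed.
frame.SetTimeStamp(90000);               // 90 kHz units.
frame.Free();                            // Releases buffer, zeros members.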
/* This class holds up to 60 ms of super-wideband (32 kHz) stereo audio. It
* allows for adding and subtracting frames while keeping track of the resulting
* states.

View File

@ -135,7 +135,8 @@ TEST_F(ViECapturerTest, TestTextureFrames) {
for (int i = 0 ; i < kNumFrame; ++i) {
webrtc::RefCountImpl<FakeNativeHandle>* handle =
new webrtc::RefCountImpl<FakeNativeHandle>();
input_frames_.push_back(new TextureVideoFrame(handle, i, i, i, i));
// Add one to |i| so that width/height > 0.
input_frames_.push_back(new TextureVideoFrame(handle, i + 1, i + 1, i, i));
AddInputFrame(input_frames_[i]);
WaitOutputFrame();
}
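Adding one to |i| matches the new DCHECKs in TextureBuffer: with this patch, constructing a zero-sized texture frame aborts a debug build.

// Would now hit DCHECK_GT(width, 0) in TextureBuffer:
// new TextureVideoFrame(handle, 0, 0, 0, 0);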

View File

@ -11,42 +11,21 @@
#ifndef WEBRTC_VIDEO_FRAME_H_
#define WEBRTC_VIDEO_FRAME_H_
#include <assert.h>
#include "webrtc/common_video/plane.h"
// TODO(pbos): Remove scoped_refptr include (and AddRef/Release if they're not
// used).
#include "webrtc/system_wrappers/interface/scoped_refptr.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/common_video/interface/video_frame_buffer.h"
#include "webrtc/typedefs.h"
#include "webrtc/common_video/rotation.h"
namespace webrtc {
enum PlaneType {
kYPlane = 0,
kUPlane = 1,
kVPlane = 2,
kNumOfPlanes = 3
};
class I420VideoFrame {
public:
I420VideoFrame();
I420VideoFrame(const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
uint32_t timestamp,
int64_t render_time_ms,
VideoRotation rotation);
virtual ~I420VideoFrame();
// Infrastructure for refCount implementation.
// Implements dummy functions for reference counting so that non-reference-
// counted instantiation can be done. These functions should not be called
// when creating the frame with new I420VideoFrame().
// Note: do not pass an I420VideoFrame created with new I420VideoFrame() or
// equivalent to a scoped_refptr, or a memory leak will occur.
virtual int32_t AddRef() {
assert(false);
return -1;
}
virtual int32_t Release() {
assert(false);
return -1;
}
// CreateEmptyFrame: Sets frame dimensions and allocates buffers based on the
// given dimensions - height and plane strides.
@ -62,6 +41,7 @@ class I420VideoFrame {
// CreateFrame: Sets the frame's members and buffers. If the required size is
// bigger than the allocated one, new buffers of adequate size will be allocated.
// Return value: 0 on success, -1 on error.
// TODO(magjed): Remove unnecessary buffer size arguments.
virtual int CreateFrame(int size_y,
const uint8_t* buffer_y,
int size_u,
@ -112,10 +92,10 @@ class I420VideoFrame {
virtual int stride(PlaneType type) const;
// Get frame width.
virtual int width() const { return width_; }
virtual int width() const;
// Get frame height.
virtual int height() const { return height_; }
virtual int height() const;
// Set frame timestamp (90kHz).
virtual void set_timestamp(uint32_t timestamp) { timestamp_ = timestamp; }
@ -162,27 +142,13 @@ class I420VideoFrame {
// longer in use, so the underlying resource can be freed.
virtual void* native_handle() const;
protected:
// Verifies legality of parameters.
// Return value: 0 on success, -1 on error.
virtual int CheckDimensions(int width,
int height,
int stride_y,
int stride_u,
int stride_v);
// TODO(magjed): Move these to an internal frame buffer instead.
int width_;
int height_;
// Return the underlying buffer.
virtual rtc::scoped_refptr<webrtc::VideoFrameBuffer> video_frame_buffer()
const;
private:
// Get the pointer to a specific plane.
const Plane* GetPlane(PlaneType type) const;
// Overloading with non-const.
Plane* GetPlane(PlaneType type);
Plane y_plane_;
Plane u_plane_;
Plane v_plane_;
// An opaque reference counted handle that stores the pixel data.
rtc::scoped_refptr<webrtc::VideoFrameBuffer> video_frame_buffer_;
uint32_t timestamp_;
int64_t ntp_time_ms_;
int64_t render_time_ms_;