Rename I420VideoFrame to VideoFrame.

This is a mechanical change, kept separate because it touches so many
files: I420VideoFrame -> VideoFrame, plus reformatting.

Rationale: in the next CL, VideoFrame will gain an indication of pixel
format (I420 for starters) and of storage type: usually UNOWNED,
sometimes SHMEM, and in the near future possibly TEXTURE. See
https://codereview.chromium.org/1154153003 for the corresponding change
in Chromium.
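
As a rough sketch of what that could look like (a hypothetical
illustration only; the enum and accessor names are not the actual
follow-up API):

    // Hypothetical sketch; names are illustrative, not the final API.
    class VideoFrame {
     public:
      enum PixelFormat { kI420 };  // More formats may follow.
      enum StorageType { kUnowned, kShmem, kTexture };

      PixelFormat pixel_format() const { return pixel_format_; }
      StorageType storage_type() const { return storage_type_; }

     private:
      PixelFormat pixel_format_ = kI420;
      StorageType storage_type_ = kUnowned;
    };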

BUG=4730, chromium:440843
R=jiayl@webrtc.org, niklas.enbom@webrtc.org, pthatcher@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/52629004

Cr-Commit-Position: refs/heads/master@{#9339}
Author: Miguel Casas-Sanchez
Date: 2015-05-29 17:21:40 -07:00
Parent: c2cb266c93
Commit: 4765070b8d

158 changed files with 639 additions and 699 deletions


@@ -51,7 +51,7 @@ using rtc::scoped_ptr;
using webrtc::CodecSpecificInfo;
using webrtc::DecodedImageCallback;
using webrtc::EncodedImage;
using webrtc::I420VideoFrame;
using webrtc::VideoFrame;
using webrtc::RTPFragmentationHeader;
using webrtc::TickTime;
using webrtc::VideoCodec;
@@ -108,7 +108,7 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
bool use_surface_;
int error_count_;
VideoCodec codec_;
I420VideoFrame decoded_image_;
VideoFrame decoded_image_;
NativeHandleImpl native_handle_;
DecodedImageCallback* callback_;
int frames_received_; // Number of frames received by decoder.
@@ -654,9 +654,9 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
int32_t callback_status = WEBRTC_VIDEO_CODEC_OK;
if (use_surface_) {
native_handle_.SetTextureObject(surface_texture_, texture_id);
I420VideoFrame texture_image(&native_handle_, width, height,
output_timestamp_, 0, webrtc::kVideoRotation_0,
rtc::Callback0<void>());
VideoFrame texture_image(&native_handle_, width, height, output_timestamp_,
0, webrtc::kVideoRotation_0,
rtc::Callback0<void>());
texture_image.set_ntp_time_ms(output_ntp_time_ms_);
callback_status = callback_->Decoded(texture_image);
} else {


@@ -48,7 +48,7 @@ using rtc::scoped_ptr;
using webrtc::CodecSpecificInfo;
using webrtc::EncodedImage;
using webrtc::I420VideoFrame;
using webrtc::VideoFrame;
using webrtc::RTPFragmentationHeader;
using webrtc::VideoCodec;
using webrtc::VideoCodecType;
@@ -85,7 +85,7 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
int32_t /* number_of_cores */,
size_t /* max_payload_size */) override;
int32_t Encode(
const webrtc::I420VideoFrame& input_image,
const webrtc::VideoFrame& input_image,
const webrtc::CodecSpecificInfo* /* codec_specific_info */,
const std::vector<webrtc::VideoFrameType>* frame_types) override;
int32_t RegisterEncodeCompleteCallback(
@@ -116,7 +116,7 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
// (makes it easier to reason about thread-safety).
int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps);
int32_t EncodeOnCodecThread(
const webrtc::I420VideoFrame& input_image,
const webrtc::VideoFrame& input_image,
const std::vector<webrtc::VideoFrameType>* frame_types);
int32_t RegisterEncodeCompleteCallbackOnCodecThread(
webrtc::EncodedImageCallback* callback);
@@ -309,7 +309,7 @@ int32_t MediaCodecVideoEncoder::InitEncode(
}
int32_t MediaCodecVideoEncoder::Encode(
const webrtc::I420VideoFrame& frame,
const webrtc::VideoFrame& frame,
const webrtc::CodecSpecificInfo* /* codec_specific_info */,
const std::vector<webrtc::VideoFrameType>* frame_types) {
return codec_thread_->Invoke<int32_t>(Bind(
@@ -471,7 +471,7 @@ int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
}
int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
const webrtc::I420VideoFrame& frame,
const webrtc::VideoFrame& frame,
const std::vector<webrtc::VideoFrameType>* frame_types) {
CheckOnCodecThread();
JNIEnv* jni = AttachCurrentThreadIfNeeded();
@@ -493,9 +493,9 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
}
CHECK(frame_types->size() == 1) << "Unexpected stream count";
const I420VideoFrame& input_frame =
(scale_ && codecType_ == kVideoCodecVP8) ?
quality_scaler_->GetScaledFrame(frame) : frame;
const VideoFrame& input_frame = (scale_ && codecType_ == kVideoCodecVP8)
? quality_scaler_->GetScaledFrame(frame)
: frame;
if (input_frame.width() != width_ || input_frame.height() != height_) {
ALOGD("Frame resolution change from %d x %d to %d x %d",


@@ -108,7 +108,7 @@ int FakeVideoSendStream::GetLastHeight() const {
}
void FakeVideoSendStream::IncomingCapturedFrame(
const webrtc::I420VideoFrame& frame) {
const webrtc::VideoFrame& frame) {
++num_swapped_frames_;
last_frame_.ShallowCopy(frame);
}
@@ -166,7 +166,7 @@ bool FakeVideoReceiveStream::IsReceiving() const {
return receiving_;
}
void FakeVideoReceiveStream::InjectFrame(const webrtc::I420VideoFrame& frame,
void FakeVideoReceiveStream::InjectFrame(const webrtc::VideoFrame& frame,
int time_to_render_ms) {
config_.renderer->RenderFrame(frame, time_to_render_ms);
}


@@ -71,7 +71,7 @@ class FakeVideoSendStream : public webrtc::VideoSendStream,
void SetStats(const webrtc::VideoSendStream::Stats& stats);
private:
void IncomingCapturedFrame(const webrtc::I420VideoFrame& frame) override;
void IncomingCapturedFrame(const webrtc::VideoFrame& frame) override;
webrtc::VideoSendStream::Stats GetStats() override;
bool ReconfigureVideoEncoder(
@@ -91,7 +91,7 @@ class FakeVideoSendStream : public webrtc::VideoSendStream,
webrtc::VideoCodecVP9 vp9;
} vpx_settings_;
int num_swapped_frames_;
webrtc::I420VideoFrame last_frame_;
webrtc::VideoFrame last_frame_;
webrtc::VideoSendStream::Stats stats_;
};
@@ -104,7 +104,7 @@ class FakeVideoReceiveStream : public webrtc::VideoReceiveStream {
bool IsReceiving() const;
void InjectFrame(const webrtc::I420VideoFrame& frame, int time_to_render_ms);
void InjectFrame(const webrtc::VideoFrame& frame, int time_to_render_ms);
void SetStats(const webrtc::VideoReceiveStream::Stats& stats);


@@ -109,7 +109,7 @@ class FakeWebRtcVideoCaptureModule : public webrtc::VideoCaptureModule {
bool SendFrame(int w, int h) {
if (!running_) return false;
webrtc::I420VideoFrame sample;
webrtc::VideoFrame sample;
// Setting stride based on width.
if (sample.CreateEmptyFrame(w, h, w, (w + 1) / 2, (w + 1) / 2) < 0) {
return false;


@@ -159,10 +159,9 @@ class FakeWebRtcVideoEncoder : public webrtc::VideoEncoder {
return codec_settings_;
}
virtual int32 Encode(
const webrtc::I420VideoFrame& inputImage,
const webrtc::CodecSpecificInfo* codecSpecificInfo,
const std::vector<webrtc::VideoFrameType>* frame_types) {
virtual int32 Encode(const webrtc::VideoFrame& inputImage,
const webrtc::CodecSpecificInfo* codecSpecificInfo,
const std::vector<webrtc::VideoFrameType>* frame_types) {
rtc::CritScope lock(&crit_);
++num_frames_encoded_;
return WEBRTC_VIDEO_CODEC_OK;


@@ -41,7 +41,7 @@ class PassthroughStream: public webrtc::VideoRenderCallback {
virtual ~PassthroughStream() {
}
virtual int32_t RenderFrame(const uint32_t stream_id,
const webrtc::I420VideoFrame& videoFrame) {
const webrtc::VideoFrame& videoFrame) {
rtc::CritScope cs(&stream_critical_);
// Send frame for rendering directly
if (running_ && renderer_) {


@@ -161,12 +161,12 @@ class WebRtcPassthroughRender : public webrtc::VideoRender {
}
int32_t SetStartImage(const uint32_t stream_id,
const webrtc::I420VideoFrame& videoFrame) override {
const webrtc::VideoFrame& videoFrame) override {
return -1;
}
int32_t SetTimeoutImage(const uint32_t stream_id,
const webrtc::I420VideoFrame& videoFrame,
const webrtc::VideoFrame& videoFrame,
const uint32_t timeout) override {
return -1;
}


@@ -44,7 +44,7 @@ class WebRtcPassthroughRenderTest : public testing::Test {
}
virtual int32_t RenderFrame(const uint32_t stream_id,
const webrtc::I420VideoFrame& videoFrame) {
const webrtc::VideoFrame& videoFrame) {
++frame_num_;
LOG(INFO) << "RenderFrame stream_id: " << stream_id
<< " frame_num: " << frame_num_;
@@ -121,7 +121,7 @@ TEST_F(WebRtcPassthroughRenderTest, Streams) {
}
TEST_F(WebRtcPassthroughRenderTest, Renderer) {
webrtc::I420VideoFrame frame;
webrtc::VideoFrame frame;
const int stream_id1 = 1234;
const int stream_id2 = 5678;
const int stream_id3 = 9012; // A stream that doesn't exist.


@@ -370,7 +370,7 @@ bool WebRtcVideoCapturer::GetPreferredFourccs(
void WebRtcVideoCapturer::OnIncomingCapturedFrame(
const int32_t id,
const webrtc::I420VideoFrame& sample) {
const webrtc::VideoFrame& sample) {
// This can only happen between Start() and Stop().
DCHECK(start_thread_);
DCHECK(async_invoker_);
@@ -396,7 +396,7 @@ void WebRtcVideoCapturer::OnCaptureDelayChanged(const int32_t id,
}
void WebRtcVideoCapturer::SignalFrameCapturedOnStartThread(
const webrtc::I420VideoFrame frame) {
const webrtc::VideoFrame frame) {
// This can only happen between Start() and Stop().
DCHECK(start_thread_);
DCHECK(start_thread_->IsCurrent());
@@ -424,7 +424,7 @@ void WebRtcVideoCapturer::SignalFrameCapturedOnStartThread(
}
// WebRtcCapturedFrame
WebRtcCapturedFrame::WebRtcCapturedFrame(const webrtc::I420VideoFrame& sample,
WebRtcCapturedFrame::WebRtcCapturedFrame(const webrtc::VideoFrame& sample,
void* buffer,
size_t length) {
width = sample.width();


@@ -82,7 +82,7 @@ class WebRtcVideoCapturer : public VideoCapturer,
private:
// Callback when a frame is captured by camera.
virtual void OnIncomingCapturedFrame(const int32_t id,
const webrtc::I420VideoFrame& frame);
const webrtc::VideoFrame& frame);
virtual void OnCaptureDelayChanged(const int32_t id,
const int32_t delay);
@@ -92,7 +92,7 @@ class WebRtcVideoCapturer : public VideoCapturer,
// directly from OnIncomingCapturedFrame.
// TODO(tommi): Remove this workaround when we've updated the WebRTC capturers
// to follow the same contract.
void SignalFrameCapturedOnStartThread(const webrtc::I420VideoFrame frame);
void SignalFrameCapturedOnStartThread(const webrtc::VideoFrame frame);
rtc::scoped_ptr<WebRtcVcmFactoryInterface> factory_;
webrtc::VideoCaptureModule* module_;
@@ -105,8 +105,9 @@ class WebRtcVideoCapturer : public VideoCapturer,
struct WebRtcCapturedFrame : public CapturedFrame {
public:
WebRtcCapturedFrame(const webrtc::I420VideoFrame& frame,
void* buffer, size_t length);
WebRtcCapturedFrame(const webrtc::VideoFrame& frame,
void* buffer,
size_t length);
};
} // namespace cricket


@@ -1679,7 +1679,7 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::~WebRtcVideoSendStream() {
DestroyVideoEncoder(&allocated_encoder_);
}
static void CreateBlackFrame(webrtc::I420VideoFrame* video_frame,
static void CreateBlackFrame(webrtc::VideoFrame* video_frame,
int width,
int height) {
video_frame->CreateEmptyFrame(width, height, width, (width + 1) / 2,
@@ -1696,8 +1696,8 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::InputFrame(
VideoCapturer* capturer,
const VideoFrame* frame) {
TRACE_EVENT0("webrtc", "WebRtcVideoSendStream::InputFrame");
webrtc::I420VideoFrame video_frame(frame->GetVideoFrameBuffer(), 0, 0,
frame->GetVideoRotation());
webrtc::VideoFrame video_frame(frame->GetVideoFrameBuffer(), 0, 0,
frame->GetVideoRotation());
rtc::CritScope cs(&lock_);
if (stream_ == NULL) {
// Frame input before send codecs are configured, dropping frame.
@@ -1744,7 +1744,7 @@ bool WebRtcVideoChannel2::WebRtcVideoSendStream::SetCapturer(
if (capturer == NULL) {
if (stream_ != NULL) {
LOG(LS_VERBOSE) << "Disabling capturer, sending black frame.";
webrtc::I420VideoFrame black_frame;
webrtc::VideoFrame black_frame;
CreateBlackFrame(&black_frame, last_dimensions_.width,
last_dimensions_.height);
@@ -2368,7 +2368,7 @@ void WebRtcVideoChannel2::WebRtcVideoReceiveStream::ClearDecoders(
}
void WebRtcVideoChannel2::WebRtcVideoReceiveStream::RenderFrame(
const webrtc::I420VideoFrame& frame,
const webrtc::VideoFrame& frame,
int time_to_render_ms) {
rtc::CritScope crit(&renderer_lock_);


@@ -416,7 +416,7 @@ class WebRtcVideoChannel2 : public rtc::MessageHandler,
void SetRecvCodecs(const std::vector<VideoCodecSettings>& recv_codecs);
void SetRtpExtensions(const std::vector<webrtc::RtpExtension>& extensions);
void RenderFrame(const webrtc::I420VideoFrame& frame,
void RenderFrame(const webrtc::VideoFrame& frame,
int time_to_render_ms) override;
bool IsTextureSupported() const override;
bool IsDefaultStream() const;


@@ -74,7 +74,7 @@ void VerifyCodecHasDefaultFeedbackParams(const cricket::VideoCodec& codec) {
cricket::kRtcpFbParamCcm, cricket::kRtcpFbCcmParamFir)));
}
static void CreateBlackFrame(webrtc::I420VideoFrame* video_frame,
static void CreateBlackFrame(webrtc::VideoFrame* video_frame,
int width,
int height) {
video_frame->CreateEmptyFrame(
@@ -1727,7 +1727,7 @@ TEST_F(WebRtcVideoChannel2Test, EstimatesNtpStartTimeAndElapsedTimeCorrectly) {
EXPECT_TRUE(channel_->SetRenderer(last_ssrc_, &renderer));
EXPECT_TRUE(channel_->SetRender(true));
webrtc::I420VideoFrame video_frame;
webrtc::VideoFrame video_frame;
CreateBlackFrame(&video_frame, 4, 4);
video_frame.set_timestamp(kInitialTimestamp);
// Initial NTP time is not available on the first frame, but should still be


@@ -18,7 +18,6 @@ config("common_video_config") {
source_set("common_video") {
sources = [
"i420_buffer_pool.cc",
"i420_video_frame.cc",
"incoming_video_stream.cc",
"interface/i420_buffer_pool.h",
"interface/incoming_video_stream.h",
@@ -27,6 +26,7 @@ source_set("common_video") {
"libyuv/include/webrtc_libyuv.h",
"libyuv/scaler.cc",
"libyuv/webrtc_libyuv.cc",
"video_frame.cc",
"video_frame_buffer.cc",
"video_render_frames.cc",
"video_render_frames.h",


@@ -40,7 +40,7 @@
],
'sources': [
'i420_buffer_pool.cc',
'i420_video_frame.cc',
'video_frame.cc',
'incoming_video_stream.cc',
'interface/i420_buffer_pool.h',
'interface/incoming_video_stream.h',


@@ -35,25 +35,24 @@ bool EqualPlane(const uint8_t* data1,
int stride,
int width,
int height);
bool EqualFrames(const I420VideoFrame& frame1, const I420VideoFrame& frame2);
bool EqualTextureFrames(const I420VideoFrame& frame1,
const I420VideoFrame& frame2);
bool EqualFrames(const VideoFrame& frame1, const VideoFrame& frame2);
bool EqualTextureFrames(const VideoFrame& frame1, const VideoFrame& frame2);
int ExpectedSize(int plane_stride, int image_height, PlaneType type);
TEST(TestI420VideoFrame, InitialValues) {
I420VideoFrame frame;
TEST(TestVideoFrame, InitialValues) {
VideoFrame frame;
EXPECT_TRUE(frame.IsZeroSize());
EXPECT_EQ(kVideoRotation_0, frame.rotation());
}
TEST(TestI420VideoFrame, CopiesInitialFrameWithoutCrashing) {
I420VideoFrame frame;
I420VideoFrame frame2;
TEST(TestVideoFrame, CopiesInitialFrameWithoutCrashing) {
VideoFrame frame;
VideoFrame frame2;
frame2.CopyFrame(frame);
}
TEST(TestI420VideoFrame, WidthHeightValues) {
I420VideoFrame frame;
TEST(TestVideoFrame, WidthHeightValues) {
VideoFrame frame;
const int valid_value = 10;
EXPECT_EQ(0, frame.CreateEmptyFrame(10, 10, 10, 14, 90));
EXPECT_EQ(valid_value, frame.width());
@@ -66,8 +65,8 @@ TEST(TestI420VideoFrame, WidthHeightValues) {
EXPECT_EQ(789, frame.render_time_ms());
}
TEST(TestI420VideoFrame, SizeAllocation) {
I420VideoFrame frame;
TEST(TestVideoFrame, SizeAllocation) {
VideoFrame frame;
EXPECT_EQ(0, frame.CreateEmptyFrame(10, 10, 12, 14, 220));
int height = frame.height();
int stride_y = frame.stride(kYPlane);
@@ -82,7 +81,7 @@ TEST(TestI420VideoFrame, SizeAllocation) {
frame.allocated_size(kVPlane));
}
TEST(TestI420VideoFrame, CopyFrame) {
TEST(TestVideoFrame, CopyFrame) {
uint32_t timestamp = 1;
int64_t ntp_time_ms = 2;
int64_t render_time_ms = 3;
@@ -92,7 +91,7 @@ TEST(TestI420VideoFrame, CopyFrame) {
int width = 15;
int height = 15;
// Copy frame.
I420VideoFrame small_frame;
VideoFrame small_frame;
EXPECT_EQ(0, small_frame.CreateEmptyFrame(width, height,
stride_y, stride_u, stride_v));
small_frame.set_timestamp(timestamp);
@@ -108,7 +107,7 @@ TEST(TestI420VideoFrame, CopyFrame) {
memset(buffer_y, 16, kSizeY);
memset(buffer_u, 8, kSizeU);
memset(buffer_v, 4, kSizeV);
I420VideoFrame big_frame;
VideoFrame big_frame;
EXPECT_EQ(0,
big_frame.CreateFrame(buffer_y, buffer_u, buffer_v,
width + 5, height + 5, stride_y + 5,
@@ -128,7 +127,7 @@ TEST(TestI420VideoFrame, CopyFrame) {
EXPECT_TRUE(EqualFrames(small_frame, big_frame));
}
TEST(TestI420VideoFrame, ShallowCopy) {
TEST(TestVideoFrame, ShallowCopy) {
uint32_t timestamp = 1;
int64_t ntp_time_ms = 2;
int64_t render_time_ms = 3;
@@ -148,18 +147,18 @@ TEST(TestI420VideoFrame, ShallowCopy) {
memset(buffer_y, 16, kSizeY);
memset(buffer_u, 8, kSizeU);
memset(buffer_v, 4, kSizeV);
I420VideoFrame frame1;
VideoFrame frame1;
EXPECT_EQ(0, frame1.CreateFrame(buffer_y, buffer_u, buffer_v, width, height,
stride_y, stride_u, stride_v, kRotation));
frame1.set_timestamp(timestamp);
frame1.set_ntp_time_ms(ntp_time_ms);
frame1.set_render_time_ms(render_time_ms);
I420VideoFrame frame2;
VideoFrame frame2;
frame2.ShallowCopy(frame1);
// To be able to access the buffers, we need const pointers to the frames.
const I420VideoFrame* const_frame1_ptr = &frame1;
const I420VideoFrame* const_frame2_ptr = &frame2;
const VideoFrame* const_frame1_ptr = &frame1;
const VideoFrame* const_frame2_ptr = &frame2;
EXPECT_TRUE(const_frame1_ptr->buffer(kYPlane) ==
const_frame2_ptr->buffer(kYPlane));
@@ -184,8 +183,8 @@ TEST(TestI420VideoFrame, ShallowCopy) {
EXPECT_NE(frame2.rotation(), frame1.rotation());
}
TEST(TestI420VideoFrame, Reset) {
I420VideoFrame frame;
TEST(TestVideoFrame, Reset) {
VideoFrame frame;
ASSERT_TRUE(frame.CreateEmptyFrame(5, 5, 5, 5, 5) == 0);
frame.set_ntp_time_ms(1);
frame.set_timestamp(2);
@@ -199,8 +198,8 @@ TEST(TestI420VideoFrame, Reset) {
EXPECT_TRUE(frame.video_frame_buffer() == NULL);
}
TEST(TestI420VideoFrame, CopyBuffer) {
I420VideoFrame frame1, frame2;
TEST(TestVideoFrame, CopyBuffer) {
VideoFrame frame1, frame2;
int width = 15;
int height = 15;
int stride_y = 15;
@@ -228,8 +227,8 @@ TEST(TestI420VideoFrame, CopyBuffer) {
EXPECT_LE(kSizeUv, frame2.allocated_size(kVPlane));
}
TEST(TestI420VideoFrame, ReuseAllocation) {
I420VideoFrame frame;
TEST(TestVideoFrame, ReuseAllocation) {
VideoFrame frame;
frame.CreateEmptyFrame(640, 320, 640, 320, 320);
const uint8_t* y = frame.buffer(kYPlane);
const uint8_t* u = frame.buffer(kUPlane);
@@ -240,24 +239,24 @@ TEST(TestI420VideoFrame, ReuseAllocation) {
EXPECT_EQ(v, frame.buffer(kVPlane));
}
TEST(TestI420VideoFrame, FailToReuseAllocation) {
I420VideoFrame frame1;
TEST(TestVideoFrame, FailToReuseAllocation) {
VideoFrame frame1;
frame1.CreateEmptyFrame(640, 320, 640, 320, 320);
const uint8_t* y = frame1.buffer(kYPlane);
const uint8_t* u = frame1.buffer(kUPlane);
const uint8_t* v = frame1.buffer(kVPlane);
// Make a shallow copy of |frame1|.
I420VideoFrame frame2(frame1.video_frame_buffer(), 0, 0, kVideoRotation_0);
VideoFrame frame2(frame1.video_frame_buffer(), 0, 0, kVideoRotation_0);
frame1.CreateEmptyFrame(640, 320, 640, 320, 320);
EXPECT_NE(y, frame1.buffer(kYPlane));
EXPECT_NE(u, frame1.buffer(kUPlane));
EXPECT_NE(v, frame1.buffer(kVPlane));
}
TEST(TestI420VideoFrame, TextureInitialValues) {
TEST(TestVideoFrame, TextureInitialValues) {
NativeHandleImpl handle;
I420VideoFrame frame(&handle, 640, 480, 100, 10, webrtc::kVideoRotation_0,
rtc::Callback0<void>());
VideoFrame frame(&handle, 640, 480, 100, 10, webrtc::kVideoRotation_0,
rtc::Callback0<void>());
EXPECT_EQ(640, frame.width());
EXPECT_EQ(480, frame.height());
EXPECT_EQ(100u, frame.timestamp());
@@ -270,12 +269,12 @@ TEST(TestI420VideoFrame, TextureInitialValues) {
EXPECT_EQ(20, frame.render_time_ms());
}
TEST(TestI420VideoFrame, NoLongerNeeded) {
TEST(TestVideoFrame, NoLongerNeeded) {
NativeHandleImpl handle;
ASSERT_FALSE(handle.no_longer_needed());
I420VideoFrame* frame = new I420VideoFrame(
&handle, 640, 480, 100, 200, webrtc::kVideoRotation_0,
rtc::Bind(&NativeHandleImpl::SetNoLongerNeeded, &handle));
VideoFrame* frame =
new VideoFrame(&handle, 640, 480, 100, 200, webrtc::kVideoRotation_0,
rtc::Bind(&NativeHandleImpl::SetNoLongerNeeded, &handle));
EXPECT_FALSE(handle.no_longer_needed());
delete frame;
EXPECT_TRUE(handle.no_longer_needed());
@@ -295,7 +294,7 @@ bool EqualPlane(const uint8_t* data1,
return true;
}
bool EqualFrames(const I420VideoFrame& frame1, const I420VideoFrame& frame2) {
bool EqualFrames(const VideoFrame& frame1, const VideoFrame& frame2) {
if ((frame1.width() != frame2.width()) ||
(frame1.height() != frame2.height()) ||
(frame1.stride(kYPlane) != frame2.stride(kYPlane)) ||
@@ -316,8 +315,7 @@ bool EqualFrames(const I420VideoFrame& frame1, const I420VideoFrame& frame2) {
frame1.stride(kVPlane), half_width, half_height);
}
bool EqualTextureFrames(const I420VideoFrame& frame1,
const I420VideoFrame& frame2) {
bool EqualTextureFrames(const VideoFrame& frame1, const VideoFrame& frame2) {
return ((frame1.native_handle() == frame2.native_handle()) &&
(frame1.width() == frame2.width()) &&
(frame1.height() == frame2.height()) &&


@@ -62,7 +62,7 @@ VideoRenderCallback* IncomingVideoStream::ModuleCallback() {
}
int32_t IncomingVideoStream::RenderFrame(const uint32_t stream_id,
const I420VideoFrame& video_frame) {
const VideoFrame& video_frame) {
CriticalSectionScoped csS(stream_critsect_.get());
if (!running_) {
@@ -88,14 +88,13 @@ int32_t IncomingVideoStream::RenderFrame(const uint32_t stream_id,
return 0;
}
int32_t IncomingVideoStream::SetStartImage(
const I420VideoFrame& video_frame) {
int32_t IncomingVideoStream::SetStartImage(const VideoFrame& video_frame) {
CriticalSectionScoped csS(thread_critsect_.get());
return start_image_.CopyFrame(video_frame);
}
int32_t IncomingVideoStream::SetTimeoutImage(
const I420VideoFrame& video_frame, const uint32_t timeout) {
int32_t IncomingVideoStream::SetTimeoutImage(const VideoFrame& video_frame,
const uint32_t timeout) {
CriticalSectionScoped csS(thread_critsect_.get());
timeout_time_ = timeout;
return timeout_image_.CopyFrame(video_frame);
@@ -207,7 +206,7 @@ bool IncomingVideoStream::IncomingVideoStreamProcess() {
return false;
}
// Get a new frame to render and the time for the frame after this one.
I420VideoFrame frame_to_render;
VideoFrame frame_to_render;
uint32_t wait_time;
{
CriticalSectionScoped cs(buffer_critsect_.get());


@@ -23,7 +23,8 @@ class ThreadWrapper;
class VideoRenderCallback {
public:
virtual int32_t RenderFrame(const uint32_t streamId,
const I420VideoFrame& videoFrame) = 0;
const VideoFrame& videoFrame) = 0;
protected:
virtual ~VideoRenderCallback() {}
};
@@ -36,7 +37,7 @@ class IncomingVideoStream : public VideoRenderCallback {
// Get callback to deliver frames to the module.
VideoRenderCallback* ModuleCallback();
virtual int32_t RenderFrame(const uint32_t stream_id,
const I420VideoFrame& video_frame);
const VideoFrame& video_frame);
// Set callback to the platform dependent code.
void SetRenderCallback(VideoRenderCallback* render_callback);
@@ -55,9 +56,9 @@ class IncomingVideoStream : public VideoRenderCallback {
uint32_t StreamId() const;
uint32_t IncomingRate() const;
int32_t SetStartImage(const I420VideoFrame& video_frame);
int32_t SetStartImage(const VideoFrame& video_frame);
int32_t SetTimeoutImage(const I420VideoFrame& video_frame,
int32_t SetTimeoutImage(const VideoFrame& video_frame,
const uint32_t timeout);
int32_t SetExpectedRenderDelay(int32_t delay_ms);
@@ -90,9 +91,9 @@ class IncomingVideoStream : public VideoRenderCallback {
int64_t last_rate_calculation_time_ms_ GUARDED_BY(stream_critsect_);
uint16_t num_frames_since_last_calculation_ GUARDED_BY(stream_critsect_);
int64_t last_render_time_ms_ GUARDED_BY(thread_critsect_);
I420VideoFrame temp_frame_ GUARDED_BY(thread_critsect_);
I420VideoFrame start_image_ GUARDED_BY(thread_critsect_);
I420VideoFrame timeout_image_ GUARDED_BY(thread_critsect_);
VideoFrame temp_frame_ GUARDED_BY(thread_critsect_);
VideoFrame start_image_ GUARDED_BY(thread_critsect_);
VideoFrame timeout_image_ GUARDED_BY(thread_critsect_);
uint32_t timeout_time_ GUARDED_BY(thread_critsect_);
};
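
For code implementing this interface, only the parameter type changes; a
minimal sketch against the renamed signature (the class name and include
path are assumptions):

    #include "webrtc/common_video/interface/incoming_video_stream.h"

    // Hypothetical example renderer; the parameter used to be an
    // I420VideoFrame.
    class CountingRenderer : public webrtc::VideoRenderCallback {
     public:
      int32_t RenderFrame(const uint32_t stream_id,
                          const webrtc::VideoFrame& video_frame) override {
        ++frames_rendered_;
        return 0;
      }

     private:
      int frames_rendered_ = 0;
    };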


@@ -48,8 +48,7 @@ class Scaler {
// Return value: 0 - OK,
// -1 - parameter error
// -2 - scaler not set
int Scale(const I420VideoFrame& src_frame,
I420VideoFrame* dst_frame);
int Scale(const VideoFrame& src_frame, VideoFrame* dst_frame);
private:
// Determine if the VideoTypes are currently supported.
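
A sketch of typical use of the renamed signature, patterned on the
test_scaler.cc changes further down (the helper name and include path are
assumptions):

    #include "webrtc/common_video/libyuv/include/scaler.h"

    // Downscale an I420 frame to half size.
    int HalveFrame(const webrtc::VideoFrame& src, webrtc::VideoFrame* dst) {
      webrtc::Scaler scaler;
      scaler.Set(src.width(), src.height(),
                 (src.width() + 1) / 2, (src.height() + 1) / 2,
                 webrtc::kI420, webrtc::kI420, webrtc::kScalePoint);
      // Per the comment above: 0 - OK, -1 - parameter error, -2 - not set.
      return scaler.Scale(src, dst);
    }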


@@ -77,23 +77,22 @@ void Calc16ByteAlignedStride(int width, int* stride_y, int* stride_uv);
size_t CalcBufferSize(VideoType type, int width, int height);
// TODO(mikhal): Add unit test for these two functions and determine location.
// Print I420VideoFrame to file
// Print VideoFrame to file
// Input:
// - frame : Reference to video frame.
// - file : pointer to file object. It is assumed that the file is
// already open for writing.
// Return value: 0 if OK, < 0 otherwise.
int PrintI420VideoFrame(const I420VideoFrame& frame, FILE* file);
int PrintVideoFrame(const VideoFrame& frame, FILE* file);
// Extract buffer from I420VideoFrame (consecutive planes, no stride)
// Extract buffer from VideoFrame (consecutive planes, no stride)
// Input:
// - frame : Reference to video frame.
// - size : pointer to the size of the allocated buffer. If size is
// insufficient, an error will be returned.
// - buffer : Pointer to buffer
// Return value: length of buffer if OK, < 0 otherwise.
int ExtractBuffer(const I420VideoFrame& input_frame,
size_t size, uint8_t* buffer);
int ExtractBuffer(const VideoFrame& input_frame, size_t size, uint8_t* buffer);
// Convert To I420
// Input:
// - src_video_type : Type of input video.
@@ -115,7 +114,7 @@ int ConvertToI420(VideoType src_video_type,
int src_height,
size_t sample_size,
VideoRotation rotation,
I420VideoFrame* dst_frame);
VideoFrame* dst_frame);
// Convert From I420
// Input:
@@ -125,13 +124,15 @@ int ConvertToI420(VideoType src_video_type,
// - dst_frame : Pointer to a destination frame.
// Return value: 0 if OK, < 0 otherwise.
// It is assumed that source and destination have equal height.
int ConvertFromI420(const I420VideoFrame& src_frame,
VideoType dst_video_type, int dst_sample_size,
int ConvertFromI420(const VideoFrame& src_frame,
VideoType dst_video_type,
int dst_sample_size,
uint8_t* dst_frame);
// ConvertFrom YV12.
// Interface - same as above.
int ConvertFromYV12(const I420VideoFrame& src_frame,
VideoType dst_video_type, int dst_sample_size,
int ConvertFromYV12(const VideoFrame& src_frame,
VideoType dst_video_type,
int dst_sample_size,
uint8_t* dst_frame);
// The following list describes designated conversion functions which
@@ -148,11 +149,9 @@ int ConvertNV12ToRGB565(const uint8_t* src_frame,
// Compute PSNR for an I420 frame (all planes).
// Returns the PSNR in decibel, to a maximum of kInfinitePSNR.
double I420PSNR(const I420VideoFrame* ref_frame,
const I420VideoFrame* test_frame);
double I420PSNR(const VideoFrame* ref_frame, const VideoFrame* test_frame);
// Compute SSIM for an I420 frame (all planes).
double I420SSIM(const I420VideoFrame* ref_frame,
const I420VideoFrame* test_frame);
double I420SSIM(const VideoFrame* ref_frame, const VideoFrame* test_frame);
}
#endif // WEBRTC_COMMON_VIDEO_LIBYUV_INCLUDE_WEBRTC_LIBYUV_H_
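
Putting these helpers together, a hedged usage sketch mirroring the calls
in libyuv_unittest.cc below (the function and its arguments are
illustrative):

    #include <cstdint>
    #include <cstdio>
    #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"

    // Wrap a raw YUY2 buffer in a VideoFrame, dump it to |out|, and return
    // its PSNR against |reference|.
    double ConvertAndDump(const uint8_t* yuy2_buffer, int width, int height,
                          const webrtc::VideoFrame& reference, FILE* out) {
      webrtc::VideoFrame frame;
      frame.CreateEmptyFrame(width, height, width, (width + 1) / 2,
                             (width + 1) / 2);
      if (webrtc::ConvertToI420(webrtc::kYUY2, yuy2_buffer, 0, 0, width,
                                height, 0, webrtc::kVideoRotation_0,
                                &frame) != 0) {
        return -1;
      }
      webrtc::PrintVideoFrame(frame, out);  // Consecutive planes, no stride.
      return webrtc::I420PSNR(&reference, &frame);
    }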


@@ -37,8 +37,7 @@ int PrintBuffer(const uint8_t* buffer, int width, int height, int stride) {
return 0;
}
int PrintFrame(const I420VideoFrame* frame, const char* str) {
int PrintFrame(const VideoFrame* frame, const char* str) {
if (frame == NULL)
return -1;
printf("%s %dx%d \n", str, frame->width(), frame->height());
@@ -57,7 +56,7 @@ int PrintFrame(const I420VideoFrame* frame, const char* str) {
// Create an image based on a YUV frame. Every plane value starts with a start
// value, and will be set to increasing values.
void CreateImage(I420VideoFrame* frame, int plane_offset[kNumOfPlanes]) {
void CreateImage(VideoFrame* frame, int plane_offset[kNumOfPlanes]) {
if (frame == NULL)
return;
for (int plane_num = 0; plane_num < kNumOfPlanes; ++plane_num) {
@@ -83,7 +82,7 @@ class TestLibYuv : public ::testing::Test {
virtual void TearDown();
FILE* source_file_;
I420VideoFrame orig_frame_;
VideoFrame orig_frame_;
rtc::scoped_ptr<uint8_t[]> orig_buffer_;
const int width_;
const int height_;
@@ -142,7 +141,7 @@ TEST_F(TestLibYuv, ConvertTest) {
double psnr = 0.0;
I420VideoFrame res_i420_frame;
VideoFrame res_i420_frame;
EXPECT_EQ(0, res_i420_frame.CreateEmptyFrame(width_, height_, width_,
(width_ + 1) / 2,
(width_ + 1) / 2));
@@ -153,7 +152,7 @@ TEST_F(TestLibYuv, ConvertTest) {
EXPECT_EQ(0, ConvertToI420(kI420, out_i420_buffer.get(), 0, 0, width_,
height_, 0, kVideoRotation_0, &res_i420_frame));
if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) {
if (PrintVideoFrame(res_i420_frame, output_file) < 0) {
return;
}
psnr = I420PSNR(&orig_frame_, &res_i420_frame);
@@ -173,7 +172,7 @@ TEST_F(TestLibYuv, ConvertTest) {
EXPECT_EQ(0, ConvertToI420(kRGB24, res_rgb_buffer2.get(), 0, 0, width_,
height_, 0, kVideoRotation_0, &res_i420_frame));
if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) {
if (PrintVideoFrame(res_i420_frame, output_file) < 0) {
return;
}
psnr = I420PSNR(&orig_frame_, &res_i420_frame);
@@ -189,7 +188,7 @@ TEST_F(TestLibYuv, ConvertTest) {
height_, 0, kVideoRotation_0, &res_i420_frame));
psnr = I420PSNR(&orig_frame_, &res_i420_frame);
EXPECT_EQ(48.0, psnr);
if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) {
if (PrintVideoFrame(res_i420_frame, output_file) < 0) {
return;
}
j++;
@@ -197,7 +196,7 @@ TEST_F(TestLibYuv, ConvertTest) {
printf("\nConvert #%d I420 <-> YV12\n", j);
rtc::scoped_ptr<uint8_t[]> outYV120Buffer(new uint8_t[frame_length_]);
rtc::scoped_ptr<uint8_t[]> res_i420_buffer(new uint8_t[frame_length_]);
I420VideoFrame yv12_frame;
VideoFrame yv12_frame;
EXPECT_EQ(0, ConvertFromI420(orig_frame_, kYV12, 0, outYV120Buffer.get()));
yv12_frame.CreateFrame(outYV120Buffer.get(),
outYV120Buffer.get() + size_y_,
@@ -223,7 +222,7 @@ TEST_F(TestLibYuv, ConvertTest) {
EXPECT_EQ(0, ConvertToI420(kYUY2, out_yuy2_buffer.get(), 0, 0, width_,
height_, 0, kVideoRotation_0, &res_i420_frame));
if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) {
if (PrintVideoFrame(res_i420_frame, output_file) < 0) {
return;
}
@@ -238,7 +237,7 @@ TEST_F(TestLibYuv, ConvertTest) {
EXPECT_EQ(0, ConvertToI420(kRGB565, out_rgb565_buffer.get(), 0, 0, width_,
height_, 0, kVideoRotation_0, &res_i420_frame));
if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) {
if (PrintVideoFrame(res_i420_frame, output_file) < 0) {
return;
}
j++;
@@ -258,7 +257,7 @@ TEST_F(TestLibYuv, ConvertTest) {
EXPECT_EQ(0, ConvertToI420(kARGB, out_argb8888_buffer.get(), 0, 0, width_,
height_, 0, kVideoRotation_0, &res_i420_frame));
if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) {
if (PrintVideoFrame(res_i420_frame, output_file) < 0) {
return;
}
@@ -278,7 +277,7 @@ TEST_F(TestLibYuv, ConvertAlignedFrame) {
double psnr = 0.0;
I420VideoFrame res_i420_frame;
VideoFrame res_i420_frame;
int stride_y = 0;
int stride_uv = 0;
Calc16ByteAlignedStride(width_, &stride_y, &stride_uv);
@@ -290,7 +289,7 @@ TEST_F(TestLibYuv, ConvertAlignedFrame) {
EXPECT_EQ(0, ConvertToI420(kI420, out_i420_buffer.get(), 0, 0, width_,
height_, 0, kVideoRotation_0, &res_i420_frame));
if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) {
if (PrintVideoFrame(res_i420_frame, output_file) < 0) {
return;
}
psnr = I420PSNR(&orig_frame_, &res_i420_frame);
@@ -301,7 +300,7 @@ TEST_F(TestLibYuv, ConvertAlignedFrame) {
TEST_F(TestLibYuv, RotateTest) {
// Use ConvertToI420 for multiple rotations - see that nothing breaks, all
// memory is properly allocated and end result is equal to the starting point.
I420VideoFrame rotated_res_i420_frame;
VideoFrame rotated_res_i420_frame;
int rotated_width = height_;
int rotated_height = width_;
int stride_y;


@@ -47,8 +47,7 @@ int Scaler::Set(int src_width, int src_height,
return 0;
}
int Scaler::Scale(const I420VideoFrame& src_frame,
I420VideoFrame* dst_frame) {
int Scaler::Scale(const VideoFrame& src_frame, VideoFrame* dst_frame) {
assert(dst_frame);
if (src_frame.IsZeroSize())
return -1;


@@ -37,7 +37,7 @@ class TestScaler : public ::testing::Test {
Scaler test_scaler_;
FILE* source_file_;
I420VideoFrame test_frame_;
VideoFrame test_frame_;
const int width_;
const int half_width_;
const int height_;
@@ -88,7 +88,7 @@ TEST_F(TestScaler, ScaleBadInitialValues) {
}
TEST_F(TestScaler, ScaleSendingNullSourcePointer) {
I420VideoFrame null_src_frame;
VideoFrame null_src_frame;
EXPECT_EQ(-1, test_scaler_.Scale(null_src_frame, &test_frame_));
}
@@ -98,7 +98,7 @@ TEST_F(TestScaler, ScaleSendingBufferTooSmall) {
half_width_, half_height_,
kI420, kI420,
kScalePoint));
I420VideoFrame test_frame2;
VideoFrame test_frame2;
rtc::scoped_ptr<uint8_t[]> orig_buffer(new uint8_t[frame_length_]);
EXPECT_GT(fread(orig_buffer.get(), 1, frame_length_, source_file_), 0U);
test_frame_.CreateFrame(orig_buffer.get(),
@@ -296,7 +296,7 @@ double TestScaler::ComputeAvgSequencePSNR(FILE* input_file,
int frame_count = 0;
double avg_psnr = 0;
I420VideoFrame in_frame, out_frame;
VideoFrame in_frame, out_frame;
const int half_width = (width + 1) / 2;
in_frame.CreateEmptyFrame(width, height, width, half_width, half_width);
out_frame.CreateEmptyFrame(width, height, width, half_width, half_width);
@@ -337,8 +337,8 @@ void TestScaler::ScaleSequence(ScaleMethod method,
rewind(source_file);
I420VideoFrame input_frame;
I420VideoFrame output_frame;
VideoFrame input_frame;
VideoFrame output_frame;
int64_t start_clock, total_clock;
total_clock = 0;
int frame_count = 0;
@@ -363,7 +363,7 @@ void TestScaler::ScaleSequence(ScaleMethod method,
start_clock = TickTime::MillisecondTimestamp();
EXPECT_EQ(0, test_scaler_.Scale(input_frame, &output_frame));
total_clock += TickTime::MillisecondTimestamp() - start_clock;
if (PrintI420VideoFrame(output_frame, output_file) < 0) {
if (PrintVideoFrame(output_frame, output_file) < 0) {
return;
}
frame_count++;


@@ -102,7 +102,7 @@ size_t CalcBufferSize(VideoType type, int width, int height) {
return buffer_size;
}
int PrintI420VideoFrame(const I420VideoFrame& frame, FILE* file) {
int PrintVideoFrame(const VideoFrame& frame, FILE* file) {
if (file == NULL)
return -1;
if (frame.IsZeroSize())
@@ -123,8 +123,7 @@ int PrintI420VideoFrame(const I420VideoFrame& frame, FILE* file) {
return 0;
}
int ExtractBuffer(const I420VideoFrame& input_frame,
size_t size, uint8_t* buffer) {
int ExtractBuffer(const VideoFrame& input_frame, size_t size, uint8_t* buffer) {
assert(buffer);
if (input_frame.IsZeroSize())
return -1;
@@ -237,7 +236,7 @@ int ConvertToI420(VideoType src_video_type,
int src_height,
size_t sample_size,
VideoRotation rotation,
I420VideoFrame* dst_frame) {
VideoFrame* dst_frame) {
int dst_width = dst_frame->width();
int dst_height = dst_frame->height();
// LibYuv expects pre-rotation values for dst.
@@ -260,8 +259,9 @@ int ConvertToI420(VideoType src_video_type,
ConvertVideoType(src_video_type));
}
int ConvertFromI420(const I420VideoFrame& src_frame,
VideoType dst_video_type, int dst_sample_size,
int ConvertFromI420(const VideoFrame& src_frame,
VideoType dst_video_type,
int dst_sample_size,
uint8_t* dst_frame) {
return libyuv::ConvertFromI420(src_frame.buffer(kYPlane),
src_frame.stride(kYPlane),
@@ -275,8 +275,9 @@ int ConvertFromI420(const I420VideoFrame& src_frame,
}
// TODO(mikhal): Create a designated VideoFrame for non I420.
int ConvertFromYV12(const I420VideoFrame& src_frame,
VideoType dst_video_type, int dst_sample_size,
int ConvertFromYV12(const VideoFrame& src_frame,
VideoType dst_video_type,
int dst_sample_size,
uint8_t* dst_frame) {
// YV12 = Y, V, U
return libyuv::ConvertFromI420(src_frame.buffer(kYPlane),
@@ -291,8 +292,7 @@ int ConvertFromYV12(const I420VideoFrame& src_frame,
}
// Compute PSNR for an I420 frame (all planes)
double I420PSNR(const I420VideoFrame* ref_frame,
const I420VideoFrame* test_frame) {
double I420PSNR(const VideoFrame* ref_frame, const VideoFrame* test_frame) {
if (!ref_frame || !test_frame)
return -1;
else if ((ref_frame->width() != test_frame->width()) ||
@@ -320,8 +320,7 @@ double I420PSNR(const I420VideoFrame* ref_frame,
}
// Compute SSIM for an I420 frame (all planes)
double I420SSIM(const I420VideoFrame* ref_frame,
const I420VideoFrame* test_frame) {
double I420SSIM(const VideoFrame* ref_frame, const VideoFrame* test_frame) {
if (!ref_frame || !test_frame)
return -1;
else if ((ref_frame->width() != test_frame->width()) ||


@@ -17,7 +17,7 @@
namespace webrtc {
// Helper class for I420VideoFrame: Store plane data and perform basic plane
// Helper class for VideoFrame: Store plane data and perform basic plane
// operations.
class Plane {
public:


@@ -19,17 +19,16 @@
namespace webrtc {
I420VideoFrame::I420VideoFrame() {
VideoFrame::VideoFrame() {
// Intentionally using Reset instead of initializer list so that any missed
// fields in Reset will be caught by memory checkers.
Reset();
}
I420VideoFrame::I420VideoFrame(
const rtc::scoped_refptr<VideoFrameBuffer>& buffer,
uint32_t timestamp,
int64_t render_time_ms,
VideoRotation rotation)
VideoFrame::VideoFrame(const rtc::scoped_refptr<VideoFrameBuffer>& buffer,
uint32_t timestamp,
int64_t render_time_ms,
VideoRotation rotation)
: video_frame_buffer_(buffer),
timestamp_(timestamp),
ntp_time_ms_(0),
@@ -37,24 +36,27 @@ I420VideoFrame::I420VideoFrame(
rotation_(rotation) {
}
I420VideoFrame::I420VideoFrame(void* native_handle,
int width,
int height,
uint32_t timestamp,
int64_t render_time_ms,
VideoRotation rotation,
const rtc::Callback0<void>& no_longer_used)
: I420VideoFrame(new rtc::RefCountedObject<TextureBuffer>(native_handle,
width,
height,
no_longer_used),
timestamp,
render_time_ms,
rotation) {
VideoFrame::VideoFrame(void* native_handle,
int width,
int height,
uint32_t timestamp,
int64_t render_time_ms,
VideoRotation rotation,
const rtc::Callback0<void>& no_longer_used)
: VideoFrame(new rtc::RefCountedObject<TextureBuffer>(native_handle,
width,
height,
no_longer_used),
timestamp,
render_time_ms,
rotation) {
}
int I420VideoFrame::CreateEmptyFrame(int width, int height,
int stride_y, int stride_u, int stride_v) {
int VideoFrame::CreateEmptyFrame(int width,
int height,
int stride_y,
int stride_u,
int stride_v) {
const int half_width = (width + 1) / 2;
DCHECK_GT(width, 0);
DCHECK_GT(height, 0);
@@ -69,14 +71,11 @@ int I420VideoFrame::CreateEmptyFrame(int width, int height,
rotation_ = kVideoRotation_0;
// Check if it's safe to reuse allocation.
if (video_frame_buffer_ &&
video_frame_buffer_->HasOneRef() &&
if (video_frame_buffer_ && video_frame_buffer_->HasOneRef() &&
!video_frame_buffer_->native_handle() &&
width == video_frame_buffer_->width() &&
height == video_frame_buffer_->height() &&
stride_y == stride(kYPlane) &&
stride_u == stride(kUPlane) &&
stride_v == stride(kVPlane)) {
height == video_frame_buffer_->height() && stride_y == stride(kYPlane) &&
stride_u == stride(kUPlane) && stride_v == stride(kVPlane)) {
return 0;
}
@@ -86,27 +85,27 @@ int I420VideoFrame::CreateEmptyFrame(int width, int height,
return 0;
}
int I420VideoFrame::CreateFrame(const uint8_t* buffer_y,
const uint8_t* buffer_u,
const uint8_t* buffer_v,
int width, int height,
int stride_y,
int stride_u,
int stride_v) {
return CreateFrame(buffer_y, buffer_u, buffer_v,
width, height, stride_y, stride_u, stride_v,
kVideoRotation_0);
int VideoFrame::CreateFrame(const uint8_t* buffer_y,
const uint8_t* buffer_u,
const uint8_t* buffer_v,
int width,
int height,
int stride_y,
int stride_u,
int stride_v) {
return CreateFrame(buffer_y, buffer_u, buffer_v, width, height, stride_y,
stride_u, stride_v, kVideoRotation_0);
}
int I420VideoFrame::CreateFrame(const uint8_t* buffer_y,
const uint8_t* buffer_u,
const uint8_t* buffer_v,
int width,
int height,
int stride_y,
int stride_u,
int stride_v,
VideoRotation rotation) {
int VideoFrame::CreateFrame(const uint8_t* buffer_y,
const uint8_t* buffer_u,
const uint8_t* buffer_v,
int width,
int height,
int stride_y,
int stride_u,
int stride_v,
VideoRotation rotation) {
const int half_height = (height + 1) / 2;
const int expected_size_y = height * stride_y;
const int expected_size_u = half_height * stride_u;
@@ -119,10 +118,10 @@ int I420VideoFrame::CreateFrame(const uint8_t* buffer_y,
return 0;
}
int I420VideoFrame::CreateFrame(const uint8_t* buffer,
int width,
int height,
VideoRotation rotation) {
int VideoFrame::CreateFrame(const uint8_t* buffer,
int width,
int height,
VideoRotation rotation) {
const int stride_y = width;
const int stride_uv = (width + 1) / 2;
@@ -133,7 +132,7 @@ int I420VideoFrame::CreateFrame(const uint8_t* buffer,
stride_uv, stride_uv, rotation);
}
int I420VideoFrame::CopyFrame(const I420VideoFrame& videoFrame) {
int VideoFrame::CopyFrame(const VideoFrame& videoFrame) {
if (videoFrame.IsZeroSize()) {
video_frame_buffer_ = nullptr;
} else if (videoFrame.native_handle()) {
@@ -152,7 +151,7 @@ int I420VideoFrame::CopyFrame(const I420VideoFrame& videoFrame) {
return 0;
}
void I420VideoFrame::ShallowCopy(const I420VideoFrame& videoFrame) {
void VideoFrame::ShallowCopy(const VideoFrame& videoFrame) {
video_frame_buffer_ = videoFrame.video_frame_buffer();
timestamp_ = videoFrame.timestamp_;
ntp_time_ms_ = videoFrame.ntp_time_ms_;
@@ -160,7 +159,7 @@ void I420VideoFrame::ShallowCopy(const I420VideoFrame& videoFrame) {
rotation_ = videoFrame.rotation_;
}
void I420VideoFrame::Reset() {
void VideoFrame::Reset() {
video_frame_buffer_ = nullptr;
timestamp_ = 0;
ntp_time_ms_ = 0;
@@ -168,47 +167,46 @@ void I420VideoFrame::Reset() {
rotation_ = kVideoRotation_0;
}
uint8_t* I420VideoFrame::buffer(PlaneType type) {
uint8_t* VideoFrame::buffer(PlaneType type) {
return video_frame_buffer_ ? video_frame_buffer_->data(type) : nullptr;
}
const uint8_t* I420VideoFrame::buffer(PlaneType type) const {
const uint8_t* VideoFrame::buffer(PlaneType type) const {
// Const cast to call the correct const-version of data.
const VideoFrameBuffer* const_buffer = video_frame_buffer_.get();
return const_buffer ? const_buffer->data(type) : nullptr;
}
int I420VideoFrame::allocated_size(PlaneType type) const {
int VideoFrame::allocated_size(PlaneType type) const {
const int plane_height = (type == kYPlane) ? height() : (height() + 1) / 2;
return plane_height * stride(type);
}
int I420VideoFrame::stride(PlaneType type) const {
int VideoFrame::stride(PlaneType type) const {
return video_frame_buffer_ ? video_frame_buffer_->stride(type) : 0;
}
int I420VideoFrame::width() const {
int VideoFrame::width() const {
return video_frame_buffer_ ? video_frame_buffer_->width() : 0;
}
int I420VideoFrame::height() const {
int VideoFrame::height() const {
return video_frame_buffer_ ? video_frame_buffer_->height() : 0;
}
bool I420VideoFrame::IsZeroSize() const {
bool VideoFrame::IsZeroSize() const {
return !video_frame_buffer_;
}
void* I420VideoFrame::native_handle() const {
void* VideoFrame::native_handle() const {
return video_frame_buffer_ ? video_frame_buffer_->native_handle() : nullptr;
}
rtc::scoped_refptr<VideoFrameBuffer> I420VideoFrame::video_frame_buffer()
const {
rtc::scoped_refptr<VideoFrameBuffer> VideoFrame::video_frame_buffer() const {
return video_frame_buffer_;
}
void I420VideoFrame::set_video_frame_buffer(
void VideoFrame::set_video_frame_buffer(
const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer) {
video_frame_buffer_ = buffer;
}


@@ -26,7 +26,7 @@ VideoRenderFrames::VideoRenderFrames()
: render_delay_ms_(10) {
}
int32_t VideoRenderFrames::AddFrame(const I420VideoFrame& new_frame) {
int32_t VideoRenderFrames::AddFrame(const VideoFrame& new_frame) {
const int64_t time_now = TickTime::MillisecondTimestamp();
// Drop old frames only when there are other frames in the queue, otherwise, a
@@ -53,8 +53,8 @@ int32_t VideoRenderFrames::AddFrame(const I420VideoFrame& new_frame) {
return static_cast<int32_t>(incoming_frames_.size());
}
I420VideoFrame VideoRenderFrames::FrameToRender() {
I420VideoFrame render_frame;
VideoFrame VideoRenderFrames::FrameToRender() {
VideoFrame render_frame;
// Get the newest frame that can be released for rendering.
while (!incoming_frames_.empty() && TimeToNextFrameRelease() <= 0) {
render_frame = incoming_frames_.front();


@@ -25,10 +25,10 @@ class VideoRenderFrames {
VideoRenderFrames();
// Add a frame to the render queue
int32_t AddFrame(const I420VideoFrame& new_frame);
int32_t AddFrame(const VideoFrame& new_frame);
// Get a frame for rendering, or a zero-size frame if it's not time to render.
I420VideoFrame FrameToRender();
VideoFrame FrameToRender();
// Releases all frames
int32_t ReleaseAllFrames();
@@ -48,7 +48,7 @@ class VideoRenderFrames {
enum { KFutureRenderTimestampMS = 10000 };
// Sorted list with frames to be rendered, oldest first.
std::list<I420VideoFrame> incoming_frames_;
std::list<VideoFrame> incoming_frames_;
// Estimated delay from when a frame is released until it's rendered.
uint32_t render_delay_ms_;
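
A sketch of the consumer side of this queue, following the comments above
(the helper name and include path are assumptions):

    #include "webrtc/common_video/video_render_frames.h"

    // Feed one frame in and drain whatever is due for rendering.
    void PumpQueue(webrtc::VideoRenderFrames* queue,
                   const webrtc::VideoFrame& incoming) {
      queue->AddFrame(incoming);  // Returns the new queue size.
      webrtc::VideoFrame next = queue->FrameToRender();
      if (!next.IsZeroSize()) {
        // Hand |next| to the renderer; a zero-size frame means nothing is
        // due yet.
      }
    }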


@@ -17,7 +17,7 @@
namespace webrtc {
class I420VideoFrame;
class VideoFrame;
struct EncodedFrame {
public:
@@ -34,7 +34,7 @@ class I420FrameCallback {
public:
// This function is called with an I420 frame allowing the user to modify the
// frame content.
virtual void FrameCallback(I420VideoFrame* video_frame) = 0;
virtual void FrameCallback(VideoFrame* video_frame) = 0;
protected:
virtual ~I420FrameCallback() {}
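
A minimal sketch of a callback implementation against the renamed type
(the class name and include path are assumptions):

    #include <cstring>
    #include "webrtc/frame_callback.h"

    // Hypothetical example: blank the luma plane in place.
    class BlankLuma : public webrtc::I420FrameCallback {
     public:
      void FrameCallback(webrtc::VideoFrame* frame) override {
        std::memset(frame->buffer(webrtc::kYPlane), 0,
                    frame->allocated_size(webrtc::kYPlane));
      }
    };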


@@ -93,15 +93,16 @@ public:
virtual int32_t video_codec_info(VideoCodec& /*videoCodec*/) const
{return -1;}
virtual int32_t GetVideoFromFile(I420VideoFrame& /*videoFrame*/)
{ return -1;}
virtual int32_t GetVideoFromFile(VideoFrame& /*videoFrame*/) { return -1; }
// Same as GetVideoFromFile(). videoFrame will have the resolution specified
// by the width outWidth and height outHeight in pixels.
virtual int32_t GetVideoFromFile(I420VideoFrame& /*videoFrame*/,
virtual int32_t GetVideoFromFile(VideoFrame& /*videoFrame*/,
const uint32_t /*outWidth*/,
const uint32_t /*outHeight*/)
{return -1;}
const uint32_t /*outHeight*/) {
return -1;
}
protected:
virtual ~FilePlayer() {}


@@ -78,8 +78,7 @@ public:
bool videoOnly = false) = 0;
// Record the video frame in videoFrame to AVI file.
virtual int32_t RecordVideoToFile(
const I420VideoFrame& videoFrame) = 0;
virtual int32_t RecordVideoToFile(const VideoFrame& videoFrame) = 0;
protected:
virtual ~FileRecorder() {}


@@ -72,8 +72,7 @@ public:
{
return -1;
}
virtual int32_t RecordVideoToFile(const I420VideoFrame& videoFrame)
{
virtual int32_t RecordVideoToFile(const VideoFrame& videoFrame) {
return -1;
}


@@ -94,8 +94,8 @@ protected:
class VideoCaptureDataCallback
{
public:
virtual void OnIncomingCapturedFrame(const int32_t id,
const I420VideoFrame& videoFrame) = 0;
virtual void OnIncomingCapturedFrame(const int32_t id,
const VideoFrame& videoFrame) = 0;
virtual void OnCaptureDelayChanged(const int32_t id,
const int32_t delay) = 0;
protected:


@@ -64,8 +64,8 @@ static const int kTestWidth = 352;
static const int kTestFramerate = 30;
// Compares the content of two video frames.
static bool CompareFrames(const webrtc::I420VideoFrame& frame1,
const webrtc::I420VideoFrame& frame2) {
static bool CompareFrames(const webrtc::VideoFrame& frame1,
const webrtc::VideoFrame& frame2) {
bool result =
(frame1.stride(webrtc::kYPlane) == frame2.stride(webrtc::kYPlane)) &&
(frame1.stride(webrtc::kUPlane) == frame2.stride(webrtc::kUPlane)) &&
@@ -104,9 +104,8 @@ class TestVideoCaptureCallback : public VideoCaptureDataCallback {
printf("No of timing warnings %d\n", timing_warnings_);
}
virtual void OnIncomingCapturedFrame(
const int32_t id,
const webrtc::I420VideoFrame& videoFrame) {
virtual void OnIncomingCapturedFrame(const int32_t id,
const webrtc::VideoFrame& videoFrame) {
CriticalSectionScoped cs(capture_cs_.get());
int height = videoFrame.height();
int width = videoFrame.width();
@@ -175,7 +174,7 @@ class TestVideoCaptureCallback : public VideoCaptureDataCallback {
return capability_;
}
bool CompareLastFrame(const webrtc::I420VideoFrame& frame) {
bool CompareLastFrame(const webrtc::VideoFrame& frame) {
CriticalSectionScoped cs(capture_cs_.get());
return CompareFrames(last_frame_, frame);
}
@@ -192,7 +191,7 @@ class TestVideoCaptureCallback : public VideoCaptureDataCallback {
int64_t last_render_time_ms_;
int incoming_frames_;
int timing_warnings_;
webrtc::I420VideoFrame last_frame_;
webrtc::VideoFrame last_frame_;
webrtc::VideoRotation rotate_frame_;
};
@@ -463,7 +462,7 @@ class VideoCaptureExternalTest : public testing::Test {
webrtc::VideoCaptureExternal* capture_input_interface_;
webrtc::scoped_refptr<VideoCaptureModule> capture_module_;
rtc::scoped_ptr<webrtc::ProcessThread> process_module_;
webrtc::I420VideoFrame test_frame_;
webrtc::VideoFrame test_frame_;
TestVideoCaptureCallback capture_callback_;
TestVideoCaptureFeedBack capture_feedback_;
};


@@ -211,7 +211,7 @@ int32_t VideoCaptureImpl::CaptureDelay()
return _setCaptureDelay;
}
int32_t VideoCaptureImpl::DeliverCapturedFrame(I420VideoFrame& captureFrame) {
int32_t VideoCaptureImpl::DeliverCapturedFrame(VideoFrame& captureFrame) {
UpdateFrameCount(); // frame count used for local frame rate callback.
const bool callOnCaptureDelayChanged = _setCaptureDelay != _captureDelay;


@@ -104,7 +104,7 @@ public:
protected:
VideoCaptureImpl(const int32_t id);
virtual ~VideoCaptureImpl();
int32_t DeliverCapturedFrame(I420VideoFrame& captureFrame);
int32_t DeliverCapturedFrame(VideoFrame& captureFrame);
int32_t _id; // Module ID
char* _deviceUniqueId; // current Device unique name;
@@ -132,7 +132,7 @@ private:
VideoRotation _rotateFrame; // Set if the frame should be rotated by the
// capture module.
I420VideoFrame _captureFrame;
VideoFrame _captureFrame;
// Indicate whether rotation should be applied before delivered externally.
bool apply_rotation_;


@@ -48,7 +48,7 @@ class I420Encoder : public VideoEncoder {
//
// Return value : WEBRTC_VIDEO_CODEC_OK if OK.
// <0 - Error
int Encode(const I420VideoFrame& inputImage,
int Encode(const VideoFrame& inputImage,
const CodecSpecificInfo* /*codecSpecificInfo*/,
const std::vector<VideoFrameType>* /*frame_types*/) override;
@@ -145,7 +145,7 @@ class I420Decoder : public VideoDecoder {
uint16_t* width,
uint16_t* height);
I420VideoFrame _decodedImage;
VideoFrame _decodedImage;
int _width;
int _height;
bool _inited;


@@ -72,9 +72,7 @@ int I420Encoder::InitEncode(const VideoCodec* codecSettings,
return WEBRTC_VIDEO_CODEC_OK;
}
int I420Encoder::Encode(const I420VideoFrame& inputImage,
int I420Encoder::Encode(const VideoFrame& inputImage,
const CodecSpecificInfo* /*codecSpecificInfo*/,
const std::vector<VideoFrameType>* /*frame_types*/) {
if (!_inited) {


@@ -32,9 +32,10 @@ class MockVideoEncoder : public VideoEncoder {
MOCK_METHOD3(InitEncode, int32_t(const VideoCodec* codecSettings,
int32_t numberOfCores,
size_t maxPayloadSize));
MOCK_METHOD3(Encode, int32_t(const I420VideoFrame& inputImage,
const CodecSpecificInfo* codecSpecificInfo,
const std::vector<VideoFrameType>* frame_types));
MOCK_METHOD3(Encode,
int32_t(const VideoFrame& inputImage,
const CodecSpecificInfo* codecSpecificInfo,
const std::vector<VideoFrameType>* frame_types));
MOCK_METHOD1(RegisterEncodeCompleteCallback,
int32_t(EncodedImageCallback* callback));
MOCK_METHOD0(Release, int32_t());
@@ -48,8 +49,7 @@ class MockVideoEncoder : public VideoEncoder {
class MockDecodedImageCallback : public DecodedImageCallback {
public:
MOCK_METHOD1(Decoded,
int32_t(I420VideoFrame& decodedImage));
MOCK_METHOD1(Decoded, int32_t(VideoFrame& decodedImage));
MOCK_METHOD1(ReceivedDecodedReferenceFrame,
int32_t(const uint64_t pictureId));
MOCK_METHOD1(ReceivedDecodedFrame,


@@ -292,7 +292,7 @@ void VideoProcessorImpl::FrameEncoded(const EncodedImage& encoded_image) {
last_frame_missing_ = copied_image._length == 0;
}
void VideoProcessorImpl::FrameDecoded(const I420VideoFrame& image) {
void VideoProcessorImpl::FrameDecoded(const VideoFrame& image) {
TickTime decode_stop = TickTime::Now();
int frame_number = image.timestamp();
// Report stats
@@ -312,7 +312,7 @@ void VideoProcessorImpl::FrameDecoded(const I420VideoFrame& image) {
// upsample back to original size: needed for PSNR and SSIM computations.
if (image.width() != config_.codec_settings->width ||
image.height() != config_.codec_settings->height) {
I420VideoFrame up_image;
VideoFrame up_image;
int ret_val = scaler_.Set(image.width(), image.height(),
config_.codec_settings->width,
config_.codec_settings->height,
@@ -404,9 +404,8 @@ VideoProcessorImpl::VideoProcessorEncodeCompleteCallback::Encoded(
video_processor_->FrameEncoded(encoded_image); // Forward to parent class.
return 0;
}
int32_t
VideoProcessorImpl::VideoProcessorDecodeCompleteCallback::Decoded(
I420VideoFrame& image) {
int32_t VideoProcessorImpl::VideoProcessorDecodeCompleteCallback::Decoded(
VideoFrame& image) {
video_processor_->FrameDecoded(image); // forward to parent class
return 0;
}


@@ -170,7 +170,7 @@ class VideoProcessorImpl : public VideoProcessor {
// Invoked by the callback when a frame has completed encoding.
void FrameEncoded(const webrtc::EncodedImage& encodedImage);
// Invoked by the callback when a frame has completed decoding.
void FrameDecoded(const webrtc::I420VideoFrame& image);
void FrameDecoded(const webrtc::VideoFrame& image);
// Used for getting a 32-bit integer representing time
// (checks the size is within signed 32-bit bounds before casting it)
int GetElapsedTimeMicroseconds(const webrtc::TickTime& start,
@@ -199,7 +199,7 @@ class VideoProcessorImpl : public VideoProcessor {
// Keep track of the last successful frame, since we need to write that
// when decoding fails:
uint8_t* last_successful_frame_buffer_;
webrtc::I420VideoFrame source_frame_;
webrtc::VideoFrame source_frame_;
// To keep track of if we have excluded the first key frame from packet loss:
bool first_key_frame_has_been_excluded_;
// To tell the decoder previous frame have been dropped due to packet loss:
@@ -241,7 +241,7 @@ class VideoProcessorImpl : public VideoProcessor {
explicit VideoProcessorDecodeCompleteCallback(VideoProcessorImpl* vp)
: video_processor_(vp) {
}
int32_t Decoded(webrtc::I420VideoFrame& image) override;
int32_t Decoded(webrtc::VideoFrame& image) override;
private:
VideoProcessorImpl* video_processor_;


@@ -233,7 +233,7 @@ int SimulcastEncoderAdapter::InitEncode(const VideoCodec* inst,
}
int SimulcastEncoderAdapter::Encode(
const I420VideoFrame& input_image,
const VideoFrame& input_image,
const CodecSpecificInfo* codec_specific_info,
const std::vector<VideoFrameType>* frame_types) {
if (!Initialized()) {
@@ -286,7 +286,7 @@ int SimulcastEncoderAdapter::Encode(
codec_specific_info,
&stream_frame_types);
} else {
I420VideoFrame dst_frame;
VideoFrame dst_frame;
// Making sure that destination frame is of sufficient size.
// Aligning stride values based on width.
dst_frame.CreateEmptyFrame(dst_width, dst_height,


@@ -40,7 +40,7 @@ class SimulcastEncoderAdapter : public VP8Encoder {
int InitEncode(const VideoCodec* inst,
int number_of_cores,
size_t max_payload_size) override;
int Encode(const I420VideoFrame& input_image,
int Encode(const VideoFrame& input_image,
const CodecSpecificInfo* codec_specific_info,
const std::vector<VideoFrameType>* frame_types) override;
int RegisterEncodeCompleteCallback(EncodedImageCallback* callback) override;


@@ -115,9 +115,11 @@ class MockVideoEncoder : public VideoEncoder {
return 0;
}
int32_t Encode(const I420VideoFrame& inputImage,
int32_t Encode(const VideoFrame& inputImage,
const CodecSpecificInfo* codecSpecificInfo,
const std::vector<VideoFrameType>* frame_types) { return 0; }
const std::vector<VideoFrameType>* frame_types) {
return 0;
}
int32_t RegisterEncodeCompleteCallback(EncodedImageCallback* callback) {
callback_ = callback;


@@ -124,7 +124,7 @@ class Vp8TestDecodedImageCallback : public DecodedImageCallback {
Vp8TestDecodedImageCallback()
: decoded_frames_(0) {
}
virtual int32_t Decoded(I420VideoFrame& decoded_image) {
virtual int32_t Decoded(VideoFrame& decoded_image) {
for (int i = 0; i < decoded_image.width(); ++i) {
EXPECT_NEAR(kColorY, decoded_image.buffer(kYPlane)[i], 1);
}
@@ -228,9 +228,8 @@ class TestVp8Simulcast : public ::testing::Test {
: encoder_(encoder),
decoder_(decoder) {}
// Creates an I420VideoFrame from |plane_colors|.
static void CreateImage(I420VideoFrame* frame,
int plane_colors[kNumOfPlanes]) {
// Creates a VideoFrame from |plane_colors|.
static void CreateImage(VideoFrame* frame, int plane_colors[kNumOfPlanes]) {
for (int plane_num = 0; plane_num < kNumOfPlanes; ++plane_num) {
int width = (plane_num != kYPlane ? (frame->width() + 1) / 2 :
frame->width());
@@ -995,7 +994,7 @@ class TestVp8Simulcast : public ::testing::Test {
rtc::scoped_ptr<VP8Decoder> decoder_;
MockDecodedImageCallback decoder_callback_;
VideoCodec settings_;
I420VideoFrame input_frame_;
VideoFrame input_frame_;
};
} // namespace testing


@@ -76,13 +76,13 @@ bool Vp8UnitTestEncodeCompleteCallback::EncodeComplete() {
class Vp8UnitTestDecodeCompleteCallback : public webrtc::DecodedImageCallback {
public:
explicit Vp8UnitTestDecodeCompleteCallback(I420VideoFrame* frame)
explicit Vp8UnitTestDecodeCompleteCallback(VideoFrame* frame)
: decoded_frame_(frame), decode_complete(false) {}
int Decoded(webrtc::I420VideoFrame& frame);
int Decoded(webrtc::VideoFrame& frame);
bool DecodeComplete();
private:
I420VideoFrame* decoded_frame_;
VideoFrame* decoded_frame_;
bool decode_complete;
};
@@ -94,7 +94,7 @@ bool Vp8UnitTestDecodeCompleteCallback::DecodeComplete() {
return false;
}
int Vp8UnitTestDecodeCompleteCallback::Decoded(I420VideoFrame& image) {
int Vp8UnitTestDecodeCompleteCallback::Decoded(VideoFrame& image) {
decoded_frame_->CopyFrame(image);
decode_complete = true;
return 0;
@@ -181,11 +181,11 @@ class TestVp8Impl : public ::testing::Test {
rtc::scoped_ptr<Vp8UnitTestDecodeCompleteCallback> decode_complete_callback_;
rtc::scoped_ptr<uint8_t[]> source_buffer_;
FILE* source_file_;
I420VideoFrame input_frame_;
VideoFrame input_frame_;
rtc::scoped_ptr<VideoEncoder> encoder_;
rtc::scoped_ptr<VideoDecoder> decoder_;
EncodedImage encoded_frame_;
I420VideoFrame decoded_frame_;
VideoFrame decoded_frame_;
size_t length_source_frame_;
VideoCodec codec_inst_;
};


@@ -713,10 +713,9 @@ uint32_t VP8EncoderImpl::MaxIntraTarget(uint32_t optimalBuffersize) {
return (targetPct < minIntraTh) ? minIntraTh: targetPct;
}
int VP8EncoderImpl::Encode(
const I420VideoFrame& frame,
const CodecSpecificInfo* codec_specific_info,
const std::vector<VideoFrameType>* frame_types) {
int VP8EncoderImpl::Encode(const VideoFrame& frame,
const CodecSpecificInfo* codec_specific_info,
const std::vector<VideoFrameType>* frame_types) {
TRACE_EVENT1("webrtc", "VP8::Encode", "timestamp", frame.timestamp());
if (!inited_) {
@@ -734,7 +733,7 @@ int VP8EncoderImpl::Encode(
const bool use_quality_scaler = encoders_.size() == 1 &&
configurations_[0].rc_dropframe_thresh > 0 &&
codec_.codecSpecific.VP8.automaticResizeOn;
const I420VideoFrame& input_image =
const VideoFrame& input_image =
use_quality_scaler ? quality_scaler_.GetScaledFrame(frame) : frame;
if (use_quality_scaler && (input_image.width() != codec_.width ||
@@ -902,8 +901,7 @@ int VP8EncoderImpl::Encode(
}
// TODO(pbos): Make sure this works for properly for >1 encoders.
int VP8EncoderImpl::UpdateCodecFrameSize(
const I420VideoFrame& input_image) {
int VP8EncoderImpl::UpdateCodecFrameSize(const VideoFrame& input_image) {
codec_.width = input_image.width();
codec_.height = input_image.height();
// Update the cpu_speed setting for resolution change.
@@ -952,9 +950,8 @@ void VP8EncoderImpl::PopulateCodecSpecific(
picture_id_[stream_idx] = (picture_id_[stream_idx] + 1) & 0x7FFF;
}
int VP8EncoderImpl::GetEncodedPartitions(
const I420VideoFrame& input_image,
bool only_predicting_from_key_frame) {
int VP8EncoderImpl::GetEncodedPartitions(const VideoFrame& input_image,
bool only_predicting_from_key_frame) {
int stream_idx = static_cast<int>(encoders_.size()) - 1;
for (size_t encoder_idx = 0; encoder_idx < encoders_.size();
++encoder_idx, --stream_idx) {
@@ -1342,8 +1339,8 @@ int VP8DecoderImpl::ReturnFrame(const vpx_image_t* img,
last_frame_width_ = img->d_w;
last_frame_height_ = img->d_h;
// Allocate memory for decoded image.
I420VideoFrame decoded_image(buffer_pool_.CreateBuffer(img->d_w, img->d_h),
timestamp, 0, kVideoRotation_0);
VideoFrame decoded_image(buffer_pool_.CreateBuffer(img->d_w, img->d_h),
timestamp, 0, kVideoRotation_0);
libyuv::I420Copy(
img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y],
img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U],


@@ -46,7 +46,7 @@ class VP8EncoderImpl : public VP8Encoder {
int number_of_cores,
size_t max_payload_size);
virtual int Encode(const I420VideoFrame& input_image,
virtual int Encode(const VideoFrame& input_image,
const CodecSpecificInfo* codec_specific_info,
const std::vector<VideoFrameType>* frame_types);
@@ -72,7 +72,7 @@ class VP8EncoderImpl : public VP8Encoder {
int InitAndSetControlSettings();
// Update frame size for codec.
int UpdateCodecFrameSize(const I420VideoFrame& input_image);
int UpdateCodecFrameSize(const VideoFrame& input_image);
void PopulateCodecSpecific(CodecSpecificInfo* codec_specific,
const vpx_codec_cx_pkt& pkt,
@@ -80,7 +80,7 @@ class VP8EncoderImpl : public VP8Encoder {
uint32_t timestamp,
bool only_predicting_from_key_frame);
int GetEncodedPartitions(const I420VideoFrame& input_image,
int GetEncodedPartitions(const VideoFrame& input_image,
bool only_predicting_from_key_frame);
// Get the stream bitrate, for the stream |stream_idx|, given the bitrate


@@ -68,15 +68,15 @@ class Vp8SequenceCoderDecodeCallback : public webrtc::DecodedImageCallback {
public:
explicit Vp8SequenceCoderDecodeCallback(FILE* decoded_file)
: decoded_file_(decoded_file) {}
int Decoded(webrtc::I420VideoFrame& frame);
int Decoded(webrtc::VideoFrame& frame);
bool DecodeComplete();
private:
FILE* decoded_file_;
};
int Vp8SequenceCoderDecodeCallback::Decoded(webrtc::I420VideoFrame& image) {
EXPECT_EQ(0, webrtc::PrintI420VideoFrame(image, decoded_file_));
int Vp8SequenceCoderDecodeCallback::Decoded(webrtc::VideoFrame& image) {
EXPECT_EQ(0, webrtc::PrintVideoFrame(image, decoded_file_));
return 0;
}
@@ -140,7 +140,7 @@ int SequenceCoder(webrtc::test::CommandLineParser& parser) {
return -1;
}
EXPECT_EQ(0, decoder->InitDecode(&inst, 1));
webrtc::I420VideoFrame input_frame;
webrtc::VideoFrame input_frame;
size_t length = webrtc::CalcBufferSize(webrtc::kI420, width, height);
rtc::scoped_ptr<uint8_t[]> frame_buffer(new uint8_t[length]);


@@ -32,7 +32,7 @@ namespace webrtc {
// The benefit of owning the pool that libvpx relies on for decoding is that the
// decoded frames returned by libvpx (from vpx_codec_get_frame) use parts of our
// buffers for the decoded image data. By retaining ownership of this buffer
// using scoped_refptr, the image buffer can be reused by I420VideoFrames and no
// using scoped_refptr, the image buffer can be reused by VideoFrames and no
// frame copy has to occur during decoding and frame delivery.
//
// Pseudo example usage case:
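The pseudo example the comment promises is cut off by the diff context. A minimal sketch of the pattern it describes, with the caveat that InitializeVpxUsePool() and ClearPool() are assumed method names not shown in this diff:
// Sketch only, not part of this commit; method names are assumptions.
Vp9FrameBufferPool pool;
vpx_codec_ctx_t ctx;
vpx_codec_dec_init(&ctx, vpx_codec_vp9_dx(), NULL, 0);
pool.InitializeVpxUsePool(&ctx);  // libvpx now decodes into pool-owned buffers.
// ... vpx_codec_decode(&ctx, data, size, NULL, 0) ...
// Frames from vpx_codec_get_frame() reference pool memory, so a VideoFrame
// can wrap them (see the WrappedI420Buffer hunk below) without a copy.
pool.ClearPool();  // Release the buffers when decoding is done.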


@@ -261,7 +261,7 @@ uint32_t VP9EncoderImpl::MaxIntraTarget(uint32_t optimal_buffer_size) {
return (target_pct < min_intra_size) ? min_intra_size: target_pct;
}
int VP9EncoderImpl::Encode(const I420VideoFrame& input_image,
int VP9EncoderImpl::Encode(const VideoFrame& input_image,
const CodecSpecificInfo* codec_specific_info,
const std::vector<VideoFrameType>* frame_types) {
if (!inited_) {
@@ -323,7 +323,7 @@ void VP9EncoderImpl::PopulateCodecSpecific(CodecSpecificInfo* codec_specific,
picture_id_ = (picture_id_ + 1) & 0x7FFF;
}
int VP9EncoderImpl::GetEncodedPartitions(const I420VideoFrame& input_image) {
int VP9EncoderImpl::GetEncodedPartitions(const VideoFrame& input_image) {
vpx_codec_iter_t iter = NULL;
encoded_image_._length = 0;
encoded_image_._frameType = kDeltaFrame;
@@ -513,7 +513,7 @@ int VP9DecoderImpl::ReturnFrame(const vpx_image_t* img, uint32_t timestamp) {
Vp9FrameBufferPool::Vp9FrameBuffer* img_buffer =
static_cast<Vp9FrameBufferPool::Vp9FrameBuffer*>(img->fb_priv);
img_buffer->AddRef();
// The buffer can be used directly by the I420VideoFrame (without copy) by
// The buffer can be used directly by the VideoFrame (without copy) by
// using a WrappedI420Buffer.
rtc::scoped_refptr<WrappedI420Buffer> img_wrapped_buffer(
new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
@@ -527,7 +527,7 @@ int VP9DecoderImpl::ReturnFrame(const vpx_image_t* img, uint32_t timestamp) {
// release |img_buffer|.
rtc::Bind(&WrappedI420BufferNoLongerUsedCb, img_buffer)));
I420VideoFrame decoded_image;
VideoFrame decoded_image;
decoded_image.set_video_frame_buffer(img_wrapped_buffer);
decoded_image.set_timestamp(timestamp);
int ret = decode_complete_callback_->Decoded(decoded_image);


@@ -32,7 +32,7 @@ class VP9EncoderImpl : public VP9Encoder {
int number_of_cores,
size_t max_payload_size) override;
int Encode(const I420VideoFrame& input_image,
int Encode(const VideoFrame& input_image,
const CodecSpecificInfo* codec_specific_info,
const std::vector<VideoFrameType>* frame_types) override;
@@ -55,7 +55,7 @@ class VP9EncoderImpl : public VP9Encoder {
const vpx_codec_cx_pkt& pkt,
uint32_t timestamp);
int GetEncodedPartitions(const I420VideoFrame& input_image);
int GetEncodedPartitions(const VideoFrame& input_image);
// Determine maximum target for Intra frames
//


@@ -291,7 +291,7 @@ public:
// Return value : VCM_OK, on success.
// < 0, on error.
virtual int32_t AddVideoFrame(
const I420VideoFrame& videoFrame,
const VideoFrame& videoFrame,
const VideoContentMetrics* contentMetrics = NULL,
const CodecSpecificInfo* codecSpecificInfo = NULL) = 0;


@@ -81,7 +81,7 @@ class VCMPacketizationCallback {
// Callback class used for passing decoded frames which are ready to be rendered.
class VCMReceiveCallback {
public:
virtual int32_t FrameToRender(I420VideoFrame& videoFrame) = 0;
virtual int32_t FrameToRender(VideoFrame& videoFrame) = 0;
virtual int32_t ReceivedDecodedReferenceFrame(
const uint64_t pictureId) {
return -1;
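For orientation, a minimal implementation of this callback could look like the sketch below; the class name is illustrative, and FileOutputFrameReceiver further down in this diff is a real in-tree implementation:
// Sketch only, not part of this commit.
class NullFrameReceiver : public webrtc::VCMReceiveCallback {
 public:
  int32_t FrameToRender(webrtc::VideoFrame& videoFrame) override {
    // A real receiver would hand |videoFrame| to a renderer here.
    return 0;
  }
};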


@@ -46,8 +46,7 @@ VCMReceiveCallback* VCMDecodedFrameCallback::UserReceiveCallback()
return _receiveCallback;
}
int32_t VCMDecodedFrameCallback::Decoded(I420VideoFrame& decodedImage)
{
int32_t VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage) {
// TODO(holmer): We should improve this so that we can handle multiple
// callbacks from one call to Decode().
VCMFrameInformation* frameInfo;


@@ -40,7 +40,7 @@ public:
void SetUserReceiveCallback(VCMReceiveCallback* receiveCallback);
VCMReceiveCallback* UserReceiveCallback();
virtual int32_t Decoded(I420VideoFrame& decodedImage);
virtual int32_t Decoded(VideoFrame& decodedImage);
virtual int32_t ReceivedDecodedReferenceFrame(const uint64_t pictureId);
virtual int32_t ReceivedDecodedFrame(const uint64_t pictureId);


@@ -98,10 +98,9 @@ VCMGenericEncoder::InitEncode(const VideoCodec* settings,
return 0;
}
int32_t
VCMGenericEncoder::Encode(const I420VideoFrame& inputFrame,
const CodecSpecificInfo* codecSpecificInfo,
const std::vector<FrameType>& frameTypes) {
int32_t VCMGenericEncoder::Encode(const VideoFrame& inputFrame,
const CodecSpecificInfo* codecSpecificInfo,
const std::vector<FrameType>& frameTypes) {
std::vector<VideoFrameType> video_frame_types(frameTypes.size(),
kDeltaFrame);
VCMEncodedFrame::ConvertFrameTypes(frameTypes, &video_frame_types);
@@ -176,7 +175,7 @@ VCMGenericEncoder::SetPeriodicKeyFrames(bool enable)
int32_t VCMGenericEncoder::RequestFrame(
const std::vector<FrameType>& frame_types) {
I420VideoFrame image;
VideoFrame image;
std::vector<VideoFrameType> video_frame_types(frame_types.size(),
kDeltaFrame);
VCMEncodedFrame::ConvertFrameTypes(frame_types, &video_frame_types);


@@ -99,7 +99,7 @@ public:
* cameraFrameRate : Request or information from the remote side
* frameType : The requested frame type to encode
*/
int32_t Encode(const I420VideoFrame& inputFrame,
int32_t Encode(const VideoFrame& inputFrame,
const CodecSpecificInfo* codecSpecificInfo,
const std::vector<FrameType>& frameTypes);
/**


@@ -172,7 +172,7 @@ class VideoCodingModuleImpl : public VideoCodingModule {
return receiver_->SetVideoProtection(videoProtection, enable);
}
int32_t AddVideoFrame(const I420VideoFrame& videoFrame,
int32_t AddVideoFrame(const VideoFrame& videoFrame,
const VideoContentMetrics* contentMetrics,
const CodecSpecificInfo* codecSpecificInfo) override {
return sender_->AddVideoFrame(


@@ -99,7 +99,7 @@ class VideoSender {
int32_t RegisterProtectionCallback(VCMProtectionCallback* protection);
void SetVideoProtection(bool enable, VCMVideoProtection videoProtection);
int32_t AddVideoFrame(const I420VideoFrame& videoFrame,
int32_t AddVideoFrame(const VideoFrame& videoFrame,
const VideoContentMetrics* _contentMetrics,
const CodecSpecificInfo* codecSpecificInfo);


@@ -297,7 +297,7 @@ void VideoSender::SetVideoProtection(bool enable,
}
}
// Add one raw video frame to the encoder, blocking.
int32_t VideoSender::AddVideoFrame(const I420VideoFrame& videoFrame,
int32_t VideoSender::AddVideoFrame(const VideoFrame& videoFrame,
const VideoContentMetrics* contentMetrics,
const CodecSpecificInfo* codecSpecificInfo) {
CriticalSectionScoped cs(_sendCritSect);
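Per the comment above, this is the blocking capture-side entry point. A hedged usage sketch, assuming a configured VideoCodingModule* named |vcm| and a frame filled by the capturer:
// Sketch only, not part of this commit.
webrtc::VideoFrame captured_frame;
// ... populate |captured_frame| from the capture pipeline ...
// Content metrics and codec-specific info default to NULL (see the
// AddVideoFrame declaration earlier in this diff).
if (vcm->AddVideoFrame(captured_frame) != VCM_OK) {
  // The call returns < 0 on error.
}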


@@ -71,8 +71,8 @@ MATCHER_P(MatchesVp8StreamInfo, expected, "") {
class EmptyFrameGenerator : public FrameGenerator {
public:
EmptyFrameGenerator(int width, int height) : width_(width), height_(height) {}
I420VideoFrame* NextFrame() override {
frame_.reset(new I420VideoFrame());
VideoFrame* NextFrame() override {
frame_.reset(new VideoFrame());
frame_->CreateEmptyFrame(width_, height_, width_, (width_ + 1) / 2,
(width_ + 1) / 2);
return frame_.get();
@@ -81,7 +81,7 @@ class EmptyFrameGenerator : public FrameGenerator {
private:
const int width_;
const int height_;
rtc::scoped_ptr<I420VideoFrame> frame_;
rtc::scoped_ptr<VideoFrame> frame_;
};
class PacketizationCallback : public VCMPacketizationCallback {


@@ -95,7 +95,7 @@ FileOutputFrameReceiver::~FileOutputFrameReceiver() {
}
int32_t FileOutputFrameReceiver::FrameToRender(
webrtc::I420VideoFrame& video_frame) {
webrtc::VideoFrame& video_frame) {
if (timing_file_ == NULL) {
std::string basename;
std::string extension;
@@ -123,7 +123,7 @@ int32_t FileOutputFrameReceiver::FrameToRender(
}
fprintf(timing_file_, "%u, %u\n", video_frame.timestamp(),
webrtc::MaskWord64ToUWord32(video_frame.render_time_ms()));
if (PrintI420VideoFrame(video_frame, out_file_) < 0) {
if (PrintVideoFrame(video_frame, out_file_) < 0) {
return -1;
}
return 0;


@@ -57,7 +57,7 @@ class FileOutputFrameReceiver : public webrtc::VCMReceiveCallback {
virtual ~FileOutputFrameReceiver();
// VCMReceiveCallback
virtual int32_t FrameToRender(webrtc::I420VideoFrame& video_frame);
virtual int32_t FrameToRender(webrtc::VideoFrame& video_frame);
private:
std::string out_filename_;


@@ -30,15 +30,15 @@ class QualityScaler {
void ReportQP(int qp);
void ReportDroppedFrame();
void Reset(int framerate, int bitrate, int width, int height);
Resolution GetScaledResolution(const I420VideoFrame& frame);
const I420VideoFrame& GetScaledFrame(const I420VideoFrame& frame);
Resolution GetScaledResolution(const VideoFrame& frame);
const VideoFrame& GetScaledFrame(const VideoFrame& frame);
private:
void AdjustScale(bool up);
void ClearSamples();
Scaler scaler_;
I420VideoFrame scaled_frame_;
VideoFrame scaled_frame_;
size_t num_samples_;
int low_qp_threshold_;


@@ -46,7 +46,7 @@ void QualityScaler::ReportDroppedFrame() {
}
QualityScaler::Resolution QualityScaler::GetScaledResolution(
const I420VideoFrame& frame) {
const VideoFrame& frame) {
// Should be set through InitEncode -> Should be set by now.
assert(low_qp_threshold_ >= 0);
assert(num_samples_ > 0);
@@ -83,8 +83,7 @@ QualityScaler::Resolution QualityScaler::GetScaledResolution(
return res;
}
const I420VideoFrame& QualityScaler::GetScaledFrame(
const I420VideoFrame& frame) {
const VideoFrame& QualityScaler::GetScaledFrame(const VideoFrame& frame) {
Resolution res = GetScaledResolution(frame);
if (res.width == frame.width())
return frame;
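Pieced together from the header above and the VP8EncoderImpl::Encode() hunk earlier in this diff, the per-frame calling pattern is roughly the sketch below; |framerate|, |bitrate|, |last_qp|, and |input_frame| are assumed to come from the encoder:
// Sketch only, not part of this commit.
webrtc::QualityScaler qs;
qs.Reset(framerate, bitrate, width, height);
// After each encoded frame, feed back quality information:
qs.ReportQP(last_qp);  // or qs.ReportDroppedFrame() when a frame is dropped
// Before encoding the next frame:
const webrtc::VideoFrame& to_encode = qs.GetScaledFrame(input_frame);
// Per the early return above, |to_encode| is |input_frame| itself when no
// downscale is needed.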


@@ -61,7 +61,7 @@ class QualityScalerTest : public ::testing::Test {
void ExpectScaleUsingReportedResolution() {
QualityScaler::Resolution res = qs_.GetScaledResolution(input_frame_);
const I420VideoFrame& scaled_frame = qs_.GetScaledFrame(input_frame_);
const VideoFrame& scaled_frame = qs_.GetScaledFrame(input_frame_);
EXPECT_EQ(res.width, scaled_frame.width());
EXPECT_EQ(res.height, scaled_frame.height());
}
@@ -71,7 +71,7 @@ class QualityScalerTest : public ::testing::Test {
void DoesNotDownscaleFrameDimensions(int width, int height);
QualityScaler qs_;
I420VideoFrame input_frame_;
VideoFrame input_frame_;
};
TEST_F(QualityScalerTest, UsesOriginalFrameInitially) {


@@ -114,8 +114,7 @@ class VideoProcessingModule : public Module {
\return 0 on success, -1 on failure.
*/
static int32_t GetFrameStats(FrameStats* stats,
const I420VideoFrame& frame);
static int32_t GetFrameStats(FrameStats* stats, const VideoFrame& frame);
/**
Checks the validity of a FrameStats struct. Currently, valid implies only
@@ -148,7 +147,7 @@ class VideoProcessingModule : public Module {
\return 0 on success, -1 on failure.
*/
static int32_t Brighten(I420VideoFrame* frame, int delta);
static int32_t Brighten(VideoFrame* frame, int delta);
/**
Detects and removes camera flicker from a video stream. Every frame from
@@ -165,7 +164,7 @@ class VideoProcessingModule : public Module {
\return 0 on success, -1 on failure.
*/
virtual int32_t Deflickering(I420VideoFrame* frame, FrameStats* stats) = 0;
virtual int32_t Deflickering(VideoFrame* frame, FrameStats* stats) = 0;
/**
Detects if a video frame is excessively bright or dark. Returns a
@@ -180,7 +179,7 @@ class VideoProcessingModule : public Module {
\return A member of BrightnessWarning on success, -1 on error
*/
virtual int32_t BrightnessDetection(const I420VideoFrame& frame,
virtual int32_t BrightnessDetection(const VideoFrame& frame,
const FrameStats& stats) = 0;
/**
@@ -250,8 +249,8 @@ class VideoProcessingModule : public Module {
\return VPM_OK on success, a negative value on error (see error codes)
*/
virtual int32_t PreprocessFrame(const I420VideoFrame& frame,
I420VideoFrame** processed_frame) = 0;
virtual int32_t PreprocessFrame(const VideoFrame& frame,
VideoFrame** processed_frame) = 0;
/**
Return content metrics for the last processed frame
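Taken together, the declarations above suggest a calling pattern along these lines; |vpm| is an assumed VideoProcessingModule instance and the sketch is not code from this commit:
// Sketch only, not part of this commit.
webrtc::VideoFrame frame;
// ... fill |frame| with I420 data ...
webrtc::VideoProcessingModule::FrameStats stats;
if (webrtc::VideoProcessingModule::GetFrameStats(&stats, frame) == 0) {
  vpm->Deflickering(&frame, &stats);       // modifies |frame| in place
  vpm->BrightnessDetection(frame, stats);  // returns a BrightnessWarning member
}
webrtc::VideoFrame* processed = NULL;
vpm->PreprocessFrame(frame, &processed);
// |processed| stays NULL when no resampling takes place, as the
// VideoProcessingModuleImpl header later in this diff notes.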


@@ -15,7 +15,7 @@
namespace webrtc {
namespace VideoProcessing {
int32_t Brighten(I420VideoFrame* frame, int delta) {
int32_t Brighten(VideoFrame* frame, int delta) {
assert(frame);
if (frame->IsZeroSize()) {
return VPM_PARAMETER_ERROR;


@@ -17,7 +17,7 @@
namespace webrtc {
namespace VideoProcessing {
int32_t Brighten(I420VideoFrame* frame, int delta);
int32_t Brighten(VideoFrame* frame, int delta);
} // namespace VideoProcessing
} // namespace webrtc


@@ -27,7 +27,7 @@ void VPMBrightnessDetection::Reset() {
}
int32_t VPMBrightnessDetection::ProcessFrame(
const I420VideoFrame& frame,
const VideoFrame& frame,
const VideoProcessingModule::FrameStats& stats) {
if (frame.IsZeroSize()) {
return VPM_PARAMETER_ERROR;


@@ -24,7 +24,7 @@ class VPMBrightnessDetection {
~VPMBrightnessDetection();
void Reset();
int32_t ProcessFrame(const I420VideoFrame& frame,
int32_t ProcessFrame(const VideoFrame& frame,
const VideoProcessingModule::FrameStats& stats);
private:


@@ -49,9 +49,8 @@ VPMContentAnalysis::~VPMContentAnalysis() {
Release();
}
VideoContentMetrics* VPMContentAnalysis::ComputeContentMetrics(
const I420VideoFrame& inputFrame) {
const VideoFrame& inputFrame) {
if (inputFrame.IsZeroSize())
return NULL;


@@ -35,8 +35,7 @@ class VPMContentAnalysis {
// Input: new frame
// Return value: pointer to structure containing content Analysis
// metrics or NULL value upon error
VideoContentMetrics* ComputeContentMetrics(const I420VideoFrame&
inputFrame);
VideoContentMetrics* ComputeContentMetrics(const VideoFrame& inputFrame);
// Release all allocated memory
// Output: 0 if OK, negative value upon error


@@ -80,7 +80,8 @@ void VPMDeflickering::Reset() {
}
}
int32_t VPMDeflickering::ProcessFrame(I420VideoFrame* frame,
int32_t VPMDeflickering::ProcessFrame(
VideoFrame* frame,
VideoProcessingModule::FrameStats* stats) {
assert(frame);
uint32_t frame_memory;


@@ -24,7 +24,7 @@ class VPMDeflickering {
~VPMDeflickering();
void Reset();
int32_t ProcessFrame(I420VideoFrame* frame,
int32_t ProcessFrame(VideoFrame* frame,
VideoProcessingModule::FrameStats* stats);
private:


@@ -86,9 +86,8 @@ uint32_t VPMFramePreprocessor::DecimatedHeight() const {
return spatial_resampler_->TargetHeight();
}
int32_t VPMFramePreprocessor::PreprocessFrame(const I420VideoFrame& frame,
I420VideoFrame** processed_frame) {
int32_t VPMFramePreprocessor::PreprocessFrame(const VideoFrame& frame,
VideoFrame** processed_frame) {
if (frame.IsZeroSize()) {
return VPM_PARAMETER_ERROR;
}


@@ -52,8 +52,8 @@ class VPMFramePreprocessor {
uint32_t DecimatedHeight() const;
// Preprocess output:
int32_t PreprocessFrame(const I420VideoFrame& frame,
I420VideoFrame** processed_frame);
int32_t PreprocessFrame(const VideoFrame& frame,
VideoFrame** processed_frame);
VideoContentMetrics* ContentMetrics() const;
private:
@@ -62,7 +62,7 @@ class VPMFramePreprocessor {
enum { kSkipFrameCA = 2 };
VideoContentMetrics* content_metrics_;
I420VideoFrame resampled_frame_;
VideoFrame resampled_frame_;
VPMSpatialResampler* spatial_resampler_;
VPMContentAnalysis* ca_;
VPMVideoDecimator* vd_;


@@ -45,8 +45,8 @@ void VPMSimpleSpatialResampler::Reset() {
target_height_ = 0;
}
int32_t VPMSimpleSpatialResampler::ResampleFrame(const I420VideoFrame& inFrame,
I420VideoFrame* outFrame) {
int32_t VPMSimpleSpatialResampler::ResampleFrame(const VideoFrame& inFrame,
VideoFrame* outFrame) {
// Don't copy if frame remains as is.
if (resampling_mode_ == kNoRescaling)
return VPM_OK;


@@ -28,8 +28,8 @@ class VPMSpatialResampler {
virtual void SetInputFrameResampleMode(VideoFrameResampling
resampling_mode) = 0;
virtual void Reset() = 0;
virtual int32_t ResampleFrame(const I420VideoFrame& inFrame,
I420VideoFrame* outFrame) = 0;
virtual int32_t ResampleFrame(const VideoFrame& inFrame,
VideoFrame* outFrame) = 0;
virtual int32_t TargetWidth() = 0;
virtual int32_t TargetHeight() = 0;
virtual bool ApplyResample(int32_t width, int32_t height) = 0;
@@ -42,8 +42,8 @@ class VPMSimpleSpatialResampler : public VPMSpatialResampler {
virtual int32_t SetTargetFrameSize(int32_t width, int32_t height);
virtual void SetInputFrameResampleMode(VideoFrameResampling resampling_mode);
virtual void Reset();
virtual int32_t ResampleFrame(const I420VideoFrame& inFrame,
I420VideoFrame* outFrame);
virtual int32_t ResampleFrame(const VideoFrame& inFrame,
VideoFrame* outFrame);
virtual int32_t TargetWidth();
virtual int32_t TargetHeight();
virtual bool ApplyResample(int32_t width, int32_t height);


@@ -62,7 +62,7 @@ void VideoProcessingModuleImpl::Reset() {
}
int32_t VideoProcessingModule::GetFrameStats(FrameStats* stats,
const I420VideoFrame& frame) {
const VideoFrame& frame) {
if (frame.IsZeroSize()) {
LOG(LS_ERROR) << "Zero size frame.";
return VPM_PARAMETER_ERROR;
@@ -111,19 +111,19 @@ void VideoProcessingModule::ClearFrameStats(FrameStats* stats) {
memset(stats->hist, 0, sizeof(stats->hist));
}
int32_t VideoProcessingModule::Brighten(I420VideoFrame* frame, int delta) {
int32_t VideoProcessingModule::Brighten(VideoFrame* frame, int delta) {
return VideoProcessing::Brighten(frame, delta);
}
int32_t VideoProcessingModuleImpl::Deflickering(I420VideoFrame* frame,
int32_t VideoProcessingModuleImpl::Deflickering(VideoFrame* frame,
FrameStats* stats) {
CriticalSectionScoped mutex(&mutex_);
return deflickering_.ProcessFrame(frame, stats);
}
int32_t VideoProcessingModuleImpl::BrightnessDetection(
const I420VideoFrame& frame,
const FrameStats& stats) {
const VideoFrame& frame,
const FrameStats& stats) {
CriticalSectionScoped mutex(&mutex_);
return brightness_detection_.ProcessFrame(frame, stats);
}
@@ -164,8 +164,8 @@ uint32_t VideoProcessingModuleImpl::DecimatedHeight() const {
}
int32_t VideoProcessingModuleImpl::PreprocessFrame(
const I420VideoFrame& frame,
I420VideoFrame **processed_frame) {
const VideoFrame& frame,
VideoFrame** processed_frame) {
CriticalSectionScoped mutex(&mutex_);
return frame_pre_processor_.PreprocessFrame(frame, processed_frame);
}


@@ -28,9 +28,9 @@ class VideoProcessingModuleImpl : public VideoProcessingModule {
void Reset() override;
int32_t Deflickering(I420VideoFrame* frame, FrameStats* stats) override;
int32_t Deflickering(VideoFrame* frame, FrameStats* stats) override;
int32_t BrightnessDetection(const I420VideoFrame& frame,
int32_t BrightnessDetection(const VideoFrame& frame,
const FrameStats& stats) override;
// Frame pre-processor functions
@@ -57,8 +57,8 @@ class VideoProcessingModuleImpl : public VideoProcessingModule {
// Pre-process incoming frame: Sample when needed and compute content
// metrics when enabled.
// If no resampling takes place - processed_frame is set to NULL.
int32_t PreprocessFrame(const I420VideoFrame& frame,
I420VideoFrame** processed_frame) override;
int32_t PreprocessFrame(const VideoFrame& frame,
VideoFrame** processed_frame) override;
VideoContentMetrics* ContentMetrics() const override;
private:


@@ -70,7 +70,7 @@ TEST_F(VideoProcessingModuleTest, Deflickering)
if (run_idx == 0)
{
if (PrintI420VideoFrame(video_frame_, deflickerFile) < 0) {
if (PrintVideoFrame(video_frame_, deflickerFile) < 0) {
return;
}
}


@@ -26,11 +26,11 @@ DEFINE_bool(gen_files, false, "Output files for visual inspection.");
} // namespace
static void PreprocessFrameAndVerify(const I420VideoFrame& source,
static void PreprocessFrameAndVerify(const VideoFrame& source,
int target_width,
int target_height,
VideoProcessingModule* vpm,
I420VideoFrame** out_frame);
VideoFrame** out_frame);
static void CropFrame(const uint8_t* source_data,
int source_width,
int source_height,
@@ -38,22 +38,21 @@ static void CropFrame(const uint8_t* source_data,
int offset_y,
int cropped_width,
int cropped_height,
I420VideoFrame* cropped_frame);
VideoFrame* cropped_frame);
// The |source_data| is cropped and scaled to |target_width| x |target_height|,
// and then scaled back to the expected cropped size. |expected_psnr| is used to
// verify basic quality, and is set to be ~0.1/0.05dB lower than actual PSNR
// verified under the same conditions.
static void TestSize(const I420VideoFrame& source_frame,
const I420VideoFrame& cropped_source_frame,
static void TestSize(const VideoFrame& source_frame,
const VideoFrame& cropped_source_frame,
int target_width,
int target_height,
double expected_psnr,
VideoProcessingModule* vpm);
static bool CompareFrames(const webrtc::I420VideoFrame& frame1,
const webrtc::I420VideoFrame& frame2);
static void WriteProcessedFrameForVisualInspection(
const I420VideoFrame& source,
const I420VideoFrame& processed);
static bool CompareFrames(const webrtc::VideoFrame& frame1,
const webrtc::VideoFrame& frame2);
static void WriteProcessedFrameForVisualInspection(const VideoFrame& source,
const VideoFrame& processed);
VideoProcessingModuleTest::VideoProcessingModuleTest()
: vpm_(NULL),
@@ -98,7 +97,7 @@ TEST_F(VideoProcessingModuleTest, HandleNullBuffer) {
// TODO(mikhal/stefan): Do we need this one?
VideoProcessingModule::FrameStats stats;
// Video frame with unallocated buffer.
I420VideoFrame videoFrame;
VideoFrame videoFrame;
EXPECT_EQ(-3, vpm_->GetFrameStats(&stats, videoFrame));
@@ -121,7 +120,7 @@ TEST_F(VideoProcessingModuleTest, HandleBadStats) {
}
TEST_F(VideoProcessingModuleTest, IdenticalResultsAfterReset) {
I420VideoFrame video_frame2;
VideoFrame video_frame2;
VideoProcessingModule::FrameStats stats;
// Only testing non-static functions here.
rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
@@ -184,7 +183,7 @@ TEST_F(VideoProcessingModuleTest, PreprocessorLogic) {
// Disable spatial sampling.
vpm_->SetInputFrameResampleMode(kNoRescaling);
EXPECT_EQ(VPM_OK, vpm_->SetTargetResolution(resolution, resolution, 30));
I420VideoFrame* out_frame = NULL;
VideoFrame* out_frame = NULL;
// Set rescaling => output frame != NULL.
vpm_->SetInputFrameResampleMode(kFastRescaling);
PreprocessFrameAndVerify(video_frame_, resolution, resolution, vpm_,
@@ -218,7 +217,7 @@ TEST_F(VideoProcessingModuleTest, Resampler) {
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
0, kVideoRotation_0, &video_frame_));
// Cropped source frame that will contain the expected visible region.
I420VideoFrame cropped_source_frame;
VideoFrame cropped_source_frame;
cropped_source_frame.CopyFrame(video_frame_);
for (uint32_t run_idx = 0; run_idx < NumRuns; run_idx++) {
@@ -283,11 +282,11 @@ TEST_F(VideoProcessingModuleTest, Resampler) {
static_cast<int>(min_runtime));
}
void PreprocessFrameAndVerify(const I420VideoFrame& source,
void PreprocessFrameAndVerify(const VideoFrame& source,
int target_width,
int target_height,
VideoProcessingModule* vpm,
I420VideoFrame** out_frame) {
VideoFrame** out_frame) {
ASSERT_EQ(VPM_OK, vpm->SetTargetResolution(target_width, target_height, 30));
ASSERT_EQ(VPM_OK, vpm->PreprocessFrame(source, out_frame));
@@ -312,7 +311,7 @@ void CropFrame(const uint8_t* source_data,
int offset_y,
int cropped_width,
int cropped_height,
I420VideoFrame* cropped_frame) {
VideoFrame* cropped_frame) {
cropped_frame->CreateEmptyFrame(cropped_width, cropped_height, cropped_width,
(cropped_width + 1) / 2,
(cropped_width + 1) / 2);
@@ -321,14 +320,14 @@ void CropFrame(const uint8_t* source_data,
source_height, 0, kVideoRotation_0, cropped_frame));
}
void TestSize(const I420VideoFrame& source_frame,
const I420VideoFrame& cropped_source_frame,
void TestSize(const VideoFrame& source_frame,
const VideoFrame& cropped_source_frame,
int target_width,
int target_height,
double expected_psnr,
VideoProcessingModule* vpm) {
// Resample source_frame to out_frame.
I420VideoFrame* out_frame = NULL;
VideoFrame* out_frame = NULL;
vpm->SetInputFrameResampleMode(kBox);
PreprocessFrameAndVerify(source_frame, target_width, target_height, vpm,
&out_frame);
@@ -337,7 +336,7 @@ void TestSize(const I420VideoFrame& source_frame,
WriteProcessedFrameForVisualInspection(source_frame, *out_frame);
// Scale |resampled_source_frame| back to the source scale.
I420VideoFrame resampled_source_frame;
VideoFrame resampled_source_frame;
resampled_source_frame.CopyFrame(*out_frame);
PreprocessFrameAndVerify(resampled_source_frame, cropped_source_frame.width(),
cropped_source_frame.height(), vpm, &out_frame);
@@ -352,8 +351,8 @@ void TestSize(const I420VideoFrame& source_frame,
target_width, target_height);
}
bool CompareFrames(const webrtc::I420VideoFrame& frame1,
const webrtc::I420VideoFrame& frame2) {
bool CompareFrames(const webrtc::VideoFrame& frame1,
const webrtc::VideoFrame& frame2) {
for (int plane = 0; plane < webrtc::kNumOfPlanes; plane ++) {
webrtc::PlaneType plane_type = static_cast<webrtc::PlaneType>(plane);
int allocated_size1 = frame1.allocated_size(plane_type);
@@ -368,8 +367,8 @@ bool CompareFrames(const webrtc::I420VideoFrame& frame1,
return true;
}
void WriteProcessedFrameForVisualInspection(const I420VideoFrame& source,
const I420VideoFrame& processed) {
void WriteProcessedFrameForVisualInspection(const VideoFrame& source,
const VideoFrame& processed) {
// Skip if writing to files is not enabled.
if (!FLAGS_gen_files)
return;
@@ -381,7 +380,7 @@ void WriteProcessedFrameForVisualInspection(const I420VideoFrame& source,
std::cout << "Watch " << filename.str() << " and verify that it is okay."
<< std::endl;
FILE* stand_alone_file = fopen(filename.str().c_str(), "wb");
if (PrintI420VideoFrame(processed, stand_alone_file) < 0)
if (PrintVideoFrame(processed, stand_alone_file) < 0)
std::cerr << "Failed to write: " << filename.str() << std::endl;
if (stand_alone_file)
fclose(stand_alone_file);


@@ -33,7 +33,7 @@ class VideoProcessingModuleTest : public ::testing::Test {
}
VideoProcessingModule* vpm_;
FILE* source_file_;
I420VideoFrame video_frame_;
VideoFrame video_frame_;
const int width_;
const int half_width_;
const int height_;


@@ -381,9 +381,8 @@ int32_t AndroidNativeOpenGl2Channel::Init(int32_t zOrder,
return 0;
}
int32_t AndroidNativeOpenGl2Channel::RenderFrame(
const uint32_t /*streamId*/,
const I420VideoFrame& videoFrame) {
int32_t AndroidNativeOpenGl2Channel::RenderFrame(const uint32_t /*streamId*/,
const VideoFrame& videoFrame) {
// WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__);
_renderCritSect.Enter();
_bufferToRender = videoFrame;


@@ -33,9 +33,8 @@ class AndroidNativeOpenGl2Channel: public AndroidStream {
const float right, const float bottom);
//Implement VideoRenderCallback
virtual int32_t RenderFrame(
const uint32_t streamId,
const I420VideoFrame& videoFrame);
virtual int32_t RenderFrame(const uint32_t streamId,
const VideoFrame& videoFrame);
//Implements AndroidStream
virtual void DeliverFrame(JNIEnv* jniEnv);
@@ -54,7 +53,7 @@ class AndroidNativeOpenGl2Channel: public AndroidStream {
uint32_t _id;
CriticalSectionWrapper& _renderCritSect;
I420VideoFrame _bufferToRender;
VideoFrame _bufferToRender;
VideoRenderAndroid& _renderer;
JavaVM* _jvm;
jobject _javaRenderObj;


@@ -409,10 +409,8 @@ int32_t AndroidSurfaceViewChannel::Init(
return 0;
}
int32_t AndroidSurfaceViewChannel::RenderFrame(
const uint32_t /*streamId*/,
const I420VideoFrame& videoFrame) {
int32_t AndroidSurfaceViewChannel::RenderFrame(const uint32_t /*streamId*/,
const VideoFrame& videoFrame) {
// WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__);
_renderCritSect.Enter();
_bufferToRender = videoFrame;


@@ -33,7 +33,7 @@ class AndroidSurfaceViewChannel : public AndroidStream {
//Implement VideoRenderCallback
virtual int32_t RenderFrame(const uint32_t streamId,
const I420VideoFrame& videoFrame);
const VideoFrame& videoFrame);
//Implements AndroidStream
virtual void DeliverFrame(JNIEnv* jniEnv);
@@ -42,7 +42,7 @@ class AndroidSurfaceViewChannel : public AndroidStream {
uint32_t _id;
CriticalSectionWrapper& _renderCritSect;
I420VideoFrame _bufferToRender;
VideoFrame _bufferToRender;
VideoRenderAndroid& _renderer;
JavaVM* _jvm;
jobject _javaRenderObj;


@@ -214,8 +214,7 @@ int32_t VideoRenderOpenGles20::SetCoordinates(int32_t zOrder,
return 0;
}
int32_t VideoRenderOpenGles20::Render(const I420VideoFrame& frameToRender) {
int32_t VideoRenderOpenGles20::Render(const VideoFrame& frameToRender) {
if (frameToRender.IsZeroSize()) {
return -1;
}
@@ -335,7 +334,7 @@ static void InitializeTexture(int name, int id, int width, int height) {
GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL);
}
void VideoRenderOpenGles20::SetupTextures(const I420VideoFrame& frameToRender) {
void VideoRenderOpenGles20::SetupTextures(const VideoFrame& frameToRender) {
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
"%s: width %d, height %d", __FUNCTION__,
frameToRender.width(), frameToRender.height());
@@ -373,8 +372,7 @@ static void GlTexSubImage2D(GLsizei width, GLsizei height, int stride,
}
}
void VideoRenderOpenGles20::UpdateTextures(const
I420VideoFrame& frameToRender) {
void VideoRenderOpenGles20::UpdateTextures(const VideoFrame& frameToRender) {
const GLsizei width = frameToRender.width();
const GLsizei height = frameToRender.height();


@@ -25,7 +25,7 @@ class VideoRenderOpenGles20 {
~VideoRenderOpenGles20();
int32_t Setup(int32_t width, int32_t height);
int32_t Render(const I420VideoFrame& frameToRender);
int32_t Render(const VideoFrame& frameToRender);
int32_t SetCoordinates(int32_t zOrder, const float left, const float top,
const float right, const float bottom);
@@ -35,8 +35,8 @@ class VideoRenderOpenGles20 {
GLuint loadShader(GLenum shaderType, const char* pSource);
GLuint createProgram(const char* pVertexSource,
const char* pFragmentSource);
void SetupTextures(const I420VideoFrame& frameToRender);
void UpdateTextures(const I420VideoFrame& frameToRender);
void SetupTextures(const VideoFrame& frameToRender);
void UpdateTextures(const VideoFrame& frameToRender);
int32_t _id;
GLuint _textureIds[3]; // Texture id of Y,U and V texture.


@@ -189,8 +189,7 @@ int32_t VideoRenderExternalImpl::SetBitmap(const void* bitMap,
// VideoRenderCallback
int32_t VideoRenderExternalImpl::RenderFrame(const uint32_t streamId,
const I420VideoFrame& videoFrame)
{
const VideoFrame& videoFrame) {
return 0;
}
} // namespace webrtc


@@ -115,7 +115,7 @@ public:
// VideoRenderCallback
virtual int32_t RenderFrame(const uint32_t streamId,
const I420VideoFrame& videoFrame);
const VideoFrame& videoFrame);
private:
CriticalSectionWrapper& _critSect;


@@ -254,16 +254,15 @@ public:
/*
* Set a start image. The image is rendered before the first image has been delivered
*/
virtual int32_t
SetStartImage(const uint32_t streamId,
const I420VideoFrame& videoFrame) = 0;
virtual int32_t SetStartImage(const uint32_t streamId,
const VideoFrame& videoFrame) = 0;
/*
* Set a timeout image. The image is rendered if no video frame has been delivered
*/
virtual int32_t SetTimeoutImage(const uint32_t streamId,
const I420VideoFrame& videoFrame,
const uint32_t timeout)= 0;
const VideoFrame& videoFrame,
const uint32_t timeout) = 0;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_INTERFACE_VIDEO_RENDER_H_
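A hedged sketch of how these two calls are typically driven; the render module pointer, stream id, image contents, and the 500 ms timeout are all illustrative:
// Sketch only, not part of this commit.
webrtc::VideoFrame logo;
// ... fill |logo| with an I420 image ...
render_module->SetStartImage(stream_id, logo);         // shown until the first frame
render_module->SetTimeoutImage(stream_id, logo, 500);  // shown after 500 ms
                                                       // without new frames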


@@ -16,7 +16,7 @@
#include "webrtc/modules/video_render/include/video_render_defines.h"
/*
* This OpenGles20 is the class of renderer for I420VideoFrame into a GLES 2.0
* This OpenGles20 is the class of renderer for VideoFrame into a GLES 2.0
* window used in the VideoRenderIosView class.
*/
namespace webrtc {
@@ -26,7 +26,7 @@ class OpenGles20 {
~OpenGles20();
bool Setup(int32_t width, int32_t height);
bool Render(const I420VideoFrame& frame);
bool Render(const VideoFrame& frame);
// SetCoordinates
// Sets the coordinates where the stream shall be rendered.
@@ -45,10 +45,10 @@ class OpenGles20 {
GLuint CreateProgram(const char* vertex_source, const char* fragment_source);
// Initialize the textures by the frame width and height
void SetupTextures(const I420VideoFrame& frame);
void SetupTextures(const VideoFrame& frame);
// Update the textures by the YUV data from the frame
void UpdateTextures(const I420VideoFrame& frame);
void UpdateTextures(const VideoFrame& frame);
GLuint texture_ids_[3]; // Texture id of Y,U and V texture.
GLuint program_;


@@ -151,7 +151,7 @@ bool OpenGles20::SetCoordinates(const float z_order,
return true;
}
bool OpenGles20::Render(const I420VideoFrame& frame) {
bool OpenGles20::Render(const VideoFrame& frame) {
if (texture_width_ != (GLsizei)frame.width() ||
texture_height_ != (GLsizei)frame.height()) {
SetupTextures(frame);
@@ -261,7 +261,7 @@ static void InitializeTexture(int name, int id, int width, int height) {
NULL);
}
void OpenGles20::SetupTextures(const I420VideoFrame& frame) {
void OpenGles20::SetupTextures(const VideoFrame& frame) {
const GLsizei width = frame.width();
const GLsizei height = frame.height();
@@ -310,7 +310,7 @@ static void GlTexSubImage2D(GLsizei width,
}
}
void OpenGles20::UpdateTextures(const I420VideoFrame& frame) {
void OpenGles20::UpdateTextures(const VideoFrame& frame) {
const GLsizei width = frame.width();
const GLsizei height = frame.height();


@@ -25,7 +25,7 @@ class VideoRenderIosChannel : public VideoRenderCallback {
// Implementation of VideoRenderCallback.
int32_t RenderFrame(const uint32_t stream_id,
const I420VideoFrame& video_frame) override;
const VideoFrame& video_frame) override;
int SetStreamSettings(const float z_order,
const float left,
@@ -37,7 +37,7 @@ class VideoRenderIosChannel : public VideoRenderCallback {
private:
VideoRenderIosView* view_;
I420VideoFrame* current_frame_;
VideoFrame* current_frame_;
bool buffer_is_updated_;
};


@@ -17,14 +17,13 @@
using namespace webrtc;
VideoRenderIosChannel::VideoRenderIosChannel(VideoRenderIosView* view)
: view_(view),
current_frame_(new I420VideoFrame()),
buffer_is_updated_(false) {}
: view_(view), current_frame_(new VideoFrame()), buffer_is_updated_(false) {
}
VideoRenderIosChannel::~VideoRenderIosChannel() { delete current_frame_; }
int32_t VideoRenderIosChannel::RenderFrame(const uint32_t stream_id,
const I420VideoFrame& video_frame) {
const VideoFrame& video_frame) {
current_frame_->CopyFrame(video_frame);
current_frame_->set_render_time_ms(0);
buffer_is_updated_ = true;


@@ -20,7 +20,7 @@
- (BOOL)createContext;
- (BOOL)presentFramebuffer;
- (BOOL)renderFrame:(webrtc::I420VideoFrame*)frameToRender;
- (BOOL)renderFrame:(webrtc::VideoFrame*)frameToRender;
- (BOOL)setCoordinatesForZOrder:(const float)zOrder
Left:(const float)left
Top:(const float)top
