Using Convert in lieu of ExtractBuffer: less error-prone (as we don't need to compute buffer sizes etc.). This CL is the first in a series (converting all of WebRTC at once would make it quite a big CL). While at it, fixing a few headers.

BUG=988

Review URL: https://webrtc-codereview.appspot.com/995014

git-svn-id: http://webrtc.googlecode.com/svn/trunk@3343 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
mikhal@webrtc.org 2013-01-08 19:19:59 +00:00
parent 27cb3017f5
commit 658d423e81
12 changed files with 123 additions and 179 deletions

View File

@ -185,19 +185,11 @@ I420Decoder::Decode(const EncodedImage& inputImage,
}
// Set decoded image parameters.
int half_width = (_width + 1) / 2;
int half_height = (_height + 1) / 2;
int size_y = _width * _height;
int size_uv = half_width * half_height;
const uint8_t* buffer_y = inputImage._buffer;
const uint8_t* buffer_u = buffer_y + size_y;
const uint8_t* buffer_v = buffer_u + size_uv;
// TODO(mikhal): Do we need an align stride?
int ret = _decodedImage.CreateFrame(size_y, buffer_y,
size_uv, buffer_u,
size_uv, buffer_v,
_width, _height,
_width, half_width, half_width);
_decodedImage.CreateEmptyFrame(_width, _height,
_width, half_width, half_width);
// Converting from buffer to plane representation.
int ret = ConvertToI420(kI420, inputImage._buffer, 0, 0, _width, _height,
0, kRotateNone, &_decodedImage);
if (ret < 0) {
return WEBRTC_VIDEO_CODEC_MEMORY;
}

View File

@ -255,6 +255,9 @@ UnitTest::Setup()
unsigned int frameLength = 0;
int i=0;
_inputVideoBuffer.CreateEmptyFrame(_inst.width, _inst.height, _inst.width,
(_inst.width + 1) / 2,
(_inst.width + 1) / 2);
while (frameLength == 0)
{
if (i > 0)
@ -262,13 +265,8 @@ UnitTest::Setup()
// Insert yet another frame
ASSERT_TRUE(fread(_refFrame, 1, _lengthSourceFrame,
_sourceFile) == _lengthSourceFrame);
_inputVideoBuffer.CreateFrame(size_y, _refFrame,
size_uv, _refFrame + size_y,
size_uv, _refFrame + size_y + size_uv,
_inst.width, _inst.height,
_inst.width,
(_inst.width + 1) / 2,
(_inst.width + 1) / 2);
EXPECT_EQ(0, ConvertToI420(kI420, _refFrame, 0, 0, _width, _height,
0, kRotateNone, &_inputVideoBuffer));
_encoder->Encode(_inputVideoBuffer, NULL, NULL);
ASSERT_TRUE(WaitForEncodedFrame() > 0);
}

View File

@ -144,11 +144,7 @@ int SequenceCoder(webrtc::test::CommandLineParser parser) {
unsigned int length = webrtc::CalcBufferSize(webrtc::kI420, width, height);
webrtc::scoped_array<uint8_t> frame_buffer(new uint8_t[length]);
int half_height = (height + 1) / 2;
int half_width = (width + 1) / 2;
int size_y = width * height;
int size_uv = half_width * half_height;
// Set and register callbacks.
Vp8SequenceCoderEncodeCallback encoder_callback(encoded_file);
encoder->RegisterEncodeCompleteCallback(&encoder_callback);
@ -159,17 +155,15 @@ int SequenceCoder(webrtc::test::CommandLineParser parser) {
int64_t starttime = webrtc::TickTime::MillisecondTimestamp();
int frame_cnt = 1;
int frames_processed = 0;
input_frame.CreateEmptyFrame(width, height, width, half_width, half_width);
while (!feof(input_file) &&
(num_frames == -1 || frames_processed < num_frames)) {
if (fread(frame_buffer.get(), 1, length, input_file) != length)
continue;
if (frame_cnt >= start_frame) {
input_frame.CreateFrame(size_y, frame_buffer.get(),
size_uv, frame_buffer.get() + size_y,
size_uv, frame_buffer.get() + size_y +
size_uv,
width, height,
width, half_width, half_width);
webrtc::ConvertToI420(webrtc::kI420, frame_buffer.get(), 0, 0,
width, height, 0, webrtc::kRotateNone,
&input_frame);
encoder->Encode(input_frame, NULL, NULL);
decoder->Decode(encoder_callback.encoded_image(), false, NULL);
++frames_processed;

View File

@ -8,8 +8,9 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include "unit_test.h"
#include "video_processing.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/video_processing/main/interface/video_processing.h"
#include "webrtc/modules/video_processing/main/test/unit_test/unit_test.h"
using namespace webrtc;
@ -22,12 +23,9 @@ TEST_F(VideoProcessingModuleTest, BrightnessDetection)
while (fread(video_buffer.get(), 1, _frame_length, _sourceFile) ==
_frame_length)
{
_videoFrame.CreateFrame(_size_y, video_buffer.get(),
_size_uv, video_buffer.get() + _size_y,
_size_uv, video_buffer.get() + _size_y +
_size_uv,
_width, _height,
_width, _half_width, _half_width);
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
_width, _height,
0, kRotateNone, &_videoFrame));
frameNum++;
VideoProcessingModule::FrameStats stats;
ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
@ -53,12 +51,9 @@ TEST_F(VideoProcessingModuleTest, BrightnessDetection)
_frame_length &&
frameNum < 300)
{
_videoFrame.CreateFrame(_size_y, video_buffer.get(),
_size_uv, video_buffer.get() + _size_y,
_size_uv, video_buffer.get() + _size_y +
_size_uv,
_width, _height,
_width, _half_width, _half_width);
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
_width, _height,
0, kRotateNone, &_videoFrame));
frameNum++;
WebRtc_UWord8* frame = _videoFrame.buffer(kYPlane);
@ -96,12 +91,9 @@ TEST_F(VideoProcessingModuleTest, BrightnessDetection)
while (fread(video_buffer.get(), 1, _frame_length, _sourceFile) ==
_frame_length && frameNum < 300)
{
_videoFrame.CreateFrame(_size_y, video_buffer.get(),
_size_uv, video_buffer.get() + _size_y,
_size_uv, video_buffer.get() + _size_y +
_size_uv,
_width, _height,
_width, _half_width, _half_width);
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
_width, _height,
0, kRotateNone, &_videoFrame));
frameNum++;
WebRtc_UWord8* y_plane = _videoFrame.buffer(kYPlane);

View File

@ -43,12 +43,10 @@ TEST_F(VideoProcessingModuleTest, ColorEnhancement)
while (fread(video_buffer.get(), 1, _frame_length, _sourceFile) ==
_frame_length)
{
_videoFrame.CreateFrame(_size_y, video_buffer.get(),
_size_uv, video_buffer.get() + _size_y,
_size_uv, video_buffer.get() + _size_y +
_size_uv,
_width, _height,
_width, _half_width, _half_width);
// Using ConvertToI420 to add stride to the image.
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
_width, _height,
0, kRotateNone, &_videoFrame));
frameNum++;
t0 = TickTime::Now();
ASSERT_EQ(0, VideoProcessingModule::ColorEnhancement(&_videoFrame));
@ -84,26 +82,23 @@ TEST_F(VideoProcessingModuleTest, ColorEnhancement)
ASSERT_EQ(refLen, testLen) << "File lengths differ.";
I420VideoFrame refVideoFrame;
refVideoFrame.CreateEmptyFrame(_width, _height,
_width, _half_width, _half_width);
// Compare frame-by-frame.
scoped_array<uint8_t> ref_buffer(new uint8_t[_frame_length]);
while (fread(video_buffer.get(), 1, _frame_length, modFile) ==
_frame_length)
{
_videoFrame.CreateFrame(_size_y, video_buffer.get(),
_size_uv, video_buffer.get() + _size_y,
_size_uv, video_buffer.get() + _size_y +
_size_uv,
_width, _height,
_width, _half_width, _half_width);
ASSERT_EQ(_frame_length, fread(ref_buffer.get(), 1, _frame_length,
refFile));
refVideoFrame.CreateFrame(_size_y, ref_buffer.get(),
_size_uv, ref_buffer.get() + _size_y,
_size_uv, ref_buffer.get() + _size_y +
_size_uv,
_width, _height,
_width, _half_width, _half_width);
// Using ConvertToI420 to add stride to the image.
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
_width, _height,
0, kRotateNone, &_videoFrame));
ASSERT_EQ(_frame_length, fread(ref_buffer.get(), 1, _frame_length,
refFile));
EXPECT_EQ(0, ConvertToI420(kI420, ref_buffer.get(), 0, 0,
_width, _height,
0, kRotateNone, &refVideoFrame));
EXPECT_EQ(0, memcmp(_videoFrame.buffer(kYPlane),
refVideoFrame.buffer(kYPlane),
_size_y));
@ -126,11 +121,11 @@ TEST_F(VideoProcessingModuleTest, ColorEnhancement)
memset(testFrame.get(), 128, _frame_length);
I420VideoFrame testVideoFrame;
testVideoFrame.CreateFrame(_size_y, testFrame.get(),
_size_uv, testFrame.get() + _size_y,
_size_uv, testFrame.get() + _size_y + _size_uv,
_width, _height,
_width, _half_width, _half_width);
testVideoFrame.CreateEmptyFrame(_width, _height,
_width, _half_width, _half_width);
EXPECT_EQ(0, ConvertToI420(kI420, testFrame.get(), 0, 0,
_width, _height, 0, kRotateNone,
&testVideoFrame));
ASSERT_EQ(0, VideoProcessingModule::ColorEnhancement(&testVideoFrame));

View File

@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "modules/video_processing/main/interface/video_processing.h"
#include "modules/video_processing/main/source/content_analysis.h"
#include "modules/video_processing/main/test/unit_test/unit_test.h"
@ -27,12 +28,10 @@ TEST_F(VideoProcessingModuleTest, ContentAnalysis)
while (fread(video_buffer.get(), 1, _frame_length, _sourceFile)
== _frame_length)
{
_videoFrame.CreateFrame(_size_y, video_buffer.get(),
_size_uv, video_buffer.get() + _size_y,
_size_uv, video_buffer.get() + _size_y +
_size_uv,
_width, _height,
_width, _half_width, _half_width);
// Using ConvertToI420 to add stride to the image.
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
_width, _height,
0, kRotateNone, &_videoFrame));
_cM_c = _ca_c.ComputeContentMetrics(_videoFrame);
_cM_SSE = _ca_sse.ComputeContentMetrics(_videoFrame);

View File

@ -11,11 +11,11 @@
#include <cstdio>
#include <cstdlib>
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "modules/video_processing/main/interface/video_processing.h"
#include "modules/video_processing/main/test/unit_test/unit_test.h"
#include "system_wrappers/interface/tick_util.h"
#include "testsupport/fileutils.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/video_processing/main/interface/video_processing.h"
#include "webrtc/modules/video_processing/main/test/unit_test/unit_test.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
#include "webrtc/test/testsupport/fileutils.h"
namespace webrtc {
@ -56,12 +56,9 @@ TEST_F(VideoProcessingModuleTest, Deflickering)
_frame_length)
{
frameNum++;
_videoFrame.CreateFrame(_size_y, video_buffer.get(),
_size_uv, video_buffer.get() + _size_y,
_size_uv, video_buffer.get() + _size_y +
_size_uv,
_width, _height,
_width, _half_width, _half_width);
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
_width, _height,
0, kRotateNone, &_videoFrame));
_videoFrame.set_timestamp(timeStamp);
t0 = TickTime::Now();

View File

@ -52,12 +52,9 @@ TEST_F(VideoProcessingModuleTest, Denoising)
while (fread(video_buffer.get(), 1, _frame_length, _sourceFile) ==
_frame_length)
{
_videoFrame.CreateFrame(_size_y, video_buffer.get(),
_size_uv, video_buffer.get() + _size_y,
_size_uv,
video_buffer.get() + _size_y + _size_uv,
_width, _height,
_width, _half_width, _half_width);
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
_width, _height,
0, kRotateNone, &_videoFrame));
frameNum++;
WebRtc_UWord8* sourceBuffer = _videoFrame.buffer(kYPlane);

View File

@ -95,11 +95,9 @@ TEST_F(VideoProcessingModuleTest, HandleBadStats)
scoped_array<uint8_t> video_buffer(new uint8_t[_frame_length]);
ASSERT_EQ(_frame_length, fread(video_buffer.get(), 1, _frame_length,
_sourceFile));
_videoFrame.CreateFrame(_size_y, video_buffer.get(),
_size_uv, video_buffer.get() + _size_y,
_size_uv, video_buffer.get() + _size_y + _size_uv,
_width, _height,
_width, _half_width, _half_width);
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
_width, _height,
0, kRotateNone, &_videoFrame));
EXPECT_EQ(-1, _vpm->Deflickering(&_videoFrame, &stats));
@ -139,11 +137,9 @@ TEST_F(VideoProcessingModuleTest, IdenticalResultsAfterReset)
scoped_array<uint8_t> video_buffer(new uint8_t[_frame_length]);
ASSERT_EQ(_frame_length, fread(video_buffer.get(), 1, _frame_length,
_sourceFile));
ASSERT_EQ(0, _videoFrame.CreateFrame(_size_y, video_buffer.get(),
_size_uv, video_buffer.get() + _size_y,
_size_uv, video_buffer.get() + _size_y + _size_uv,
_width, _height,
_width, _half_width, _half_width));
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
_width, _height,
0, kRotateNone, &_videoFrame));
ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
ASSERT_EQ(0, videoFrame2.CopyFrame(_videoFrame));
ASSERT_EQ(0, _vpm->Deflickering(&_videoFrame, &stats));
@ -155,11 +151,10 @@ TEST_F(VideoProcessingModuleTest, IdenticalResultsAfterReset)
ASSERT_EQ(_frame_length, fread(video_buffer.get(), 1, _frame_length,
_sourceFile));
_videoFrame.CreateFrame(_size_y, video_buffer.get(),
_size_uv, video_buffer.get() + _size_y,
_size_uv, video_buffer.get() + _size_y + _size_uv,
_width, _height,
_width, _half_width, _half_width);
// Using ConvertToI420 to add stride to the image.
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
_width, _height,
0, kRotateNone, &_videoFrame));
videoFrame2.CopyFrame(_videoFrame);
EXPECT_TRUE(CompareFrames(_videoFrame, videoFrame2));
ASSERT_GE(_vpm->Denoising(&_videoFrame), 0);
@ -169,11 +164,9 @@ TEST_F(VideoProcessingModuleTest, IdenticalResultsAfterReset)
ASSERT_EQ(_frame_length, fread(video_buffer.get(), 1, _frame_length,
_sourceFile));
_videoFrame.CreateFrame(_size_y, video_buffer.get(),
_size_uv, video_buffer.get() + _size_y,
_size_uv, video_buffer.get() + _size_y + _size_uv,
_width, _height,
_width, _half_width, _half_width);
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
_width, _height,
0, kRotateNone, &_videoFrame));
ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
videoFrame2.CopyFrame(_videoFrame);
ASSERT_EQ(0, _vpm->BrightnessDetection(_videoFrame, stats));
@ -188,11 +181,9 @@ TEST_F(VideoProcessingModuleTest, FrameStats)
scoped_array<uint8_t> video_buffer(new uint8_t[_frame_length]);
ASSERT_EQ(_frame_length, fread(video_buffer.get(), 1, _frame_length,
_sourceFile));
_videoFrame.CreateFrame(_size_y, video_buffer.get(),
_size_uv, video_buffer.get() + _size_y,
_size_uv, video_buffer.get() + _size_y + _size_uv,
_width, _height,
_width, _half_width, _half_width);
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
_width, _height,
0, kRotateNone, &_videoFrame));
EXPECT_FALSE(_vpm->ValidFrameStats(stats));
EXPECT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
@ -259,15 +250,13 @@ TEST_F(VideoProcessingModuleTest, Resampler)
_vpm->EnableTemporalDecimation(false);
// Reading test frame
I420VideoFrame sourceFrame;
scoped_array<uint8_t> video_buffer(new uint8_t[_frame_length]);
ASSERT_EQ(_frame_length, fread(video_buffer.get(), 1, _frame_length,
_sourceFile));
sourceFrame.CreateFrame(_size_y, video_buffer.get(),
_size_uv, video_buffer.get() + _size_y,
_size_uv, video_buffer.get() + _size_y + _size_uv,
_width, _height,
_width, _half_width, _half_width);
// Using ConvertToI420 to add stride to the image.
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
_width, _height,
0, kRotateNone, &_videoFrame));
for (WebRtc_UWord32 runIdx = 0; runIdx < NumRuns; runIdx++)
{
@ -275,24 +264,24 @@ TEST_F(VideoProcessingModuleTest, Resampler)
t0 = TickTime::Now();
// Init the sourceFrame with a timestamp.
sourceFrame.set_render_time_ms(t0.MillisecondTimestamp());
sourceFrame.set_timestamp(t0.MillisecondTimestamp() * 90);
_videoFrame.set_render_time_ms(t0.MillisecondTimestamp());
_videoFrame.set_timestamp(t0.MillisecondTimestamp() * 90);
// Test scaling to different sizes: source is of |width|/|height| = 352/288.
// Scaling mode in VPM is currently fixed to kScaleBox (mode = 3).
TestSize(sourceFrame, 100, 50, 3, 24.0, _vpm);
TestSize(sourceFrame, 352/4, 288/4, 3, 25.2, _vpm);
TestSize(sourceFrame, 352/2, 288/2, 3, 28.1, _vpm);
TestSize(sourceFrame, 352, 288, 3, -1, _vpm); // no resampling.
TestSize(sourceFrame, 2*352, 2*288, 3, 32.2, _vpm);
TestSize(sourceFrame, 400, 256, 3, 31.3, _vpm);
TestSize(sourceFrame, 480, 640, 3, 32.15, _vpm);
TestSize(sourceFrame, 960, 720, 3, 32.2, _vpm);
TestSize(sourceFrame, 1280, 720, 3, 32.15, _vpm);
TestSize(_videoFrame, 100, 50, 3, 24.0, _vpm);
TestSize(_videoFrame, 352/4, 288/4, 3, 25.2, _vpm);
TestSize(_videoFrame, 352/2, 288/2, 3, 28.1, _vpm);
TestSize(_videoFrame, 352, 288, 3, -1, _vpm); // no resampling.
TestSize(_videoFrame, 2*352, 2*288, 3, 32.2, _vpm);
TestSize(_videoFrame, 400, 256, 3, 31.3, _vpm);
TestSize(_videoFrame, 480, 640, 3, 32.15, _vpm);
TestSize(_videoFrame, 960, 720, 3, 32.2, _vpm);
TestSize(_videoFrame, 1280, 720, 3, 32.15, _vpm);
// Upsampling to odd size.
TestSize(sourceFrame, 501, 333, 3, 32.05, _vpm);
TestSize(_videoFrame, 501, 333, 3, 32.05, _vpm);
// Downsample to odd size.
TestSize(sourceFrame, 281, 175, 3, 29.3, _vpm);
TestSize(_videoFrame, 281, 175, 3, 29.3, _vpm);
// stop timer
t1 = TickTime::Now();

View File

@ -113,22 +113,19 @@ int CalculateMetrics(VideoMetricsType video_metrics_type,
scoped_array<uint8_t> ref_buffer(new uint8_t[frame_length]);
scoped_array<uint8_t> test_buffer(new uint8_t[frame_length]);
int size_y = width * height;
int size_uv = ((width + 1 ) / 2) * ((height + 1) / 2);
// Set decoded image parameters.
int half_width = (width + 1) / 2;
ref_frame.CreateEmptyFrame(width, height, width, half_width, half_width);
test_frame.CreateEmptyFrame(width, height, width, half_width, half_width);
int ref_bytes = fread(ref_buffer.get(), 1, frame_length, ref_fp);
int test_bytes = fread(test_buffer.get(), 1, frame_length, test_fp);
while (ref_bytes == frame_length && test_bytes == frame_length) {
ref_frame.CreateFrame(size_y, ref_buffer.get(),
size_uv, ref_buffer.get() + size_y,
size_uv, ref_buffer.get() + size_y + size_uv,
width, height,
width, (width + 1) / 2, (width + 1) / 2);
test_frame.CreateFrame(size_y, test_buffer.get(),
size_uv, test_buffer.get() + size_y,
size_uv, test_buffer.get() + size_y + size_uv,
width, height,
width, (width + 1) / 2, (width + 1) / 2);
// Converting from buffer to plane representation.
ConvertToI420(kI420, ref_buffer.get(), 0, 0, width, height, 0,
kRotateNone, &ref_frame);
ConvertToI420(kI420, test_buffer.get(), 0, 0, width, height, 0,
kRotateNone, &test_frame);
switch (video_metrics_type) {
case kPSNR:
CalculateFrame(kPSNR, &ref_frame, &test_frame, frame_number,

View File

@ -8,13 +8,13 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include "video_engine/test/libvietest/include/tb_I420_codec.h"
#include "webrtc/video_engine/test/libvietest/include/tb_I420_codec.h"
#include <string.h>
#include <stdio.h>
#include <assert.h>
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
TbI420Encoder::TbI420Encoder() :
_inited(false), _encodedImage(), _encodedCompleteCallback(NULL)
@ -232,6 +232,9 @@ WebRtc_Word32 TbI420Decoder::InitDecode(const webrtc::VideoCodec* inst,
}
_width = inst->width;
_height = inst->height;
int half_width = (_width + 1 ) / 2 ;
_decodedImage.CreateEmptyFrame(_width, _height,
_width, half_width, half_width);
_inited = true;
return WEBRTC_VIDEO_CODEC_OK;
}
@ -261,15 +264,10 @@ WebRtc_Word32 TbI420Decoder::Decode(
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}
int size_y = _width * _height;
int size_uv = ((_width + 1 ) / 2) * ((_height + 1) / 2);
int ret = _decodedImage.CreateFrame(size_y, inputImage._buffer,
size_uv, inputImage._buffer + size_y,
size_uv, inputImage._buffer + size_y +
size_uv,
_width, _height,
_width, (_width + 1 ) / 2,
(_width + 1 ) / 2);
int ret = ConvertToI420(webrtc::kI420, inputImage._buffer, 0, 0,
_width, _height,
0, webrtc::kRotateNone, &_decodedImage);
if (ret < 0)
return WEBRTC_VIDEO_CODEC_ERROR;
_decodedImage.set_timestamp(inputImage._timeStamp);

View File

@ -13,13 +13,14 @@
#ifdef WEBRTC_VIDEO_ENGINE_FILE_API
#include "video_engine/vie_file_image.h"
#include "webrtc/video_engine/vie_file_image.h"
#include <stdio.h> // NOLINT
#include "common_video/interface/video_image.h"
#include "common_video/jpeg/include/jpeg.h"
#include "system_wrappers/interface/trace.h"
#include "webrtc/common_video/interface/video_image.h"
#include "webrtc/common_video/jpeg/include/jpeg.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/system_wrappers/interface/trace.h"
namespace webrtc {
@ -91,16 +92,11 @@ int ViEFileImage::ConvertPictureToI420VideoFrame(int engine_id,
const ViEPicture& picture,
I420VideoFrame* video_frame) {
int half_width = (picture.width + 1) / 2;
int half_height = (picture.height + 1) / 2;
int size_uv = half_width * half_height;
int size_y = picture.width * picture.height;
return video_frame->CreateFrame(size_y, picture.data,
size_uv, picture.data + size_y,
size_uv, picture.data + size_y +
size_uv,
picture.width, picture.height,
picture.width, half_width, half_width);
return 0;
video_frame->CreateEmptyFrame(picture.width, picture.height,
picture.width, half_width, half_width);
return ConvertToI420(kI420, picture.data, 0, 0,
picture.width, picture.height,
0, kRotateNone, video_frame);
}
} // namespace webrtc