Switching to I420VideoFrame

Review URL: https://webrtc-codereview.appspot.com/922004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@2983 4adac7df-926f-26a2-2b94-8c16560cd09d
mikhal@webrtc.org 2012-10-24 18:33:04 +00:00
parent 6392657643
commit 9fedff7c17
152 changed files with 2076 additions and 1862 deletions

View File

@ -28,6 +28,9 @@
'jpeg/include',
'libyuv/include',
],
'dependencies': [
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
],
'direct_dependent_settings': {
'include_dirs': [
'interface',

View File

@ -35,6 +35,9 @@ int I420VideoFrame::CreateEmptyFrame(int width, int height,
y_plane_.CreateEmptyPlane(size_y, stride_y, size_y);
u_plane_.CreateEmptyPlane(size_u, stride_u, size_u);
v_plane_.CreateEmptyPlane(size_v, stride_v, size_v);
// Creating empty frame - reset all values.
timestamp_ = 0;
render_time_ms_ = 0;
return 0;
}
@ -128,7 +131,7 @@ int I420VideoFrame::set_height(int height) {
return 0;
}
bool I420VideoFrame::IsZeroSize() {
bool I420VideoFrame::IsZeroSize() const {
return (y_plane_.IsZeroSize() && u_plane_.IsZeroSize() &&
v_plane_.IsZeroSize());
}

View File

@ -24,7 +24,7 @@ enum PlaneType {
kYPlane = 0,
kUPlane = 1,
kVPlane = 2,
KNumOfPlanes = 3
kNumOfPlanes = 3
};
class I420VideoFrame {
@ -94,7 +94,7 @@ class I420VideoFrame {
int64_t render_time_ms() const {return render_time_ms_;}
// Return true if underlying plane buffers are of zero size, false if not.
bool IsZeroSize();
bool IsZeroSize() const;
// Reset underlying plane buffers sizes to 0. This function doesn't
// clear memory.
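
For reference, the new I420VideoFrame stores each plane with its own buffer and stride instead of one contiguous allocation. A minimal usage sketch of the accessors above (the function name and fill value are arbitrary, not part of the commit):

#include <string.h>

#include "common_video/interface/i420_video_frame.h"

// Allocate a frame and fill all three planes with mid-gray.
void FillWithGray(webrtc::I420VideoFrame* frame, int width, int height) {
  // Strides follow the plane width; chroma planes are half-resolution.
  frame->CreateEmptyFrame(width, height, width,
                          (width + 1) / 2, (width + 1) / 2);
  int half_width = (width + 1) / 2;
  int half_height = (height + 1) / 2;
  // With stride == width each plane is contiguous, so memset is safe here.
  memset(frame->buffer(webrtc::kYPlane), 128, width * height);
  memset(frame->buffer(webrtc::kUPlane), 128, half_width * half_height);
  memset(frame->buffer(webrtc::kVPlane), 128, half_width * half_height);
}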

View File

@ -12,7 +12,7 @@
#define WEBRTC_COMMON_VIDEO_JPEG
#include "typedefs.h"
#include "modules/interface/module_common_types.h" // VideoFrame
#include "common_video/interface/i420_video_frame.h"
#include "common_video/interface/video_image.h" // EncodedImage
// jpeg forward declaration
@ -21,7 +21,7 @@ struct jpeg_compress_struct;
namespace webrtc
{
// TODO(mikhal): Move this to LibYuv wrappar, when LibYuv will have a JPG
// TODO(mikhal): Move this to LibYuv wrapper, when LibYuv will have a JPG
// Encode.
class JpegEncoder
{
@ -46,7 +46,7 @@ public:
// Output:
// - 0 : OK
// - (-1) : Error
WebRtc_Word32 Encode(const VideoFrame& inputImage);
WebRtc_Word32 Encode(const I420VideoFrame& inputImage);
private:
@ -67,6 +67,6 @@ private:
// - (-1) : Error
// - (-2) : Unsupported format
int ConvertJpegToI420(const EncodedImage& input_image,
VideoFrame* output_image);
I420VideoFrame* output_image);
}
#endif /* WEBRTC_COMMON_VIDEO_JPEG */
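
With the signature change, a JPEG round trip goes through I420VideoFrame directly. A hedged sketch (the helper name is hypothetical; error handling is minimal):

#include "common_video/interface/i420_video_frame.h"
#include "common_video/interface/video_image.h"
#include "common_video/jpeg/include/jpeg.h"

// Decode |input| (a complete JPEG bitstream) and re-encode it to |out_file|.
int ReencodeJpeg(const webrtc::EncodedImage& input, const char* out_file) {
  webrtc::I420VideoFrame decoded;
  // ConvertJpegToI420 allocates the frame via CreateEmptyFrame internally.
  if (webrtc::ConvertJpegToI420(input, &decoded) != 0)
    return -1;  // -1: error, -2: unsupported format.
  webrtc::JpegEncoder encoder;
  if (encoder.SetFileName(out_file) != 0)
    return -1;
  return encoder.Encode(decoded);
}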

View File

@ -84,21 +84,21 @@ JpegEncoder::SetFileName(const char* fileName)
WebRtc_Word32
JpegEncoder::Encode(const VideoFrame& inputImage)
JpegEncoder::Encode(const I420VideoFrame& inputImage)
{
if (inputImage.Buffer() == NULL || inputImage.Size() == 0)
if (inputImage.IsZeroSize())
{
return -1;
}
if (inputImage.Width() < 1 || inputImage.Height() < 1)
if (inputImage.width() < 1 || inputImage.height() < 1)
{
return -1;
}
FILE* outFile = NULL;
const WebRtc_UWord32 width = inputImage.Width();
const WebRtc_UWord32 height = inputImage.Height();
const int width = inputImage.width();
const int height = inputImage.height();
// Set error handler
myErrorMgr jerr;
@ -141,9 +141,15 @@ JpegEncoder::Encode(const VideoFrame& inputImage)
_cinfo->comp_info[2].h_samp_factor = 1; // V
_cinfo->comp_info[2].v_samp_factor = 1;
_cinfo->raw_data_in = TRUE;
// Converting to a buffer
// TODO(mikhal): This is a tmp implementation. Will update to use LibYuv
// Encode when that becomes available.
unsigned int length = CalcBufferSize(kI420, width, height);
scoped_array<uint8_t> image_buffer(new uint8_t[length]);
ExtractBuffer(inputImage, length, image_buffer.get());
int height16 = (height + 15) & ~15;
WebRtc_UWord8* imgPtr = image_buffer.get();
WebRtc_UWord32 height16 = (height + 15) & ~15;
WebRtc_UWord8* imgPtr = inputImage.Buffer();
WebRtc_UWord8* origImagePtr = NULL;
if (height16 != height)
{
@ -151,7 +157,7 @@ JpegEncoder::Encode(const VideoFrame& inputImage)
WebRtc_UWord32 requiredSize = CalcBufferSize(kI420, width, height16);
origImagePtr = new WebRtc_UWord8[requiredSize];
memset(origImagePtr, 0, requiredSize);
memcpy(origImagePtr, inputImage.Buffer(), inputImage.Length());
memcpy(origImagePtr, image_buffer.get(), length);
imgPtr = origImagePtr;
}
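
The (height + 15) & ~15 rounding above pads the frame height to a whole number of 16-row blocks, since libjpeg consumes raw 4:2:0 data in 16-row batches. A tiny sketch of the arithmetic (the helper name is hypothetical):

static int AlignHeightTo16(int height) {
  // Rounds up to the next multiple of 16, e.g. 288 -> 288, 100 -> 112.
  return (height + 15) & ~15;
}
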
@ -164,7 +170,7 @@ JpegEncoder::Encode(const VideoFrame& inputImage)
data[1] = u;
data[2] = v;
WebRtc_UWord32 i, j;
int i, j;
for (j = 0; j < height; j += 16)
{
@ -197,7 +203,7 @@ JpegEncoder::Encode(const VideoFrame& inputImage)
}
int ConvertJpegToI420(const EncodedImage& input_image,
VideoFrame* output_image) {
I420VideoFrame* output_image) {
if (output_image == NULL)
return -1;
@ -211,11 +217,8 @@ int ConvertJpegToI420(const EncodedImage& input_image,
return -2; // not supported.
int width = jpeg_decoder.GetWidth();
int height = jpeg_decoder.GetHeight();
int req_size = CalcBufferSize(kI420, width, height);
output_image->VerifyAndAllocate(req_size);
output_image->SetWidth(width);
output_image->SetHeight(height);
output_image->SetLength(req_size);
output_image->CreateEmptyFrame(width, height, width,
(width + 1) / 2, (width + 1) / 2);
return ConvertToI420(kMJPG,
input_image._buffer,
0, 0, // no cropping

View File

@ -11,16 +11,17 @@
#include <cstdio>
#include <string>
#include "common_video/jpeg/include/jpeg.h"
#include "common_video/interface/video_image.h"
#include "common_video/jpeg/include/jpeg.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "gtest/gtest.h"
#include "testsupport/fileutils.h"
#include "modules/interface/module_common_types.h"
namespace webrtc {
const unsigned int kImageWidth = 640;
const unsigned int kImageHeight = 480;
const int kImageWidth = 640;
const int kImageHeight = 480;
class JpegTest: public testing::Test {
protected:
@ -72,38 +73,30 @@ class JpegTest: public testing::Test {
TEST_F(JpegTest, Decode) {
encoded_buffer_ = ReadEncodedImage(input_filename_);
VideoFrame image_buffer;
I420VideoFrame image_buffer;
EXPECT_EQ(0, ConvertJpegToI420(*encoded_buffer_, &image_buffer));
EXPECT_GT(image_buffer.Length(), 0u);
EXPECT_EQ(kImageWidth, image_buffer.Width());
EXPECT_EQ(kImageHeight, image_buffer.Height());
image_buffer.Free();
EXPECT_FALSE(image_buffer.IsZeroSize());
EXPECT_EQ(kImageWidth, image_buffer.width());
EXPECT_EQ(kImageHeight, image_buffer.height());
}
TEST_F(JpegTest, EncodeInvalidInputs) {
VideoFrame empty;
empty.SetWidth(164);
empty.SetHeight(164);
I420VideoFrame empty;
empty.set_width(164);
empty.set_height(164);
EXPECT_EQ(-1, encoder_->SetFileName(0));
// Test empty (null) frame.
EXPECT_EQ(-1, encoder_->Encode(empty));
empty.VerifyAndAllocate(0);
// Create empty frame (allocate memory) - arbitrary dimensions.
empty.CreateEmptyFrame(10, 10, 10, 5, 5);
empty.ResetSize();
EXPECT_EQ(-1, encoder_->Encode(empty));
empty.VerifyAndAllocate(10);
empty.SetHeight(0);
EXPECT_EQ(-1, encoder_->Encode(empty));
empty.SetHeight(164);
empty.SetWidth(0);
EXPECT_EQ(-1, encoder_->Encode(empty));
empty.Free();
}
TEST_F(JpegTest, Encode) {
// Decode our input image then encode it again to a new file:
encoded_buffer_ = ReadEncodedImage(input_filename_);
VideoFrame image_buffer;
I420VideoFrame image_buffer;
EXPECT_EQ(0, ConvertJpegToI420(*encoded_buffer_, &image_buffer));
EXPECT_EQ(0, encoder_->SetFileName(encoded_filename_.c_str()));
@ -111,13 +104,11 @@ TEST_F(JpegTest, Encode) {
// Save decoded image to file.
FILE* save_file = fopen(decoded_filename_.c_str(), "wb");
if (fwrite(image_buffer.Buffer(), 1,
image_buffer.Length(), save_file) != image_buffer.Length()) {
if (PrintI420VideoFrame(image_buffer, save_file)) {
return;
}
fclose(save_file);
image_buffer.Free();
}
} // namespace webrtc

View File

@ -15,6 +15,7 @@
#ifndef WEBRTC_COMMON_VIDEO_LIBYUV_INCLUDE_SCALER_H_
#define WEBRTC_COMMON_VIDEO_LIBYUV_INCLUDE_SCALER_H_
#include "common_video/interface/i420_video_frame.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "typedefs.h"
@ -47,8 +48,8 @@ class Scaler {
// Return value: 0 - OK,
// -1 - parameter error
// -2 - scaler not set
int Scale(const VideoFrame& src_frame,
VideoFrame* dst_frame);
int Scale(const I420VideoFrame& src_frame,
I420VideoFrame* dst_frame);
private:
// Determine if the VideoTypes are currently supported.
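
Note that Scale() now sizes the destination itself through CreateEmptyFrame, so callers no longer pre-allocate dst_frame. A minimal sketch of the Set/Scale flow (dimensions arbitrary, not part of the commit):

#include "common_video/interface/i420_video_frame.h"
#include "common_video/libyuv/include/scaler.h"

// Downscale a 352x288 I420 frame to 176x144.
int Downscale(const webrtc::I420VideoFrame& src, webrtc::I420VideoFrame* dst) {
  webrtc::Scaler scaler;
  if (scaler.Set(352, 288, 176, 144,
                 webrtc::kI420, webrtc::kI420, webrtc::kScaleBox) != 0)
    return -1;
  return scaler.Scale(src, dst);  // 0: OK, -1: bad parameter, -2: not set.
}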

View File

@ -15,15 +15,14 @@
#ifndef WEBRTC_COMMON_VIDEO_LIBYUV_INCLUDE_WEBRTC_LIBYUV_H_
#define WEBRTC_COMMON_VIDEO_LIBYUV_INCLUDE_WEBRTC_LIBYUV_H_
#include <stdio.h>
#include "common_types.h" // RawVideoTypes.
#include "modules/interface/module_common_types.h" // VideoFrame
#include "common_video/interface/i420_video_frame.h"
#include "typedefs.h"
namespace webrtc {
// TODO(mikhal): 1. Sync libyuv and WebRtc meaning of stride.
// 2. Reorder parameters for consistency.
// Supported video types.
enum VideoType {
kUnknown,
@ -73,6 +72,24 @@ int AlignInt(int value, int alignment);
// video frame or -1 in case of an error .
int CalcBufferSize(VideoType type, int width, int height);
// TODO(mikhal): Add unit test for these two functions and determine location.
// Print I420VideoFrame to file
// Input:
// - frame : Reference to video frame.
// - file : pointer to file object. It is assumed that the file is
// already open for writing.
// Return value: 0 if OK, < 0 otherwise.
int PrintI420VideoFrame(const I420VideoFrame& frame, FILE* file);
// Extract buffer from I420VideoFrame (consecutive planes, no stride)
// Input:
// - frame : Reference to video frame.
// - size : pointer to the size of the allocated buffer. If size is
// insufficient, an error will be returned.
// - buffer : Pointer to buffer
// Return value: length of buffer if OK, < 0 otherwise.
int ExtractBuffer(const I420VideoFrame& input_frame,
int size, uint8_t* buffer);
// Convert To I420
// Input:
// - src_video_type : Type of input video.
@ -92,25 +109,23 @@ int ConvertToI420(VideoType src_video_type,
int src_width, int src_height,
int sample_size,
VideoRotationMode rotation,
VideoFrame* dst_frame);
I420VideoFrame* dst_frame);
// Convert From I420
// Input:
// - src_frame : Pointer to a source frame.
// - src_stride : Number of bytes in a row of the src Y plane.
// - src_frame : Reference to a source frame.
// - dst_video_type : Type of output video.
// - dst_sample_size : Required only for the parsing of MJPG.
// - dst_frame : Pointer to a destination frame.
// Return value: 0 if OK, < 0 otherwise.
// It is assumed that source and destination have equal height.
int ConvertFromI420(const VideoFrame& src_frame, int src_stride,
int ConvertFromI420(const I420VideoFrame& src_frame,
VideoType dst_video_type, int dst_sample_size,
uint8_t* dst_frame);
// ConvertFrom YV12.
// Interface - same as above.
int ConvertFromYV12(const uint8_t* src_frame, int src_stride,
int ConvertFromYV12(const I420VideoFrame& src_frame,
VideoType dst_video_type, int dst_sample_size,
int width, int height,
uint8_t* dst_frame);
// The following list describes designated conversion functions which
@ -133,17 +148,17 @@ int ConvertNV12ToRGB565(const uint8_t* src_frame,
// - dst_frame : Pointer to a destination frame.
// Return value: 0 if OK, < 0 otherwise.
// It is assumed that src and dst frames have equal dimensions.
int MirrorI420LeftRight(const VideoFrame* src_frame,
VideoFrame* dst_frame);
int MirrorI420UpDown(const VideoFrame* src_frame,
VideoFrame* dst_frame);
int MirrorI420LeftRight(const I420VideoFrame* src_frame,
I420VideoFrame* dst_frame);
int MirrorI420UpDown(const I420VideoFrame* src_frame,
I420VideoFrame* dst_frame);
// Compute PSNR for an I420 frame (all planes).
double I420PSNR(const VideoFrame* ref_frame,
const VideoFrame* test_frame);
double I420PSNR(const I420VideoFrame* ref_frame,
const I420VideoFrame* test_frame);
// Compute SSIM for an I420 frame (all planes).
double I420SSIM(const VideoFrame* ref_frame,
const VideoFrame* test_frame);
double I420SSIM(const I420VideoFrame* ref_frame,
const I420VideoFrame* test_frame);
// TODO(mikhal): Remove these functions and keep only the above functionality.
// Compute PSNR for an I420 buffer (all planes).
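
Together these helpers replace the old Buffer()/Length() idioms: ExtractBuffer flattens a frame into one contiguous, stride-free I420 buffer, while PrintI420VideoFrame writes plane by plane, honoring strides. A hedged sketch combining both (the function name is illustrative):

#include <stdio.h>

#include "common_video/interface/i420_video_frame.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "system_wrappers/interface/scoped_ptr.h"

int DumpFrame(const webrtc::I420VideoFrame& frame, const char* path) {
  // Flatten to a contiguous buffer; returns the copied length, or < 0.
  int size = webrtc::CalcBufferSize(webrtc::kI420,
                                    frame.width(), frame.height());
  webrtc::scoped_array<uint8_t> buffer(new uint8_t[size]);
  if (webrtc::ExtractBuffer(frame, size, buffer.get()) < 0)
    return -1;
  // Or write straight from the frame, one stride-aware row at a time.
  FILE* file = fopen(path, "wb");
  if (file == NULL)
    return -1;
  int ret = webrtc::PrintI420VideoFrame(frame, file);
  fclose(file);
  return ret;
}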

View File

@ -11,22 +11,26 @@
#include <math.h>
#include <string.h>
#include "common_video/interface/i420_video_frame.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "gtest/gtest.h"
#include "modules/interface/module_common_types.h" // VideoFrame
#include "system_wrappers/interface/tick_util.h"
#include "system_wrappers/interface/scoped_ptr.h"
#include "testsupport/fileutils.h"
namespace webrtc {
int PrintBuffer(const uint8_t* buffer, int width, int height) {
int PrintBuffer(const uint8_t* buffer, int width, int height, int stride) {
if (buffer == NULL)
return -1;
int k = 0;
int k;
const uint8_t* tmp_buffer = buffer;
for (int i = 0; i < height; i++) {
k = 0;
for (int j = 0; j < width; j++) {
printf("%d ", buffer[k++]);
printf("%d ", tmp_buffer[k++]);
}
tmp_buffer += stride;
printf(" \n");
}
printf(" \n");
@ -34,54 +38,40 @@ int PrintBuffer(const uint8_t* buffer, int width, int height) {
}
int PrintFrame(const VideoFrame* frame, const char* str) {
int PrintFrame(const I420VideoFrame* frame, const char* str) {
if (frame == NULL)
return -1;
printf("%s %dx%d \n", str, frame->Width(), frame->Height());
printf("%s %dx%d \n", str, frame->width(), frame->height());
int ret = 0;
int width = frame->Width();
int height = frame->Height();
ret += PrintBuffer(frame->Buffer(), width, height);
int half_width = (frame->Width() + 1) / 2;
int half_height = (frame->Height() + 1) / 2;
ret += PrintBuffer(frame->Buffer() + width * height, half_width, half_height);
ret += PrintBuffer(frame->Buffer() + width * height +
half_width * half_height, half_width, half_height);
for (int plane_num = 0; plane_num < kNumOfPlanes; ++plane_num) {
PlaneType plane_type = static_cast<PlaneType>(plane_num);
int width = (plane_num ? (frame->width() + 1) / 2 : frame->width());
int height = (plane_num ? (frame->height() + 1) / 2 : frame->height());
ret += PrintBuffer(frame->buffer(plane_type), width, height,
frame->stride(plane_type));
}
return ret;
}
// Create an image from a YUV frame. Every plane value starts with a start
// value, and will be set to increasing values.
// plane_offset - prep for PlaneType.
void CreateImage(VideoFrame* frame, int plane_offset[3]) {
void CreateImage(I420VideoFrame* frame, int plane_offset[kNumOfPlanes]) {
if (frame == NULL)
return;
int width = frame->Width();
int height = frame->Height();
int half_width = (frame->Width() + 1) / 2;
int half_height = (frame->Height() + 1) / 2;
uint8_t *data = frame->Buffer();
// Y plane.
for (int i = 0; i < height; i++) {
for (int j = 0; j < width; j++) {
*data = static_cast<uint8_t>((i + plane_offset[0]) + j);
data++;
}
}
// U plane.
for (int i = 0; i < half_height; i++) {
for (int j = 0; j < half_width; j++) {
*data = static_cast<uint8_t>((i + plane_offset[1]) + j);
data++;
}
}
// V Plane.
for (int i = 0; i < half_height; i++) {
for (int j = 0; j < half_width; j++) {
*data = static_cast<uint8_t>((i + plane_offset[2]) + j);
data++;
for (int plane_num = 0; plane_num < kNumOfPlanes; ++plane_num) {
int width = (plane_num != kYPlane ? (frame->width() + 1) / 2 :
frame->width());
int height = (plane_num != kYPlane ? (frame->height() + 1) / 2 :
frame->height());
PlaneType plane_type = static_cast<PlaneType>(plane_num);
uint8_t *data = frame->buffer(plane_type);
for (int i = 0; i < height; i++) {
for (int j = 0; j < width; j++) {
data[j] = static_cast<uint8_t>(i + plane_offset[plane_num] + j);
}
data += frame->stride(plane_type);
}
}
}
@ -98,7 +88,6 @@ class TestLibYuv : public ::testing::Test {
const int frame_length_;
};
// TODO (mikhal): Use scoped_ptr when handling buffers.
TestLibYuv::TestLibYuv()
: source_file_(NULL),
width_(352),
@ -135,179 +124,162 @@ TEST_F(TestLibYuv, ConvertTest) {
double psnr = 0;
VideoFrame orig_frame;
orig_frame.VerifyAndAllocate(frame_length_);
orig_frame.SetWidth(width_);
orig_frame.SetHeight(height_);
EXPECT_GT(fread(orig_frame.Buffer(), 1, frame_length_, source_file_), 0U);
orig_frame.SetLength(frame_length_);
I420VideoFrame orig_frame;
scoped_array<uint8_t> orig_buffer(new uint8_t[frame_length_]);
EXPECT_EQ(fread(orig_buffer.get(), 1, frame_length_, source_file_),
static_cast<unsigned int>(frame_length_));
int size_y = width_ * height_;
int size_uv = ((width_ + 1 ) / 2) * ((height_ + 1) / 2);
orig_frame.CreateFrame(size_y, orig_buffer.get(),
size_uv, orig_buffer.get() + size_y,
size_uv, orig_buffer.get() + size_y + size_uv,
width_, height_,
width_, (width_ + 1) / 2, (width_ + 1) / 2);
printf("\nConvert #%d I420 <-> RGB24\n", j);
scoped_array<uint8_t> res_rgb_buffer2(new uint8_t[width_ * height_ * 3]);
I420VideoFrame res_i420_frame;
res_i420_frame.CreateEmptyFrame(width_, height_, width_,
(width_ + 1) / 2, (width_ + 1) / 2);
EXPECT_EQ(0, ConvertFromI420(orig_frame, kRGB24, 0, res_rgb_buffer2.get()));
// printf("\nConvert #%d I420 <-> RGB24\n", j);
uint8_t* res_rgb_buffer2 = new uint8_t[width_ * height_ * 3];
VideoFrame res_i420_frame;
res_i420_frame.VerifyAndAllocate(frame_length_);
res_i420_frame.SetHeight(height_);
res_i420_frame.SetWidth(width_);
EXPECT_EQ(0, ConvertFromI420(orig_frame, width_, kRGB24, 0,
res_rgb_buffer2));
EXPECT_EQ(0, ConvertToI420(kRGB24, res_rgb_buffer2.get(), 0, 0, width_,
height_, 0, kRotateNone, &res_i420_frame));
EXPECT_EQ(0, ConvertToI420(kRGB24, res_rgb_buffer2, 0, 0, width_, height_,
0, kRotateNone, &res_i420_frame));
if (fwrite(res_i420_frame.Buffer(), 1, frame_length_,
output_file) != static_cast<unsigned int>(frame_length_)) {
if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) {
return;
}
psnr = I420PSNR(&orig_frame, &res_i420_frame);
// Optimization speed-quality trade-off => 45 dB only (platform dependent).
EXPECT_GT(ceil(psnr), 44);
j++;
delete [] res_rgb_buffer2;
// printf("\nConvert #%d I420 <-> UYVY\n", j);
uint8_t* out_uyvy_buffer = new uint8_t[width_ * height_ * 2];
EXPECT_EQ(0, ConvertFromI420(orig_frame, width_,
kUYVY, 0, out_uyvy_buffer));
EXPECT_EQ(0, ConvertToI420(kUYVY, out_uyvy_buffer, 0, 0, width_, height_,
0, kRotateNone, &res_i420_frame));
printf("\nConvert #%d I420 <-> UYVY\n", j);
scoped_array<uint8_t> out_uyvy_buffer(new uint8_t[width_ * height_ * 2]);
EXPECT_EQ(0, ConvertFromI420(orig_frame, kUYVY, 0, out_uyvy_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kUYVY, out_uyvy_buffer.get(), 0, 0, width_,
height_, 0, kRotateNone, &res_i420_frame));
psnr = I420PSNR(&orig_frame, &res_i420_frame);
EXPECT_EQ(48.0, psnr);
if (fwrite(res_i420_frame.Buffer(), 1, frame_length_,
output_file) != static_cast<unsigned int>(frame_length_)) {
return;
if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) {
return;
}
j++;
delete [] out_uyvy_buffer;
// printf("\nConvert #%d I420 <-> I420 \n", j);
uint8_t* out_i420_buffer = new uint8_t[width_ * height_ * 3 / 2 ];
EXPECT_EQ(0, ConvertToI420(kI420, orig_frame.Buffer(), 0, 0, width_, height_,
printf("\nConvert #%d I420 <-> I420 \n", j);
scoped_array<uint8_t> out_i420_buffer(new uint8_t[width_ * height_ * 3 / 2]);
EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer.get(), 0, 0, width_, height_,
0, kRotateNone, &res_i420_frame));
EXPECT_EQ(0, ConvertFromI420(res_i420_frame, width_, kI420, 0,
out_i420_buffer));
if (fwrite(res_i420_frame.Buffer(), 1, frame_length_,
EXPECT_EQ(0, ConvertFromI420(res_i420_frame, kI420, 0,
out_i420_buffer.get()));
if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) {
return;
}
psnr = I420PSNR(orig_buffer.get(), out_i420_buffer.get(), width_, height_);
EXPECT_EQ(48.0, psnr);
j++;
printf("\nConvert #%d I420 <-> YV12\n", j);
scoped_array<uint8_t> outYV120Buffer(new uint8_t[frame_length_]);
scoped_array<uint8_t> res_i420_buffer(new uint8_t[frame_length_]);
I420VideoFrame yv12_frame;
EXPECT_EQ(0, ConvertFromI420(orig_frame, kYV12, 0, outYV120Buffer.get()));
yv12_frame.CreateFrame(size_y, outYV120Buffer.get(),
size_uv, outYV120Buffer.get() + size_y,
size_uv, outYV120Buffer.get() + size_y + size_uv,
width_, height_,
width_, (width_ + 1) / 2, (width_ + 1) / 2);
EXPECT_EQ(0, ConvertFromYV12(yv12_frame, kI420, 0, res_i420_buffer.get()));
if (fwrite(res_i420_buffer.get(), 1, frame_length_,
output_file) != static_cast<unsigned int>(frame_length_)) {
return;
}
psnr = I420PSNR(orig_frame.Buffer(), out_i420_buffer, width_, height_);
psnr = I420PSNR(orig_buffer.get(), res_i420_buffer.get(), width_, height_);
EXPECT_EQ(48.0, psnr);
j++;
delete [] out_i420_buffer;
// printf("\nConvert #%d I420 <-> YV12\n", j);
uint8_t* outYV120Buffer = new uint8_t[frame_length_];
printf("\nConvert #%d I420 <-> YUY2\n", j);
scoped_array<uint8_t> out_yuy2_buffer(new uint8_t[width_ * height_ * 2]);
EXPECT_EQ(0, ConvertFromI420(orig_frame, kYUY2, 0, out_yuy2_buffer.get()));
EXPECT_EQ(0, ConvertFromI420(orig_frame, width_, kYV12, 0,
outYV120Buffer));
EXPECT_EQ(0, ConvertFromYV12(outYV120Buffer, width_,
kI420, 0,
width_, height_,
res_i420_frame.Buffer()));
if (fwrite(res_i420_frame.Buffer(), 1, frame_length_,
output_file) != static_cast<unsigned int>(frame_length_)) {
return;
EXPECT_EQ(0, ConvertToI420(kYUY2, out_yuy2_buffer.get(), 0, 0, width_,
height_, 0, kRotateNone, &res_i420_frame));
if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) {
return;
}
psnr = I420PSNR(&orig_frame, &res_i420_frame);
EXPECT_EQ(48.0, psnr);
printf("\nConvert #%d I420 <-> RGB565\n", j);
scoped_array<uint8_t> out_rgb565_buffer(new uint8_t[width_ * height_ * 2]);
EXPECT_EQ(0, ConvertFromI420(orig_frame, kRGB565, 0,
out_rgb565_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kRGB565, out_rgb565_buffer.get(), 0, 0, width_,
height_, 0, kRotateNone, &res_i420_frame));
if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) {
return;
}
j++;
delete [] outYV120Buffer;
// printf("\nConvert #%d I420 <-> YUY2\n", j);
uint8_t* out_yuy2_buffer = new uint8_t[width_ * height_ * 2];
EXPECT_EQ(0, ConvertFromI420(orig_frame, width_,
kYUY2, 0, out_yuy2_buffer));
EXPECT_EQ(0, ConvertToI420(kYUY2, out_yuy2_buffer, 0, 0, width_, height_,
0, kRotateNone, &res_i420_frame));
if (fwrite(res_i420_frame.Buffer(), 1, frame_length_,
output_file) != static_cast<unsigned int>(frame_length_)) {
return;
}
psnr = I420PSNR(&orig_frame, &res_i420_frame);
EXPECT_EQ(48.0, psnr);
// printf("\nConvert #%d I420 <-> RGB565\n", j);
uint8_t* out_rgb565_buffer = new uint8_t[width_ * height_ * 2];
EXPECT_EQ(0, ConvertFromI420(orig_frame, width_,
kRGB565, 0, out_rgb565_buffer));
EXPECT_EQ(0, ConvertToI420(kRGB565, out_rgb565_buffer, 0, 0, width_, height_,
0, kRotateNone, &res_i420_frame));
if (fwrite(res_i420_frame.Buffer(), 1, frame_length_,
output_file) != static_cast<unsigned int>(frame_length_)) {
return;
}
psnr = I420PSNR(&orig_frame, &res_i420_frame);
// TODO(leozwang) Investigate the right psnr should be set for I420ToRGB565,
// Another example is I420ToRGB24, the psnr is 44
EXPECT_GT(ceil(psnr), 40);
// printf("\nConvert #%d I420 <-> ARGB8888\n", j);
uint8_t* out_argb8888_buffer = new uint8_t[width_ * height_ * 4];
EXPECT_EQ(0, ConvertFromI420(orig_frame, width_,
kARGB, 0, out_argb8888_buffer));
printf("\nConvert #%d I420 <-> ARGB8888\n", j);
scoped_array<uint8_t> out_argb8888_buffer(new uint8_t[width_ * height_ * 4]);
EXPECT_EQ(0, ConvertFromI420(orig_frame, kARGB, 0,
out_argb8888_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kARGB, out_argb8888_buffer, 0, 0, width_, height_,
0, kRotateNone, &res_i420_frame));
EXPECT_EQ(0, ConvertToI420(kARGB, out_argb8888_buffer.get(), 0, 0, width_,
height_, 0, kRotateNone, &res_i420_frame));
if (fwrite(res_i420_frame.Buffer(), 1, frame_length_,
output_file) != static_cast<unsigned int>(frame_length_)) {
if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) {
return;
}
psnr = I420PSNR(&orig_frame, &res_i420_frame);
// TODO(leozwang) Investigate the right psnr should be set for I420ToARGB8888,
EXPECT_GT(ceil(psnr), 42);
ASSERT_EQ(0, fclose(output_file));
res_i420_frame.Free();
orig_frame.Free();
delete [] out_argb8888_buffer;
delete [] out_rgb565_buffer;
delete [] out_yuy2_buffer;
}
// TODO(holmer): Disabled for now due to crashes on Linux 32 bit. The theory
// is that it crashes due to the fact that the buffers are not 16 bit aligned.
// See http://code.google.com/p/webrtc/issues/detail?id=335 for more info.
TEST_F(TestLibYuv, DISABLED_MirrorTest) {
TEST_F(TestLibYuv, MirrorTest) {
// TODO (mikhal): Add an automated test to confirm output.
// TODO(mikhal): Update to new I420VideoFrame and align values. Until then,
// this test is disabled, only ensuring build.
std::string str;
int width = 16;
int half_width = (width + 1) / 2;
int height = 8;
int length = webrtc::CalcBufferSize(kI420, width, height);
int half_height = (height + 1) / 2;
VideoFrame test_frame;
test_frame.VerifyAndAllocate(length);
test_frame.SetWidth(width);
test_frame.SetHeight(height);
memset(test_frame.Buffer(), 255, length);
I420VideoFrame test_frame;
test_frame.CreateEmptyFrame(width, height, width,
half_width, half_width);
memset(test_frame.buffer(kYPlane), 255, width * height);
memset(test_frame.buffer(kUPlane), 255, half_width * half_height);
memset(test_frame.buffer(kVPlane), 255, half_width * half_height);
// Create input frame.
VideoFrame in_frame, test_in_frame;
in_frame.VerifyAndAllocate(length);
in_frame.SetWidth(width);
in_frame.SetHeight(height);
in_frame.SetLength(length);
int plane_offset[3]; // prep for kNumPlanes.
plane_offset[0] = 10;
plane_offset[1] = 100;
plane_offset[2] = 200;
I420VideoFrame in_frame, test_in_frame;
in_frame.CreateEmptyFrame(width, height, width,
half_width, half_width);
int plane_offset[kNumOfPlanes];
plane_offset[kYPlane] = 10;
plane_offset[kUPlane] = 100;
plane_offset[kVPlane] = 200;
CreateImage(&in_frame, plane_offset);
test_in_frame.CopyFrame(in_frame);
EXPECT_EQ(0, PrintFrame(&in_frame, "InputFrame"));
test_in_frame.CopyFrame(in_frame);
VideoFrame out_frame, test_out_frame;
out_frame.VerifyAndAllocate(length);
out_frame.SetWidth(width);
out_frame.SetHeight(height);
out_frame.SetLength(length);
I420VideoFrame out_frame, test_out_frame;
out_frame.CreateEmptyFrame(width, height, width,
half_width, half_width);
CreateImage(&out_frame, plane_offset);
test_out_frame.CopyFrame(out_frame);
@ -317,22 +289,28 @@ TEST_F(TestLibYuv, DISABLED_MirrorTest) {
EXPECT_EQ(0, PrintFrame(&out_frame, "OutputFrame"));
EXPECT_EQ(0, MirrorI420LeftRight(&out_frame, &in_frame));
EXPECT_EQ(0, memcmp(in_frame.Buffer(), test_in_frame.Buffer(), length));
EXPECT_EQ(0, memcmp(in_frame.buffer(kYPlane),
test_in_frame.buffer(kYPlane), width * height));
EXPECT_EQ(0, memcmp(in_frame.buffer(kUPlane),
test_in_frame.buffer(kUPlane), half_width * half_height));
EXPECT_EQ(0, memcmp(in_frame.buffer(kVPlane),
test_in_frame.buffer(kVPlane), half_width * half_height));
// UpDown
std::cout << "Test Mirror function: UpDown" << std::endl;
EXPECT_EQ(0, MirrorI420UpDown(&in_frame, &out_frame));
EXPECT_EQ(0, PrintFrame(&test_out_frame, "OutputFrame"));
EXPECT_EQ(0, MirrorI420UpDown(&out_frame, &test_frame));
EXPECT_EQ(0, memcmp(in_frame.Buffer(), test_frame.Buffer(), length));
EXPECT_EQ(0, memcmp(in_frame.buffer(kYPlane),
test_in_frame.buffer(kYPlane), width * height));
EXPECT_EQ(0, memcmp(in_frame.buffer(kUPlane),
test_in_frame.buffer(kUPlane), half_width * half_height));
EXPECT_EQ(0, memcmp(in_frame.buffer(kVPlane),
test_in_frame.buffer(kVPlane), half_width * half_height));
// TODO(mikhal): Write to a file, and ask to look at the file.
std::cout << "Do the mirrored frames look correct?" << std::endl;
in_frame.Free();
test_in_frame.Free();
out_frame.Free();
test_out_frame.Free();
}
TEST_F(TestLibYuv, alignment) {

View File

@ -44,42 +44,34 @@ int Scaler::Set(int src_width, int src_height,
return 0;
}
int Scaler::Scale(const VideoFrame& src_frame,
VideoFrame* dst_frame) {
int Scaler::Scale(const I420VideoFrame& src_frame,
I420VideoFrame* dst_frame) {
assert(dst_frame);
if (src_frame.Buffer() == NULL || src_frame.Length() == 0)
if (src_frame.IsZeroSize())
return -1;
if (!set_)
return -2;
// Making sure that destination frame is of sufficient size.
int required_dst_size = CalcBufferSize(kI420, dst_width_, dst_height_);
dst_frame->VerifyAndAllocate(required_dst_size);
// Set destination length and dimensions.
dst_frame->SetLength(required_dst_size);
dst_frame->SetWidth(dst_width_);
dst_frame->SetHeight(dst_height_);
// Aligning stride values based on width.
int src_half_width = (src_width_ + 1) >> 1;
int src_half_height = (src_height_ + 1) >> 1;
int dst_half_width = (dst_width_ + 1) >> 1;
int dst_half_height = (dst_height_ + 1) >> 1;
// Converting to planes:
const uint8_t* src_yplane = src_frame.Buffer();
const uint8_t* src_uplane = src_yplane + src_width_ * src_height_;
const uint8_t* src_vplane = src_uplane + src_half_width * src_half_height;
dst_frame->CreateEmptyFrame(dst_width_, dst_height_,
dst_width_, (dst_width_ + 1) / 2,
(dst_width_ + 1) / 2);
uint8_t* dst_yplane = dst_frame->Buffer();
uint8_t* dst_uplane = dst_yplane + dst_width_ * dst_height_;
uint8_t* dst_vplane = dst_uplane + dst_half_width * dst_half_height;
return libyuv::I420Scale(src_yplane, src_width_,
src_uplane, src_half_width,
src_vplane, src_half_width,
return libyuv::I420Scale(src_frame.buffer(kYPlane),
src_frame.stride(kYPlane),
src_frame.buffer(kUPlane),
src_frame.stride(kUPlane),
src_frame.buffer(kVPlane),
src_frame.stride(kVPlane),
src_width_, src_height_,
dst_yplane, dst_width_,
dst_uplane, dst_half_width,
dst_vplane, dst_half_width,
dst_frame->buffer(kYPlane),
dst_frame->stride(kYPlane),
dst_frame->buffer(kUPlane),
dst_frame->stride(kUPlane),
dst_frame->buffer(kVPlane),
dst_frame->stride(kVPlane),
dst_width_, dst_height_,
libyuv::FilterMode(method_));
}

View File

@ -39,19 +39,25 @@ class TestScaler : public ::testing::Test {
Scaler test_scaler_;
FILE* source_file_;
VideoFrame test_frame_;
I420VideoFrame test_frame_;
const int width_;
const int half_width_;
const int height_;
const int half_height_;
const int size_y_;
const int size_uv_;
const int frame_length_;
};
// TODO (mikhal): Use scoped_ptr when handling buffers.
TestScaler::TestScaler()
: source_file_(NULL),
width_(352),
half_width_(width_ / 2),
height_(288),
frame_length_(CalcBufferSize(kI420, 352, 288)) {
half_height_(height_ / 2),
size_y_(width_ * height_),
size_uv_(half_width_ * half_height_),
frame_length_(CalcBufferSize(kI420, width_, height_)) {
}
void TestScaler::SetUp() {
@ -60,8 +66,8 @@ void TestScaler::SetUp() {
source_file_ = fopen(input_file_name.c_str(), "rb");
ASSERT_TRUE(source_file_ != NULL) << "Cannot read file: "<<
input_file_name << "\n";
test_frame_.VerifyAndAllocate(frame_length_);
test_frame_.SetLength(frame_length_);
test_frame_.CreateEmptyFrame(width_, height_,
width_, half_width_, half_width_);
}
void TestScaler::TearDown() {
@ -69,7 +75,6 @@ void TestScaler::TearDown() {
ASSERT_EQ(0, fclose(source_file_));
}
source_file_ = NULL;
test_frame_.Free();
}
TEST_F(TestScaler, ScaleWithoutSettingValues) {
@ -85,22 +90,30 @@ TEST_F(TestScaler, ScaleBadInitialValues) {
}
TEST_F(TestScaler, ScaleSendingNullSourcePointer) {
VideoFrame null_src_frame;
I420VideoFrame null_src_frame;
EXPECT_EQ(-1, test_scaler_.Scale(null_src_frame, &test_frame_));
}
TEST_F(TestScaler, ScaleSendingBufferTooSmall) {
// Sending a buffer which is too small (should reallocate and update size)
EXPECT_EQ(0, test_scaler_.Set(352, 288, 144, 288, kI420, kI420, kScalePoint));
VideoFrame test_frame2;
EXPECT_GT(fread(test_frame_.Buffer(), 1, frame_length_, source_file_), 0U);
EXPECT_EQ(0, test_scaler_.Set(width_, height_,
half_width_, half_height_,
kI420, kI420,
kScalePoint));
I420VideoFrame test_frame2;
scoped_array<uint8_t> orig_buffer(new uint8_t[frame_length_]);
EXPECT_GT(fread(orig_buffer.get(), 1, frame_length_, source_file_), 0U);
test_frame_.CreateFrame(size_y_, orig_buffer.get(),
size_uv_, orig_buffer.get() + size_y_,
size_uv_, orig_buffer.get() + size_y_ + size_uv_,
width_, height_,
width_, half_width_, half_width_);
EXPECT_EQ(0, test_scaler_.Scale(test_frame_, &test_frame2));
EXPECT_EQ(CalcBufferSize(kI420, 144, 288),
static_cast<int>(test_frame2.Size()));
EXPECT_EQ(144u, test_frame2.Width());
EXPECT_EQ(288u, test_frame2.Height());
EXPECT_EQ(CalcBufferSize(kI420, 144, 288),
static_cast<int>(test_frame2.Length()));
EXPECT_GT(width_ * height_, test_frame2.allocated_size(kYPlane));
EXPECT_GT(size_uv_, test_frame2.allocated_size(kUPlane));
EXPECT_GT(size_uv_, test_frame2.allocated_size(kVPlane));
EXPECT_EQ(half_width_, test_frame2.width());
EXPECT_EQ(half_height_, test_frame2.height());
}
//TODO (mikhal): Converge the test into one function that accepts the method.
@ -113,7 +126,7 @@ TEST_F(TestScaler, PointScaleTest) {
ScaleSequence(method,
source_file_, out_name,
width_, height_,
width_ / 2, height_ / 2);
half_width_, half_height_);
// Upsample back up and check PSNR.
source_file2 = fopen(out_name.c_str(), "rb");
out_name = webrtc::test::OutputPath() + "LibYuvTest_PointScale_352_288_"
@ -422,31 +435,34 @@ void TestScaler::ScaleSequence(ScaleMethod method,
rewind(source_file);
int out_required_size = CalcBufferSize(kI420, dst_width, dst_height);
int in_required_size = CalcBufferSize(kI420, src_width, src_height);
VideoFrame input_frame, output_frame;
input_frame.VerifyAndAllocate(in_required_size);
input_frame.SetLength(in_required_size);
output_frame.VerifyAndAllocate(out_required_size);
output_frame.SetLength(out_required_size);
I420VideoFrame input_frame;
I420VideoFrame output_frame;
int64_t start_clock, total_clock;
total_clock = 0;
int frame_count = 0;
int src_required_size = CalcBufferSize(kI420, src_width, src_height);
scoped_array<uint8_t> frame_buffer(new uint8_t[src_required_size]);
int size_y = src_width * src_height;
int size_uv = ((src_width + 1) / 2) * ((src_height + 1) / 2);
// Running through entire sequence.
while (feof(source_file) == 0) {
if ((size_t)in_required_size !=
fread(input_frame.Buffer(), 1, in_required_size, source_file))
break;
if ((size_t)src_required_size !=
fread(frame_buffer.get(), 1, src_required_size, source_file))
break;
input_frame.CreateFrame(size_y, frame_buffer.get(),
size_uv, frame_buffer.get() + size_y,
size_uv, frame_buffer.get() + size_y + size_uv,
src_width, src_height,
src_width, (src_width + 1) / 2,
(src_width + 1) / 2);
start_clock = TickTime::MillisecondTimestamp();
EXPECT_EQ(0, test_scaler_.Scale(input_frame, &output_frame));
total_clock += TickTime::MillisecondTimestamp() - start_clock;
if (fwrite(output_frame.Buffer(), 1, output_frame.Size(),
output_file) != static_cast<unsigned int>(output_frame.Size())) {
return;
if (PrintI420VideoFrame(output_frame, output_file) < 0) {
return;
}
frame_count++;
}

View File

@ -11,6 +11,7 @@
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include <assert.h>
#include <string.h>
#include "libyuv.h"
@ -91,6 +92,57 @@ int CalcBufferSize(VideoType type, int width, int height) {
return buffer_size;
}
int PrintI420VideoFrame(const I420VideoFrame& frame, FILE* file) {
if (file == NULL)
return -1;
if (frame.IsZeroSize())
return -1;
for (int planeNum = 0; planeNum < kNumOfPlanes; ++planeNum) {
int width = (planeNum ? (frame.width() + 1) / 2 : frame.width());
int height = (planeNum ? (frame.height() + 1) / 2 : frame.height());
PlaneType plane_type = static_cast<PlaneType>(planeNum);
const uint8_t* plane_buffer = frame.buffer(plane_type);
for (int y = 0; y < height; y++) {
if (fwrite(plane_buffer, 1, width, file) !=
static_cast<unsigned int>(width)) {
return -1;
}
plane_buffer += frame.stride(plane_type);
}
}
return 0;
}
int ExtractBuffer(const I420VideoFrame& input_frame,
int size, uint8_t* buffer) {
assert(buffer);
if (input_frame.IsZeroSize())
return -1;
int length = CalcBufferSize(kI420, input_frame.width(), input_frame.height());
if (size < length) {
return -1;
}
int pos = 0;
uint8_t* buffer_ptr = buffer;
for (int plane = 0; plane < kNumOfPlanes; ++plane) {
int width = (plane ? (input_frame.width() + 1) / 2 :
input_frame.width());
int height = (plane ? (input_frame.height() + 1) / 2 :
input_frame.height());
const uint8_t* plane_ptr = input_frame.buffer(
static_cast<PlaneType>(plane));
for (int y = 0; y < height; y++) {
memcpy(&buffer_ptr[pos], plane_ptr, width);
pos += width;
plane_ptr += input_frame.stride(static_cast<PlaneType>(plane));
}
}
return length;
}
int ConvertNV12ToRGB565(const uint8_t* src_frame,
uint8_t* dst_frame,
int width, int height) {
@ -172,179 +224,148 @@ int ConvertToI420(VideoType src_video_type,
int src_width, int src_height,
int sample_size,
VideoRotationMode rotation,
VideoFrame* dst_frame) {
// All sanity tests are conducted within LibYuv.
int dst_height = dst_frame->Height();
int dst_width = dst_frame->Width();
// TODO(mikhal): When available, use actual stride value.
int dst_stride = dst_frame->Width();
int half_dst_width = (dst_width + 1) >> 1;
int half_dst_height = (dst_height + 1) >> 1;
uint8_t* dst_yplane = dst_frame->Buffer();
uint8_t* dst_uplane = dst_yplane + dst_width * dst_height;
uint8_t* dst_vplane = dst_uplane + half_dst_width * half_dst_height;
I420VideoFrame* dst_frame) {
return libyuv::ConvertToI420(src_frame, sample_size,
dst_yplane, dst_stride,
dst_uplane, (dst_stride + 1) / 2,
dst_vplane, (dst_stride + 1) / 2,
dst_frame->buffer(kYPlane),
dst_frame->stride(kYPlane),
dst_frame->buffer(kUPlane),
dst_frame->stride(kUPlane),
dst_frame->buffer(kVPlane),
dst_frame->stride(kVPlane),
crop_x, crop_y,
src_width, src_height,
dst_width, dst_height,
dst_frame->width(), dst_frame->height(),
ConvertRotationMode(rotation),
ConvertVideoType(src_video_type));
}
int ConvertFromI420(const VideoFrame& src_frame, int src_stride,
int ConvertFromI420(const I420VideoFrame& src_frame,
VideoType dst_video_type, int dst_sample_size,
uint8_t* dst_frame) {
int height = src_frame.Height();
int width = src_frame.Width();
int abs_height = (height < 0) ? -height : height;
int half_width = (width + 1) >> 1;
int half_height = (abs_height + 1) >> 1;
const uint8_t* src_yplane = src_frame.Buffer();
const uint8_t* src_uplane = src_yplane + width * abs_height;
const uint8_t* src_vplane = src_uplane + half_width * half_height;
return libyuv::ConvertFromI420(src_yplane, src_stride,
src_uplane, (src_stride + 1) / 2,
src_vplane, (src_stride + 1) / 2,
return libyuv::ConvertFromI420(src_frame.buffer(kYPlane),
src_frame.stride(kYPlane),
src_frame.buffer(kUPlane),
src_frame.stride(kUPlane),
src_frame.buffer(kVPlane),
src_frame.stride(kVPlane),
dst_frame, dst_sample_size,
width, height,
src_frame.width(), src_frame.height(),
ConvertVideoType(dst_video_type));
}
int ConvertFromYV12(const uint8_t* src_frame, int src_stride,
// TODO(mikhal): Create a designated VideoFrame for non I420.
int ConvertFromYV12(const I420VideoFrame& src_frame,
VideoType dst_video_type, int dst_sample_size,
int width, int height,
uint8_t* dst_frame) {
int half_src_stride = (src_stride + 1) >> 1;
int abs_height = (height < 0) ? -height : height;
int half_height = (abs_height + 1) >> 1;
const uint8_t* src_yplane = src_frame;
const uint8_t* src_uplane = src_yplane + width * abs_height;
const uint8_t* src_vplane = src_uplane + half_src_stride * half_height;
// YV12 = Y, V, U
return libyuv::ConvertFromI420(src_yplane, src_stride,
src_vplane, half_src_stride,
src_uplane, half_src_stride,
return libyuv::ConvertFromI420(src_frame.buffer(kYPlane),
src_frame.stride(kYPlane),
src_frame.buffer(kVPlane),
src_frame.stride(kVPlane),
src_frame.buffer(kUPlane),
src_frame.stride(kUPlane),
dst_frame, dst_sample_size,
width, height,
src_frame.width(), src_frame.height(),
ConvertVideoType(dst_video_type));
}
int MirrorI420LeftRight(const VideoFrame* src_frame,
VideoFrame* dst_frame) {
int MirrorI420LeftRight(const I420VideoFrame* src_frame,
I420VideoFrame* dst_frame) {
// Source and destination frames should have equal resolution.
if (src_frame->Width() != dst_frame->Width() ||
src_frame->Height() != dst_frame->Height())
if (src_frame->width() != dst_frame->width() ||
src_frame->height() != dst_frame->height())
return -1;
int width = src_frame->Width();
int height = src_frame->Height();
int half_width = (width + 1) >> 1;
int half_height = (height + 1) >> 1;
const uint8_t* src_yplane = src_frame->Buffer();
const uint8_t* src_uplane = src_yplane + width * height;
const uint8_t* src_vplane = src_uplane + half_width * half_height;
uint8_t* dst_yplane = dst_frame->Buffer();
uint8_t* dst_uplane = dst_yplane + width * height;
uint8_t* dst_vplane = dst_uplane + half_width * half_height;
return libyuv::I420Mirror(src_yplane, width,
src_uplane, half_width,
src_vplane, half_width,
dst_yplane, width,
dst_uplane, half_width,
dst_vplane, half_width,
width, height);
return libyuv::I420Mirror(src_frame->buffer(kYPlane),
src_frame->stride(kYPlane),
src_frame->buffer(kUPlane),
src_frame->stride(kUPlane),
src_frame->buffer(kVPlane),
src_frame->stride(kVPlane),
dst_frame->buffer(kYPlane),
dst_frame->stride(kYPlane),
dst_frame->buffer(kUPlane),
dst_frame->stride(kUPlane),
dst_frame->buffer(kVPlane),
dst_frame->stride(kVPlane),
src_frame->width(), src_frame->height());
}
int MirrorI420UpDown(const VideoFrame* src_frame,
VideoFrame* dst_frame) {
int MirrorI420UpDown(const I420VideoFrame* src_frame,
I420VideoFrame* dst_frame) {
// Source and destination frames should have equal resolution
if (src_frame->Width() != dst_frame->Width() ||
src_frame->Height() != dst_frame->Height())
if (src_frame->width() != dst_frame->width() ||
src_frame->height() != dst_frame->height())
return -1;
int width = src_frame->Width();
int height = src_frame->Height();
int half_width = (width + 1) >> 1;
int half_height = (height + 1) >> 1;
const uint8_t* src_yplane = src_frame->Buffer();
const uint8_t* src_uplane = src_yplane + width * height;
const uint8_t* src_vplane = src_uplane + half_width * half_height;
uint8_t* dst_yplane = dst_frame->Buffer();
uint8_t* dst_uplane = dst_yplane + width * height;
uint8_t* dst_vplane = dst_uplane + half_width * half_height;
// Inserting negative height flips the frame.
return libyuv::I420Copy(src_yplane, width,
src_uplane, half_width,
src_vplane, half_width,
dst_yplane, width,
dst_uplane, half_width,
dst_vplane, half_width,
width, -height);
return libyuv::I420Copy(src_frame->buffer(kYPlane),
src_frame->stride(kYPlane),
src_frame->buffer(kUPlane),
src_frame->stride(kUPlane),
src_frame->buffer(kVPlane),
src_frame->stride(kVPlane),
dst_frame->buffer(kYPlane),
dst_frame->stride(kYPlane),
dst_frame->buffer(kUPlane),
dst_frame->stride(kUPlane),
dst_frame->buffer(kVPlane),
dst_frame->stride(kVPlane),
src_frame->width(), -(src_frame->height()));
}
// Compute PSNR for an I420 frame (all planes)
double I420PSNR(const VideoFrame* ref_frame,
const VideoFrame* test_frame) {
double I420PSNR(const I420VideoFrame* ref_frame,
const I420VideoFrame* test_frame) {
if (!ref_frame || !test_frame)
return -1;
else if ((ref_frame->Width() != test_frame->Width()) ||
(ref_frame->Height() != test_frame->Height()))
else if ((ref_frame->width() != test_frame->width()) ||
(ref_frame->height() != test_frame->height()))
return -1;
else if (ref_frame->Width() == 0u || ref_frame->Height() == 0u)
else if (ref_frame->width() <= 0 || ref_frame->height() <= 0)
return -1;
int height = ref_frame->Height() ;
int width = ref_frame->Width();
int half_width = (width + 1) >> 1;
int half_height = (height + 1) >> 1;
const uint8_t* src_y_a = ref_frame->Buffer();
const uint8_t* src_u_a = src_y_a + width * height;
const uint8_t* src_v_a = src_u_a + half_width * half_height;
const uint8_t* src_y_b = test_frame->Buffer();
const uint8_t* src_u_b = src_y_b + width * height;
const uint8_t* src_v_b = src_u_b + half_width * half_height;
// In the following: stride is determined by width.
double psnr = libyuv::I420Psnr(src_y_a, width,
src_u_a, half_width,
src_v_a, half_width,
src_y_b, width,
src_u_b, half_width,
src_v_b, half_width,
width, height);
double psnr = libyuv::I420Psnr(ref_frame->buffer(kYPlane),
ref_frame->stride(kYPlane),
ref_frame->buffer(kUPlane),
ref_frame->stride(kUPlane),
ref_frame->buffer(kVPlane),
ref_frame->stride(kVPlane),
test_frame->buffer(kYPlane),
test_frame->stride(kYPlane),
test_frame->buffer(kUPlane),
test_frame->stride(kUPlane),
test_frame->buffer(kVPlane),
test_frame->stride(kVPlane),
test_frame->width(), test_frame->height());
// LibYuv sets the max psnr value to 128, we restrict it to 48.
// In case of 0 mse in one frame, 128 can skew the results significantly.
return (psnr > 48.0) ? 48.0 : psnr;
}
// Compute SSIM for an I420 frame (all planes)
double I420SSIM(const VideoFrame* ref_frame,
const VideoFrame* test_frame) {
double I420SSIM(const I420VideoFrame* ref_frame,
const I420VideoFrame* test_frame) {
if (!ref_frame || !test_frame)
return -1;
else if ((ref_frame->Width() != test_frame->Width()) ||
(ref_frame->Height() != test_frame->Height()))
else if ((ref_frame->width() != test_frame->width()) ||
(ref_frame->height() != test_frame->height()))
return -1;
else if (ref_frame->Width() == 0u || ref_frame->Height() == 0u)
else if (ref_frame->width() <= 0 || ref_frame->height() <= 0)
return -1;
int height = ref_frame->Height() ;
int width = ref_frame->Width();
int half_width = (width + 1) >> 1;
int half_height = (height + 1) >> 1;
const uint8_t* src_y_a = ref_frame->Buffer();
const uint8_t* src_u_a = src_y_a + width * height;
const uint8_t* src_v_a = src_u_a + half_width * half_height;
const uint8_t* src_y_b = test_frame->Buffer();
const uint8_t* src_u_b = src_y_b + width * height;
const uint8_t* src_v_b = src_u_b + half_width * half_height;
int stride_y = width;
int stride_uv = half_width;
return libyuv::I420Ssim(src_y_a, stride_y,
src_u_a, stride_uv,
src_v_a, stride_uv,
src_y_b, stride_y,
src_u_b, stride_uv,
src_v_b, stride_uv,
width, height);
return libyuv::I420Ssim(ref_frame->buffer(kYPlane),
ref_frame->stride(kYPlane),
ref_frame->buffer(kUPlane),
ref_frame->stride(kUPlane),
ref_frame->buffer(kVPlane),
ref_frame->stride(kVPlane),
test_frame->buffer(kYPlane),
test_frame->stride(kYPlane),
test_frame->buffer(kUPlane),
test_frame->stride(kUPlane),
test_frame->buffer(kVPlane),
test_frame->stride(kVPlane),
test_frame->width(), test_frame->height());
}
// Compute PSNR for an I420 frame (all planes)

View File

@ -30,8 +30,9 @@ int Plane::CreateEmptyPlane(int allocated_size, int stride, int plane_size) {
if (allocated_size < 1 || stride < 1 || plane_size < 1)
return -1;
stride_ = stride;
if (MaybeResize(allocated_size) < 0)
return -1;
plane_size_ = plane_size;
MaybeResize(allocated_size);
return 0;
}

View File

@ -44,10 +44,10 @@ class Plane {
int allocated_size() const {return allocated_size_;}
// Set actual size.
void ResetSize() {plane_size_ = 0;};
void ResetSize() {plane_size_ = 0;}
// Return true is plane size is zero, false if not.
bool IsZeroSize() {return plane_size_ == 0;};
bool IsZeroSize() const {return plane_size_ == 0;}
// Get stride value.
int stride() const {return stride_;}
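
ResetSize() and IsZeroSize() separate the logical plane size from the allocated capacity, which is what lets I420VideoFrame mark a frame empty without freeing its buffers. An illustrative sketch (Plane is normally managed by I420VideoFrame; the header path is assumed):

#include "common_video/interface/plane.h"  // Assumed location of Plane.

void PlaneSizeSemantics() {
  webrtc::Plane plane;
  plane.CreateEmptyPlane(64, 8, 64);  // Capacity 64, stride 8, size 64.
  plane.ResetSize();                  // Logical size -> 0; memory kept.
  // IsZeroSize() is now true, but allocated_size() still reports 64,
  // so the buffer can be reused without reallocation.
}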

View File

@ -12,6 +12,7 @@
#define WEBRTC_MODULES_UTILITY_INTERFACE_FILE_PLAYER_H_
#include "common_types.h"
#include "common_video/interface/i420_video_frame.h"
#include "engine_configurations.h"
#include "module_common_types.h"
#include "typedefs.h"
@ -93,12 +94,12 @@ public:
virtual WebRtc_Word32 video_codec_info(VideoCodec& /*videoCodec*/) const
{return -1;}
virtual WebRtc_Word32 GetVideoFromFile(VideoFrame& /*videoFrame*/)
virtual WebRtc_Word32 GetVideoFromFile(I420VideoFrame& /*videoFrame*/)
{ return -1;}
// Same as GetVideoFromFile(). videoFrame will have the resolution specified
// by the width outWidth and height outHeight in pixels.
virtual WebRtc_Word32 GetVideoFromFile(VideoFrame& /*videoFrame*/,
virtual WebRtc_Word32 GetVideoFromFile(I420VideoFrame& /*videoFrame*/,
const WebRtc_UWord32 /*outWidth*/,
const WebRtc_UWord32 /*outHeight*/)
{return -1;}

View File

@ -11,6 +11,7 @@
#ifndef WEBRTC_MODULES_UTILITY_INTERFACE_FILE_RECORDER_H_
#define WEBRTC_MODULES_UTILITY_INTERFACE_FILE_RECORDER_H_
#include "common_video/interface/i420_video_frame.h"
#include "common_types.h"
#include "engine_configurations.h"
#include "modules/audio_coding/main/interface/audio_coding_module_typedefs.h"
@ -78,7 +79,8 @@ public:
bool videoOnly = false) = 0;
// Record the video frame in videoFrame to AVI file.
virtual WebRtc_Word32 RecordVideoToFile(const VideoFrame& videoFrame) = 0;
virtual WebRtc_Word32 RecordVideoToFile(
const I420VideoFrame& videoFrame) = 0;
protected:
virtual ~FileRecorder() {}

View File

@ -536,7 +536,7 @@ WebRtc_Word32 VideoFilePlayerImpl::StopPlayingFile()
return FilePlayerImpl::StopPlayingFile();
}
WebRtc_Word32 VideoFilePlayerImpl::GetVideoFromFile(VideoFrame& videoFrame,
WebRtc_Word32 VideoFilePlayerImpl::GetVideoFromFile(I420VideoFrame& videoFrame,
WebRtc_UWord32 outWidth,
WebRtc_UWord32 outHeight)
{
@ -547,7 +547,7 @@ WebRtc_Word32 VideoFilePlayerImpl::GetVideoFromFile(VideoFrame& videoFrame,
{
return retVal;
}
if( videoFrame.Length() > 0)
if (!videoFrame.IsZeroSize())
{
retVal = _frameScaler.ResizeFrameIfNeeded(&videoFrame, outWidth,
outHeight);
@ -555,22 +555,32 @@ WebRtc_Word32 VideoFilePlayerImpl::GetVideoFromFile(VideoFrame& videoFrame,
return retVal;
}
WebRtc_Word32 VideoFilePlayerImpl::GetVideoFromFile(VideoFrame& videoFrame)
WebRtc_Word32 VideoFilePlayerImpl::GetVideoFromFile(I420VideoFrame& videoFrame)
{
CriticalSectionScoped lock( _critSec);
// No new video data read from file.
if(_encodedData.payloadSize == 0)
{
videoFrame.SetLength(0);
videoFrame.ResetSize();
return -1;
}
WebRtc_Word32 retVal = 0;
if(strncmp(video_codec_info_.plName, "I420", 5) == 0)
{
videoFrame.CopyFrame(_encodedData.payloadSize,_encodedData.payloadData);
videoFrame.SetLength(_encodedData.payloadSize);
videoFrame.SetWidth(video_codec_info_.width);
videoFrame.SetHeight(video_codec_info_.height);
int size_y = video_codec_info_.width * video_codec_info_.height;
int half_width = (video_codec_info_.width + 1) / 2;
int half_height = (video_codec_info_.height + 1) / 2;
int size_uv = half_width * half_height;
// TODO(mikhal): Do we need to align the stride here?
const uint8_t* buffer_y = _encodedData.payloadData;
const uint8_t* buffer_u = buffer_y + size_y;
const uint8_t* buffer_v = buffer_u + size_uv;
videoFrame.CreateFrame(size_y, buffer_y,
size_uv, buffer_u,
size_uv, buffer_v,
video_codec_info_.width, video_codec_info_.height,
video_codec_info_.width, half_width, half_width);
}else
{
// Set the timestamp manually since there is no timestamp in the file.
@ -580,7 +590,7 @@ WebRtc_Word32 VideoFilePlayerImpl::GetVideoFromFile(VideoFrame& videoFrame)
}
WebRtc_Word64 renderTimeMs = TickTime::MillisecondTimestamp();
videoFrame.SetRenderTime(renderTimeMs);
videoFrame.set_render_time_ms(renderTimeMs);
// Indicate that the current frame in the encoded buffer is old/has
// already been read.

View File

@ -93,8 +93,8 @@ public:
bool videoOnly);
virtual WebRtc_Word32 StopPlayingFile();
virtual WebRtc_Word32 video_codec_info(VideoCodec& videoCodec) const;
virtual WebRtc_Word32 GetVideoFromFile(VideoFrame& videoFrame);
virtual WebRtc_Word32 GetVideoFromFile(VideoFrame& videoFrame,
virtual WebRtc_Word32 GetVideoFromFile(I420VideoFrame& videoFrame);
virtual WebRtc_Word32 GetVideoFromFile(I420VideoFrame& videoFrame,
const WebRtc_UWord32 outWidth,
const WebRtc_UWord32 outHeight);

View File

@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "engine_configurations.h"
#include "file_recorder_impl.h"
#include "media_file.h"
@ -481,11 +482,10 @@ WebRtc_Word32 AviRecorder::SetUpVideoEncoder()
return 0;
}
WebRtc_Word32 AviRecorder::RecordVideoToFile(const VideoFrame& videoFrame)
WebRtc_Word32 AviRecorder::RecordVideoToFile(const I420VideoFrame& videoFrame)
{
CriticalSectionScoped lock(_critSec);
if(!IsRecording() || ( videoFrame.Length() == 0))
if(!IsRecording() || videoFrame.IsZeroSize())
{
return -1;
}
@ -548,7 +548,7 @@ WebRtc_Word32 AviRecorder::ProcessAudio()
// Get the most recent frame that is due for writing to file. Since
// frames are unencoded it's safe to throw away frames if necessary
// for synchronizing audio and video.
VideoFrame* frameToProcess = _videoFramesQueue->FrameToRecord();
I420VideoFrame* frameToProcess = _videoFramesQueue->FrameToRecord();
if(frameToProcess)
{
// Syncronize audio to the current frame to process by throwing away
@ -563,7 +563,7 @@ WebRtc_Word32 AviRecorder::ProcessAudio()
{
if(TickTime::TicksToMilliseconds(
frameInfo->_playoutTS.Ticks()) <
frameToProcess->RenderTimeMs())
frameToProcess->render_time_ms())
{
delete frameInfo;
_audioFramesToWrite.PopFront();
@ -622,7 +622,7 @@ bool AviRecorder::Process()
// Get the most recent frame to write to file (if any). Synchronize it with
// the audio stream (if any). Synchronization the video based on its render
// timestamp (i.e. VideoFrame::RenderTimeMS())
VideoFrame* frameToProcess = _videoFramesQueue->FrameToRecord();
I420VideoFrame* frameToProcess = _videoFramesQueue->FrameToRecord();
if( frameToProcess == NULL)
{
return true;
@ -692,9 +692,9 @@ bool AviRecorder::Process()
return error == 0;
}
WebRtc_Word32 AviRecorder::EncodeAndWriteVideoToFile(VideoFrame& videoFrame)
WebRtc_Word32 AviRecorder::EncodeAndWriteVideoToFile(I420VideoFrame& videoFrame)
{
if(!IsRecording() || (videoFrame.Length() == 0))
if (!IsRecording() || videoFrame.IsZeroSize())
{
return -1;
}
@ -709,14 +709,18 @@ WebRtc_Word32 AviRecorder::EncodeAndWriteVideoToFile(VideoFrame& videoFrame)
if( STR_CASE_CMP(_videoCodecInst.plName, "I420") == 0)
{
_videoEncodedData.VerifyAndAllocate(videoFrame.Length());
int length = CalcBufferSize(kI420, videoFrame.width(),
videoFrame.height());
_videoEncodedData.VerifyAndAllocate(length);
// I420 is raw data. No encoding needed (each sample is represented by
// 1 byte so there is no difference depending on endianness).
memcpy(_videoEncodedData.payloadData, videoFrame.Buffer(),
videoFrame.Length());
int ret_length = ExtractBuffer(videoFrame, length,
_videoEncodedData.payloadData);
if (ret_length < 0)
return -1;
_videoEncodedData.payloadSize = videoFrame.Length();
_videoEncodedData.payloadSize = ret_length;
_videoEncodedData.frameType = kVideoFrameKey;
}else {
if( _videoEncoder->Encode(videoFrame, _videoEncodedData) != 0)

View File

@ -74,7 +74,7 @@ public:
{
return -1;
}
virtual WebRtc_Word32 RecordVideoToFile(const VideoFrame& videoFrame)
virtual WebRtc_Word32 RecordVideoToFile(const I420VideoFrame& videoFrame)
{
return -1;
}
@ -117,7 +117,7 @@ public:
ACMAMRPackingFormat amrFormat = AMRFileStorage,
bool videoOnly = false);
virtual WebRtc_Word32 StopRecording();
virtual WebRtc_Word32 RecordVideoToFile(const VideoFrame& videoFrame);
virtual WebRtc_Word32 RecordVideoToFile(const I420VideoFrame& videoFrame);
protected:
virtual WebRtc_Word32 WriteEncodedAudioData(
@ -132,7 +132,7 @@ private:
bool StartThread();
bool StopThread();
WebRtc_Word32 EncodeAndWriteVideoToFile(VideoFrame& videoFrame);
WebRtc_Word32 EncodeAndWriteVideoToFile(I420VideoFrame& videoFrame);
WebRtc_Word32 ProcessAudio();
WebRtc_Word32 CalcI420FrameSize() const;

View File

@ -23,26 +23,26 @@ FrameScaler::FrameScaler()
FrameScaler::~FrameScaler() {}
int FrameScaler::ResizeFrameIfNeeded(VideoFrame* video_frame,
WebRtc_UWord32 out_width,
WebRtc_UWord32 out_height) {
if (video_frame->Length() == 0) {
int FrameScaler::ResizeFrameIfNeeded(I420VideoFrame* video_frame,
int out_width,
int out_height) {
if (video_frame->IsZeroSize()) {
return -1;
}
if ((video_frame->Width() != out_width) ||
(video_frame->Height() != out_height)) {
if ((video_frame->width() != out_width) ||
(video_frame->height() != out_height)) {
// Set correct scale settings and scale |video_frame| into |scaled_frame_|.
scaler_->Set(video_frame->Width(), video_frame->Height(), out_width,
scaler_->Set(video_frame->width(), video_frame->height(), out_width,
out_height, kI420, kI420, kScaleBox);
int ret = scaler_->Scale(*video_frame, &scaled_frame_);
if (ret < 0) {
return ret;
}
scaled_frame_.SetRenderTime(video_frame->RenderTimeMs());
scaled_frame_.SetTimeStamp(video_frame->TimeStamp());
video_frame->SwapFrame(scaled_frame_);
scaled_frame_.set_render_time_ms(video_frame->render_time_ms());
scaled_frame_.set_timestamp(video_frame->timestamp());
video_frame->SwapFrame(&scaled_frame_);
}
return 0;
}
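
ResizeFrameIfNeeded() resizes in place: it scales into the member scaled_frame_, carries the timestamps over, then SwapFrame()s the plane buffers back into the caller's frame, avoiding a full copy. Illustrative call site (target size arbitrary; include path within modules/utility assumed):

#include "common_video/interface/i420_video_frame.h"
#include "frame_scaler.h"

// On success |frame| holds the same image scaled to 320x240.
int NormalizeToQVGA(webrtc::FrameScaler* scaler,
                    webrtc::I420VideoFrame* frame) {
  return scaler->ResizeFrameIfNeeded(frame, 320, 240);
}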

View File

@ -15,6 +15,7 @@
#ifdef WEBRTC_MODULE_UTILITY_VIDEO
#include "common_video/interface/i420_video_frame.h"
#include "engine_configurations.h"
#include "modules/interface/module_common_types.h"
#include "system_wrappers/interface/scoped_ptr.h"
@ -31,13 +32,13 @@ class FrameScaler {
// Re-sizes |video_frame| so that it has the width |out_width| and height
// |out_height|.
int ResizeFrameIfNeeded(VideoFrame* video_frame,
WebRtc_UWord32 out_width,
WebRtc_UWord32 out_height);
int ResizeFrameIfNeeded(I420VideoFrame* video_frame,
int out_width,
int out_height);
private:
scoped_ptr<Scaler> scaler_;
VideoFrame scaled_frame_;
I420VideoFrame scaled_frame_;
};
} // namespace webrtc

View File

@ -74,10 +74,10 @@ WebRtc_Word32 VideoCoder::SetDecodeCodec(VideoCodec& videoCodecInst,
return 0;
}
WebRtc_Word32 VideoCoder::Decode(VideoFrame& decodedVideo,
WebRtc_Word32 VideoCoder::Decode(I420VideoFrame& decodedVideo,
const EncodedVideoData& encodedData)
{
decodedVideo.SetLength(0);
decodedVideo.ResetSize();
if(encodedData.payloadSize <= 0)
{
return -1;
@ -92,7 +92,7 @@ WebRtc_Word32 VideoCoder::Decode(VideoFrame& decodedVideo,
}
WebRtc_Word32 VideoCoder::Encode(const VideoFrame& videoFrame,
WebRtc_Word32 VideoCoder::Encode(const I420VideoFrame& videoFrame,
EncodedVideoData& videoEncodedData)
{
// The AddVideoFrame(..) call will (indirectly) call SendData(). Store a
@ -121,7 +121,7 @@ WebRtc_Word8 VideoCoder::DefaultPayloadType(const char* plName)
return -1;
}
WebRtc_Word32 VideoCoder::FrameToRender(VideoFrame& videoFrame)
WebRtc_Word32 VideoCoder::FrameToRender(I420VideoFrame& videoFrame)
{
return _decodedVideo->CopyFrame(videoFrame);
}

View File

@ -35,10 +35,10 @@ public:
WebRtc_Word32 SetDecodeCodec(VideoCodec& videoCodecInst,
WebRtc_Word32 numberOfCores);
WebRtc_Word32 Decode(VideoFrame& decodedVideo,
WebRtc_Word32 Decode(I420VideoFrame& decodedVideo,
const EncodedVideoData& encodedData);
WebRtc_Word32 Encode(const VideoFrame& videoFrame,
WebRtc_Word32 Encode(const I420VideoFrame& videoFrame,
EncodedVideoData& videoEncodedData);
WebRtc_Word8 DefaultPayloadType(const char* plName);
@ -46,7 +46,7 @@ public:
private:
// VCMReceiveCallback function.
// Note: called by VideoCodingModule when decoding finished.
WebRtc_Word32 FrameToRender(VideoFrame& videoFrame);
WebRtc_Word32 FrameToRender(I420VideoFrame& videoFrame);
// VCMPacketizationCallback function.
// Note: called by VideoCodingModule when encoding finished.
@ -61,7 +61,7 @@ private:
const RTPVideoHeader* rtpTypeHdr);
VideoCodingModule* _vcm;
VideoFrame* _decodedVideo;
I420VideoFrame* _decodedVideo;
EncodedVideoData* _videoEncodedData;
};
} // namespace webrtc

View File

@ -32,9 +32,9 @@ VideoFramesQueue::~VideoFramesQueue()
ListItem* item = _incomingFrames.First();
if (item)
{
VideoFrame* ptrFrame = static_cast<VideoFrame*>(item->GetItem());
I420VideoFrame* ptrFrame =
static_cast<I420VideoFrame*>(item->GetItem());
assert(ptrFrame != NULL);
ptrFrame->Free();
delete ptrFrame;
}
_incomingFrames.Erase(item);
@ -42,27 +42,20 @@ VideoFramesQueue::~VideoFramesQueue()
while (!_emptyFrames.Empty())
{
ListItem* item = _emptyFrames.First();
if (item)
{
VideoFrame* ptrFrame = static_cast<VideoFrame*>(item->GetItem());
assert(ptrFrame != NULL);
ptrFrame->Free();
delete ptrFrame;
}
_emptyFrames.Erase(item);
}
}
WebRtc_Word32 VideoFramesQueue::AddFrame(const VideoFrame& newFrame)
WebRtc_Word32 VideoFramesQueue::AddFrame(const I420VideoFrame& newFrame)
{
VideoFrame* ptrFrameToAdd = NULL;
I420VideoFrame* ptrFrameToAdd = NULL;
// Try to re-use an I420VideoFrame. Only allocate new memory if it is necessary.
if (!_emptyFrames.Empty())
{
ListItem* item = _emptyFrames.First();
if (item)
{
ptrFrameToAdd = static_cast<VideoFrame*>(item->GetItem());
ptrFrameToAdd = static_cast<I420VideoFrame*>(item->GetItem());
_emptyFrames.Erase(item);
}
}
@ -81,7 +74,7 @@ WebRtc_Word32 VideoFramesQueue::AddFrame(const VideoFrame& newFrame)
"%s: allocating buffer %d", __FUNCTION__,
_emptyFrames.GetSize() + _incomingFrames.GetSize());
ptrFrameToAdd = new VideoFrame();
ptrFrameToAdd = new I420VideoFrame();
if (!ptrFrameToAdd)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
@ -98,15 +91,15 @@ WebRtc_Word32 VideoFramesQueue::AddFrame(const VideoFrame& newFrame)
// lower than current time in ms (TickTime::MillisecondTimestamp()).
// Note _incomingFrames is sorted so that the oldest frame is first.
// Recycle all frames that are older than the most recent frame.
VideoFrame* VideoFramesQueue::FrameToRecord()
I420VideoFrame* VideoFramesQueue::FrameToRecord()
{
VideoFrame* ptrRenderFrame = NULL;
I420VideoFrame* ptrRenderFrame = NULL;
ListItem* item = _incomingFrames.First();
while(item)
{
VideoFrame* ptrOldestFrameInList =
static_cast<VideoFrame*>(item->GetItem());
if (ptrOldestFrameInList->RenderTimeMs() <=
I420VideoFrame* ptrOldestFrameInList =
static_cast<I420VideoFrame*>(item->GetItem());
if (ptrOldestFrameInList->render_time_ms() <=
TickTime::MillisecondTimestamp() + _renderDelayMs)
{
if (ptrRenderFrame)
@ -129,13 +122,13 @@ VideoFrame* VideoFramesQueue::FrameToRecord()
return ptrRenderFrame;
}
WebRtc_Word32 VideoFramesQueue::ReturnFrame(VideoFrame* ptrOldFrame)
WebRtc_Word32 VideoFramesQueue::ReturnFrame(I420VideoFrame* ptrOldFrame)
{
ptrOldFrame->SetTimeStamp(0);
ptrOldFrame->SetWidth(0);
ptrOldFrame->SetHeight(0);
ptrOldFrame->SetRenderTime(0);
ptrOldFrame->SetLength(0);
ptrOldFrame->set_timestamp(0);
ptrOldFrame->set_width(0);
ptrOldFrame->set_height(0);
ptrOldFrame->set_render_time_ms(0);
ptrOldFrame->ResetSize();
_emptyFrames.PushBack(ptrOldFrame);
return 0;
}

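A sketch of the recycle step above; per the I420VideoFrame interface, ResetSize() only zeroes the logical plane sizes, so a recycled frame keeps its allocations for the next AddFrame() (RecycleFrame is a hypothetical name):

    #include "common_video/interface/i420_video_frame.h"

    // Strip metadata from a frame before parking it on the empty-frames
    // list; the plane buffers stay allocated and are reused on AddFrame().
    void RecycleFrame(webrtc::I420VideoFrame* frame) {
      frame->set_timestamp(0);
      frame->set_width(0);
      frame->set_height(0);
      frame->set_render_time_ms(0);
      frame->ResetSize();  // Does not free or clear the underlying memory.
    }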
View File

@ -13,12 +13,12 @@
#ifdef WEBRTC_MODULE_UTILITY_VIDEO
#include "common_video/interface/i420_video_frame.h"
#include "engine_configurations.h"
#include "list_wrapper.h"
#include "typedefs.h"
namespace webrtc {
class VideoFrame;
class VideoFramesQueue
{
@ -27,12 +27,12 @@ public:
~VideoFramesQueue();
// Put newFrame (last) in the queue.
WebRtc_Word32 AddFrame(const VideoFrame& newFrame);
WebRtc_Word32 AddFrame(const I420VideoFrame& newFrame);
// Return the most current frame. I.e. the frame with the highest
// I420VideoFrame::render_time_ms() that is lower than
// TickTime::MillisecondTimestamp().
VideoFrame* FrameToRecord();
I420VideoFrame* FrameToRecord();
// Set the render delay estimate to renderDelay ms.
WebRtc_Word32 SetRenderDelay(WebRtc_UWord32 renderDelay);
@ -40,7 +40,7 @@ public:
protected:
// Make ptrOldFrame available for re-use. I.e. put it in the empty frames
// queue.
WebRtc_Word32 ReturnFrame(VideoFrame* ptrOldFrame);
WebRtc_Word32 ReturnFrame(I420VideoFrame* ptrOldFrame);
private:
// Don't allow the buffer to expand beyond KMaxNumberOfFrames VideoFrames.

View File

@ -122,10 +122,11 @@ int main(int /*argc*/, char** /*argv*/)
assert(fileRecorder.IsRecording());
WebRtc_UWord32 videoReadSize = static_cast<WebRtc_UWord32>( (videoCodec.width * videoCodec.height * 3.0) / 2.0);
webrtc::VideoFrame videoFrame;
videoFrame.VerifyAndAllocate(videoReadSize);
webrtc::I420VideoFrame videoFrame;
videoFrame.CreateEmptyFrame(videoCodec.width, videoCodec.height,
videoCodec.width,
(videoCodec.width + 1) / 2,
(videoCodec.width + 1) / 2);
int frameCount = 0;
bool audioNotDone = true;
@ -142,7 +143,7 @@ int main(int /*argc*/, char** /*argv*/)
break;
}
frameCount++;
videoNotDone = ( videoFrame.Length() > 0);
videoNotDone = !videoFrame.IsZeroSize();
videoFrame.set_render_time_ms(TickTime::MillisecondTimestamp());
if( videoNotDone)
{
@ -219,12 +220,14 @@ int main(int /*argc*/, char** /*argv*/)
audioFrame.sample_rate_hz_ = 8000;
// prepare the video frame
videoFrame.VerifyAndAllocate(KVideoWriteSize);
memset(videoFrame.Buffer(), 127, videoCodec.width * videoCodec.height);
memset(videoFrame.Buffer() +(videoCodec.width * videoCodec.height), 0, videoCodec.width * videoCodec.height/2);
videoFrame.SetLength(KVideoWriteSize);
videoFrame.SetHeight(videoCodec.height);
videoFrame.SetWidth(videoCodec.width);
int half_width = (videoCodec.width + 1) / 2;
int half_height = (videoCodec.height + 1) / 2;
videoFrame.CreateEmptyFrame(videoCodec.width, videoCodec.height,
videoCodec.width, half_width, half_width);
memset(videoFrame.buffer(kYPlane), 127,
videoCodec.width * videoCodec.height);
memset(videoFrame.buffer(kUPlane), 0, half_width * half_height);
memset(videoFrame.buffer(kVPlane), 0, half_width * half_height);
// write avi file, with 20 video frames
const int KWriteNumFrames = 20;
@ -310,10 +313,9 @@ int main(int /*argc*/, char** /*argv*/)
assert(fileRecorder.IsRecording());
WebRtc_UWord32 videoReadSize = static_cast<WebRtc_UWord32>( (videoCodec.width * videoCodec.height * 3.0) / 2.0);
webrtc::VideoFrame videoFrame;
videoFrame.VerifyAndAllocate(videoReadSize);
webrtc::I420VideoFrame videoFrame;
videoFrame.CreateEmptyFrame(videoCodec.width, videoCodec.height,
videoCodec.width, half_width,half_width);
int videoFrameCount = 0;
int audioFrameCount = 0;
@ -325,12 +327,12 @@ int main(int /*argc*/, char** /*argv*/)
{
if(filePlayer.TimeUntilNextVideoFrame() <= 0)
{
if(filePlayer.GetVideoFromFile( videoFrame) != 0)
if(filePlayer.GetVideoFromFile(videoFrame) != 0)
{
break;
}
videoFrameCount++;
videoNotDone = ( videoFrame.Length() > 0);
videoNotDone = !videoFrame.IsZeroSize();
if( videoNotDone)
{
assert(fileRecorder.RecordVideoToFile(videoFrame) == 0);

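A sketch of the frame setup these tests use, assuming the CreateEmptyFrame(width, height, stride_y, stride_u, stride_v) signature from the new interface; the flat 127 fill mirrors the external-capture test, and MakeTestFrame is a hypothetical name:

    #include <string.h>

    #include "common_video/interface/i420_video_frame.h"

    // Allocate an I420 frame with half-width chroma strides and fill all
    // three planes with a flat 127 test pattern.
    void MakeTestFrame(int width, int height, webrtc::I420VideoFrame* frame) {
      int half_width = (width + 1) / 2;
      int half_height = (height + 1) / 2;
      frame->CreateEmptyFrame(width, height, width, half_width, half_width);
      memset(frame->buffer(webrtc::kYPlane), 127, width * height);
      memset(frame->buffer(webrtc::kUPlane), 127, half_width * half_height);
      memset(frame->buffer(webrtc::kVPlane), 127, half_width * half_height);
    }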
View File

@ -14,6 +14,7 @@
// Includes
#include "typedefs.h"
#include "modules/interface/module_common_types.h"
#include "common_video/interface/i420_video_frame.h"
namespace webrtc
{
@ -132,8 +133,11 @@ class VideoCaptureDataCallback
{
public:
virtual void OnIncomingCapturedFrame(const WebRtc_Word32 id,
VideoFrame& videoFrame,
I420VideoFrame& videoFrame,
VideoCodecType codecType) = 0;
virtual void OnIncomingCapturedEncodedFrame(const WebRtc_Word32 id,
VideoFrame& videoFrame,
VideoCodecType codecType) = 0;
virtual void OnCaptureDelayChanged(const WebRtc_Word32 id,
const WebRtc_Word32 delay) = 0;
protected:

View File

@ -181,8 +181,8 @@ WebRtc_Word32 VideoCaptureImpl::CaptureDelay()
return _setCaptureDelay;
}
WebRtc_Word32 VideoCaptureImpl::DeliverCapturedFrame(VideoFrame& captureFrame,
WebRtc_Word64 capture_time, VideoCodecType codec_type) {
WebRtc_Word32 VideoCaptureImpl::DeliverCapturedFrame(I420VideoFrame&
captureFrame, WebRtc_Word64 capture_time, VideoCodecType codec_type) {
UpdateFrameCount(); // frame count used for local frame rate callback.
const bool callOnCaptureDelayChanged = _setCaptureDelay != _captureDelay;
@ -193,17 +193,17 @@ WebRtc_Word32 VideoCaptureImpl::DeliverCapturedFrame(VideoFrame& captureFrame,
// Set the capture time
if (capture_time != 0) {
captureFrame.SetRenderTime(capture_time);
captureFrame.set_render_time_ms(capture_time);
}
else {
captureFrame.SetRenderTime(TickTime::MillisecondTimestamp());
captureFrame.set_render_time_ms(TickTime::MillisecondTimestamp());
}
if (captureFrame.RenderTimeMs() == last_capture_time_) {
if (captureFrame.render_time_ms() == last_capture_time_) {
// We don't allow the same capture time for two frames, drop this one.
return -1;
}
last_capture_time_ = captureFrame.RenderTimeMs();
last_capture_time_ = captureFrame.render_time_ms();
if (_dataCallBack) {
if (callOnCaptureDelayChanged) {
@ -228,7 +228,7 @@ WebRtc_Word32 VideoCaptureImpl::DeliverEncodedCapturedFrame(
// Set the capture time
if (capture_time != 0) {
captureFrame.SetRenderTime(capture_time);
}
else {
captureFrame.SetRenderTime(TickTime::MillisecondTimestamp());
@ -244,7 +244,8 @@ WebRtc_Word32 VideoCaptureImpl::DeliverEncodedCapturedFrame(
if (callOnCaptureDelayChanged) {
_dataCallBack->OnCaptureDelayChanged(_id, _captureDelay);
}
_dataCallBack->OnIncomingCapturedFrame(_id, captureFrame, codec_type);
_dataCallBack->OnIncomingCapturedEncodedFrame(_id, captureFrame,
codec_type);
}
return 0;
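The capture-time handling above boils down to a stamp-and-dedup step; a sketch under that reading (StampAndCheck is a hypothetical name):

    #include "common_video/interface/i420_video_frame.h"
    #include "tick_util.h"
    #include "typedefs.h"

    // Stamp |frame| with the driver-supplied capture time, or with the wall
    // clock when none was given, and reject frames whose render time
    // collides with the previous frame's.
    bool StampAndCheck(webrtc::I420VideoFrame* frame,
                       WebRtc_Word64 capture_time,
                       WebRtc_Word64* last_capture_time) {
      frame->set_render_time_ms(capture_time != 0 ?
          capture_time : webrtc::TickTime::MillisecondTimestamp());
      if (frame->render_time_ms() == *last_capture_time)
        return false;  // Two frames may not share a capture time.
      *last_capture_time = frame->render_time_ms();
      return true;
    }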
@ -282,23 +283,18 @@ WebRtc_Word32 VideoCaptureImpl::IncomingFrame(
return -1;
}
// Allocate I420 buffer.
int requiredLength = CalcBufferSize(kI420, width, abs(height));
_captureFrame.VerifyAndAllocate(requiredLength);
if (!_captureFrame.Buffer())
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"Failed to allocate frame buffer.");
return -1;
}
memset(_captureFrame.Buffer(), 0, _captureFrame.Size());
_captureFrame.SetWidth(width);
// Setting absolute height (in case it was negative).
// In Windows, the image starts bottom left, instead of top left.
// Setting a negative source height, inverts the image (within LibYuv).
_captureFrame.SetHeight(abs(height));
// TODO(mikhal): Set stride when available.
int ret = _captureFrame.CreateEmptyFrame(width, abs(height),
width, (width + 1) / 2,
(width + 1) / 2);
if (ret < 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"Failed to allocate I420 frame.");
return -1;
}
const int conversionResult = ConvertToI420(commonVideoType,
videoFrame,
0, 0, // No cropping
@ -313,7 +309,6 @@ WebRtc_Word32 VideoCaptureImpl::IncomingFrame(
frameInfo.rawType);
return -1;
}
_captureFrame.SetLength(requiredLength);
DeliverCapturedFrame(_captureFrame, captureTime, frameInfo.codecType);
}
else // Encoded format
@ -328,7 +323,6 @@ WebRtc_Word32 VideoCaptureImpl::IncomingFrame(
frameInfo.codecType);
}
const WebRtc_UWord32 processTime =
(WebRtc_UWord32)(TickTime::Now() - startProcessTime).Milliseconds();
if (processTime > 10) // If the process time is too long, MJPG will not work well.
@ -345,52 +339,23 @@ WebRtc_Word32 VideoCaptureImpl::IncomingFrameI420(
const VideoFrameI420& video_frame, WebRtc_Word64 captureTime) {
CriticalSectionScoped cs(&_callBackCs);
// Allocate I420 buffer
int frame_size = CalcBufferSize(kI420,
video_frame.width,
video_frame.height);
_captureFrame.VerifyAndAllocate(frame_size);
if (!_captureFrame.Buffer()) {
// TODO(mikhal): Do we take the stride as is, or do we align it?
int size_y = video_frame.height * video_frame.y_pitch;
int size_u = video_frame.u_pitch * (video_frame.height + 1) / 2;
int size_v = video_frame.v_pitch * (video_frame.height + 1) / 2;
// TODO(mikhal): Can we use Swap here? This will do a memcpy.
int ret = _captureFrame.CreateFrame(size_y, video_frame.y_plane,
size_u, video_frame.u_plane,
size_v, video_frame.v_plane,
video_frame.width, video_frame.height,
video_frame.y_pitch, video_frame.u_pitch,
video_frame.v_pitch);
if (ret < 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"Failed to allocate frame buffer.");
"Failed to create I420VideoFrame");
return -1;
}
// Copy planes to the _captureFrame
int y_width = video_frame.width;
int uv_width = video_frame.width / 2;
int y_rows = video_frame.height;
int uv_rows = video_frame.height / 2; // I420
unsigned char* current_pointer = _captureFrame.Buffer();
unsigned char* y_plane = video_frame.y_plane;
unsigned char* u_plane = video_frame.u_plane;
unsigned char* v_plane = video_frame.v_plane;
// Copy Y
for (int i = 0; i < y_rows; ++i) {
memcpy(current_pointer, y_plane, y_width);
// Remove the alignment which ViE doesn't support.
current_pointer += y_width;
y_plane += video_frame.y_pitch;
}
// Copy U
for (int i = 0; i < uv_rows; ++i) {
memcpy(current_pointer, u_plane, uv_width);
// Remove the alignment which ViE doesn't support.
current_pointer += uv_width;
u_plane += video_frame.u_pitch;
}
// Copy V
for (int i = 0; i < uv_rows; ++i) {
memcpy(current_pointer, v_plane, uv_width);
// Remove the alignment which ViE doesn't support.
current_pointer += uv_width;
v_plane += video_frame.v_pitch;
}
_captureFrame.SetLength(frame_size);
_captureFrame.SetWidth(video_frame.width);
_captureFrame.SetHeight(video_frame.height);
DeliverCapturedFrame(_captureFrame, captureTime, kVideoCodecUnknown);
return 0;

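A sketch of the CreateFrame() usage above: it copies three externally owned, possibly padded planes, with per-plane sizes derived from the pitches (CopyPitchedPlanes is a hypothetical name):

    #include "common_video/interface/i420_video_frame.h"
    #include "typedefs.h"

    // Copy three externally owned, possibly padded planes into |out|.
    // CreateFrame() copies the data, so the source planes may be released
    // as soon as the call returns.
    int CopyPitchedPlanes(const uint8_t* y, const uint8_t* u,
                          const uint8_t* v, int width, int height,
                          int y_pitch, int u_pitch, int v_pitch,
                          webrtc::I420VideoFrame* out) {
      int size_y = height * y_pitch;
      int size_u = u_pitch * ((height + 1) / 2);
      int size_v = v_pitch * ((height + 1) / 2);
      return out->CreateFrame(size_y, y, size_u, u, size_v, v,
                              width, height, y_pitch, u_pitch, v_pitch);
    }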
View File

@ -18,6 +18,7 @@
#include "video_capture.h"
#include "video_capture_config.h"
#include "tick_util.h"
#include "common_video/interface/i420_video_frame.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
namespace webrtc
@ -97,9 +98,10 @@ public:
protected:
VideoCaptureImpl(const WebRtc_Word32 id);
virtual ~VideoCaptureImpl();
WebRtc_Word32 DeliverCapturedFrame(
VideoFrame& captureFrame,
WebRtc_Word64 capture_time, VideoCodecType codec_type);
// TODO(mikhal): Remove codec_type.
WebRtc_Word32 DeliverCapturedFrame(I420VideoFrame& captureFrame,
WebRtc_Word64 capture_time,
VideoCodecType codec_type);
WebRtc_Word32 DeliverEncodedCapturedFrame(
VideoFrame& captureFrame,
WebRtc_Word64 capture_time, VideoCodecType codec_type);
@ -129,7 +131,7 @@ private:
TickTime _incomingFrameTimes[kFrameRateCountHistorySize];// timestamp for local captured frames
VideoRotationMode _rotateFrame; //Set if the frame should be rotated by the capture module.
VideoFrame _captureFrame;
I420VideoFrame _captureFrame;
VideoFrame _capture_encoded_frame;
// Used to make sure incoming timestamp is increasing for every frame.

View File

@ -14,6 +14,8 @@
#include "modules/utility/interface/process_thread.h"
#include "modules/video_capture/main/interface/video_capture.h"
#include "modules/video_capture/main/interface/video_capture_factory.h"
#include "common_video/interface/i420_video_frame.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "system_wrappers/interface/critical_section_wrapper.h"
#include "system_wrappers/interface/scoped_ptr.h"
#include "system_wrappers/interface/scoped_refptr.h"
@ -57,63 +59,80 @@ static const int kTestWidth = 352;
static const int kTestFramerate = 30;
// Compares the content of two video frames.
static bool CompareFrames(const webrtc::VideoFrame& frame1,
const webrtc::VideoFrame& frame2) {
static bool CompareFrames(const webrtc::I420VideoFrame& frame1,
const webrtc::I420VideoFrame& frame2) {
bool result =
(frame1.Length() == frame2.Length()) &&
(frame1.Width() == frame2.Width()) &&
(frame1.Height() == frame2.Height());
(frame1.stride(webrtc::kYPlane) == frame2.stride(webrtc::kYPlane)) &&
(frame1.stride(webrtc::kUPlane) == frame2.stride(webrtc::kUPlane)) &&
(frame1.stride(webrtc::kVPlane) == frame2.stride(webrtc::kVPlane)) &&
(frame1.width() == frame2.width()) &&
(frame1.height() == frame2.height());
for (unsigned int i = 0; i < frame1.Length() && result; ++i)
result = (*(frame1.Buffer()+i) == *(frame2.Buffer()+i));
return result;
if (!result)
return false;
for (int plane = 0; plane < webrtc::kNumOfPlanes; plane ++) {
webrtc::PlaneType plane_type = static_cast<webrtc::PlaneType>(plane);
int allocated_size1 = frame1.allocated_size(plane_type);
int allocated_size2 = frame2.allocated_size(plane_type);
if (allocated_size1 != allocated_size2)
return false;
const uint8_t* plane_buffer1 = frame1.buffer(plane_type);
const uint8_t* plane_buffer2 = frame2.buffer(plane_type);
if (memcmp(plane_buffer1, plane_buffer2, allocated_size1))
return false;
}
return true;
}
// Compares the content of a I420 frame in planar form and video frame.
// Compares the content of an I420 frame in planar form and the new video frame.
static bool CompareFrames(const webrtc::VideoFrameI420& frame1,
const webrtc::VideoFrame& frame2) {
if (frame1.width != frame2.Width() ||
frame1.height != frame2.Height()) {
const webrtc::I420VideoFrame& frame2) {
if (frame1.width != frame2.width() ||
frame1.height != frame2.height()) {
return false;
}
// Compare Y
unsigned char* y_plane = frame1.y_plane;
for (unsigned int i = 0; i < frame2.Height(); ++i) {
for (unsigned int j = 0; j < frame2.Width(); ++j) {
if (*y_plane != *(frame2.Buffer()+i*frame2.Width() +j))
const unsigned char* y_plane = frame1.y_plane;
const unsigned char* y_plane2 = frame2.buffer(webrtc::kYPlane);
for (int i = 0; i < frame2.height(); ++i) {
for (int j = 0; j < frame2.width(); ++j) {
if (*y_plane != *y_plane2)
return false;
++y_plane;
++y_plane2;
}
y_plane += frame1.y_pitch - frame1.width;
y_plane2 += frame2.stride(webrtc::kYPlane) - frame2.width();
}
// Compare U
unsigned char* u_plane = frame1.u_plane;
for (unsigned int i = 0; i < frame2.Height() /2; ++i) {
for (unsigned int j = 0; j < frame2.Width() /2; ++j) {
if (*u_plane !=*(
frame2.Buffer()+frame2.Width() * frame2.Height() +
i*frame2.Width() / 2 + j)) {
const unsigned char* u_plane = frame1.u_plane;
const unsigned char* u_plane2 = frame2.buffer(webrtc::kUPlane);
for (int i = 0; i < (frame2.height() + 1) / 2; ++i) {
for (int j = 0; j < (frame2.width() + 1) / 2; ++j) {
if (*u_plane != *u_plane2)
return false;
}
++u_plane;
++u_plane2;
}
u_plane += frame1.u_pitch - frame1.width / 2;
u_plane += frame1.u_pitch - (frame1.width + 1) / 2;
u_plane2 += frame2.stride(webrtc::kUPlane) - (frame2.width() + 1) / 2;
}
// Compare V
unsigned char* v_plane = frame1.v_plane;
for (unsigned int i = 0; i < frame2.Height() /2; ++i) {
for (unsigned int j = 0; j < frame2.Width() /2; ++j) {
if (*v_plane != *(
frame2.Buffer()+frame2.Width() * frame2.Height()* 5 / 4 +
i*frame2.Width() / 2 + j)) {
const unsigned char* v_plane2 = frame2.buffer(webrtc::kVPlane);
for (int i = 0; i < frame2.height() / 2; ++i) {
for (int j = 0; j < frame2.width() / 2; ++j) {
if (*v_plane != *v_plane2) {
return false;
}
++v_plane;
++v_plane2;
}
v_plane += frame1.v_pitch - frame1.width / 2;
v_plane += frame1.v_pitch - (frame1.width + 1) / 2;
v_plane2 += frame2.stride(webrtc::kVPlane) - (frame2.width() + 1) / 2;
}
return true;
}
@ -135,32 +154,38 @@ class TestVideoCaptureCallback : public VideoCaptureDataCallback {
}
virtual void OnIncomingCapturedFrame(const WebRtc_Word32 id,
webrtc::VideoFrame& videoFrame,
webrtc::I420VideoFrame& videoFrame,
webrtc::VideoCodecType codecType) {
CriticalSectionScoped cs(capture_cs_.get());
int height = static_cast<int>(videoFrame.Height());
int width = static_cast<int>(videoFrame.Width());
int height = videoFrame.height();
int width = videoFrame.width();
EXPECT_EQ(height, capability_.height);
EXPECT_EQ(width, capability_.width);
// Render timestamp should be the time now.
EXPECT_TRUE(
videoFrame.RenderTimeMs() >= TickTime::MillisecondTimestamp()-30 &&
videoFrame.RenderTimeMs() <= TickTime::MillisecondTimestamp());
videoFrame.render_time_ms() >= TickTime::MillisecondTimestamp()-30 &&
videoFrame.render_time_ms() <= TickTime::MillisecondTimestamp());
if ((videoFrame.RenderTimeMs() >
if ((videoFrame.render_time_ms() >
last_render_time_ms_ + (1000 * 1.1) / capability_.maxFPS &&
last_render_time_ms_ > 0) ||
(videoFrame.RenderTimeMs() <
(videoFrame.render_time_ms() <
last_render_time_ms_ + (1000 * 0.9) / capability_.maxFPS &&
last_render_time_ms_ > 0)) {
timing_warnings_++;
}
incoming_frames_++;
last_render_time_ms_ = videoFrame.RenderTimeMs();
last_render_time_ms_ = videoFrame.render_time_ms();
last_frame_.CopyFrame(videoFrame);
}
virtual void OnIncomingCapturedEncodedFrame(const WebRtc_Word32 id,
webrtc::VideoFrame& videoFrame,
webrtc::VideoCodecType codecType)
{
assert(!"NOTIMPLEMENTED");
}
virtual void OnCaptureDelayChanged(const WebRtc_Word32 id,
const WebRtc_Word32 delay) {
@ -193,7 +218,7 @@ class TestVideoCaptureCallback : public VideoCaptureDataCallback {
return capability_;
}
bool CompareLastFrame(const webrtc::VideoFrame& frame) {
bool CompareLastFrame(const webrtc::I420VideoFrame& frame) {
CriticalSectionScoped cs(capture_cs_.get());
return CompareFrames(last_frame_, frame);
}
@ -210,7 +235,7 @@ class TestVideoCaptureCallback : public VideoCaptureDataCallback {
WebRtc_Word64 last_render_time_ms_;
int incoming_frames_;
int timing_warnings_;
webrtc::VideoFrame last_frame_;
webrtc::I420VideoFrame last_frame_;
};
class TestVideoCaptureFeedBack : public VideoCaptureFeedBack {
@ -421,12 +446,14 @@ class VideoCaptureExternalTest : public testing::Test {
capability.maxFPS = kTestFramerate;
capture_callback_.SetExpectedCapability(capability);
test_frame_.VerifyAndAllocate(kTestWidth * kTestHeight * 3 / 2);
test_frame_.SetLength(kTestWidth * kTestHeight * 3 / 2);
test_frame_.SetHeight(kTestHeight);
test_frame_.SetWidth(kTestWidth);
test_frame_.CreateEmptyFrame(kTestWidth, kTestHeight, kTestWidth,
((kTestWidth + 1) / 2), (kTestWidth + 1) / 2);
SleepMs(1); // Wait 1ms so that two tests can't have the same timestamp.
memset(test_frame_.Buffer(), 127, test_frame_.Length());
memset(test_frame_.buffer(webrtc::kYPlane), 127, kTestWidth * kTestHeight);
memset(test_frame_.buffer(webrtc::kUPlane), 127,
((kTestWidth + 1) / 2) * ((kTestHeight + 1) / 2));
memset(test_frame_.buffer(webrtc::kVPlane), 127,
((kTestWidth + 1) / 2) * ((kTestHeight + 1) / 2));
EXPECT_EQ(0, capture_module_->RegisterCaptureDataCallback(
capture_callback_));
@ -443,16 +470,20 @@ class VideoCaptureExternalTest : public testing::Test {
webrtc::VideoCaptureExternal* capture_input_interface_;
webrtc::scoped_refptr<VideoCaptureModule> capture_module_;
webrtc::ProcessThread* process_module_;
webrtc::VideoFrame test_frame_;
webrtc::I420VideoFrame test_frame_;
TestVideoCaptureCallback capture_callback_;
TestVideoCaptureFeedBack capture_feedback_;
};
// Test input of external video frames.
TEST_F(VideoCaptureExternalTest , TestExternalCapture) {
EXPECT_EQ(0, capture_input_interface_->IncomingFrame(
test_frame_.Buffer(), test_frame_.Length(),
capture_callback_.capability(), 0));
TEST_F(VideoCaptureExternalTest, TestExternalCapture) {
unsigned int length = webrtc::CalcBufferSize(webrtc::kI420,
test_frame_.width(),
test_frame_.height());
webrtc::scoped_array<uint8_t> test_buffer(new uint8_t[length]);
webrtc::ExtractBuffer(test_frame_, length, test_buffer.get());
EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
length, capture_callback_.capability(), 0));
EXPECT_TRUE(capture_callback_.CompareLastFrame(test_frame_));
}
@ -463,7 +494,7 @@ TEST_F(VideoCaptureExternalTest, DISABLED_TestExternalCaptureI420) {
webrtc::VideoFrameI420 frame_i420;
frame_i420.width = kTestWidth;
frame_i420.height = kTestHeight;
frame_i420.y_plane = test_frame_.Buffer();
frame_i420.y_plane = test_frame_.buffer(webrtc::kYPlane);
frame_i420.u_plane = frame_i420.y_plane + (kTestWidth * kTestHeight);
frame_i420.v_plane = frame_i420.u_plane + ((kTestWidth * kTestHeight) >> 2);
frame_i420.y_pitch = kTestWidth;
@ -473,26 +504,34 @@ TEST_F(VideoCaptureExternalTest, DISABLED_TestExternalCaptureI420) {
EXPECT_TRUE(capture_callback_.CompareLastFrame(frame_i420));
// Test with a frame with pitch not equal to width
memset(test_frame_.Buffer(), 0xAA, test_frame_.Length());
webrtc::VideoFrame aligned_test_frame;
memset(test_frame_.buffer(webrtc::kYPlane), 0xAA,
test_frame_.allocated_size(webrtc::kYPlane));
memset(test_frame_.buffer(webrtc::kUPlane), 0xAA,
test_frame_.allocated_size(webrtc::kUPlane));
memset(test_frame_.buffer(webrtc::kVPlane), 0xAA,
test_frame_.allocated_size(webrtc::kVPlane));
webrtc::I420VideoFrame aligned_test_frame;
int y_pitch = kTestWidth + 2;
int u_pitch = kTestWidth / 2 + 1;
int v_pitch = u_pitch;
aligned_test_frame.VerifyAndAllocate(kTestHeight * y_pitch +
(kTestHeight / 2) * u_pitch +
(kTestHeight / 2) * v_pitch);
aligned_test_frame.SetLength(aligned_test_frame.Size());
memset(aligned_test_frame.Buffer(), 0, aligned_test_frame.Length());
aligned_test_frame.CreateEmptyFrame(kTestWidth, kTestHeight,
y_pitch, u_pitch, v_pitch);
memset(aligned_test_frame.buffer(webrtc::kYPlane), 0,
kTestWidth * kTestHeight);
memset(aligned_test_frame.buffer(webrtc::kUPlane), 0,
(kTestWidth + 1) / 2 * (kTestHeight + 1) / 2);
memset(aligned_test_frame.buffer(webrtc::kVPlane), 0,
(kTestWidth + 1) / 2 * (kTestHeight + 1) / 2);
// Copy the test_frame_ to aligned_test_frame.
int y_width = kTestWidth;
int uv_width = kTestWidth / 2;
int y_rows = kTestHeight;
int uv_rows = kTestHeight / 2;
unsigned char* current_pointer = aligned_test_frame.Buffer();
unsigned char* y_plane = test_frame_.Buffer();
unsigned char* u_plane = y_plane + kTestWidth * kTestHeight;
unsigned char* v_plane = u_plane + ((kTestWidth * kTestHeight) >> 2);
unsigned char* y_plane = test_frame_.buffer(webrtc::kYPlane);
unsigned char* u_plane = test_frame_.buffer(webrtc::kUPlane);
unsigned char* v_plane = test_frame_.buffer(webrtc::kVPlane);
// Copy Y
unsigned char* current_pointer = aligned_test_frame.buffer(webrtc::kYPlane);
for (int i = 0; i < y_rows; ++i) {
memcpy(current_pointer, y_plane, y_width);
// Remove the alignment which ViE doesn't support.
@ -500,6 +539,7 @@ TEST_F(VideoCaptureExternalTest, DISABLED_TestExternalCaptureI420) {
y_plane += y_width;
}
// Copy U
current_pointer = aligned_test_frame.buffer(webrtc::kUPlane);
for (int i = 0; i < uv_rows; ++i) {
memcpy(current_pointer, u_plane, uv_width);
// Remove the alignment which ViE doesn't support.
@ -507,6 +547,7 @@ TEST_F(VideoCaptureExternalTest, DISABLED_TestExternalCaptureI420) {
u_plane += uv_width;
}
// Copy V
current_pointer = aligned_test_frame.buffer(webrtc::kVPlane);
for (int i = 0; i < uv_rows; ++i) {
memcpy(current_pointer, v_plane, uv_width);
// Remove the alignment which ViE doesn't support.
@ -515,9 +556,9 @@ TEST_F(VideoCaptureExternalTest, DISABLED_TestExternalCaptureI420) {
}
frame_i420.width = kTestWidth;
frame_i420.height = kTestHeight;
frame_i420.y_plane = aligned_test_frame.Buffer();
frame_i420.u_plane = frame_i420.y_plane + (y_pitch * y_rows);
frame_i420.v_plane = frame_i420.u_plane + (u_pitch * uv_rows);
frame_i420.y_plane = aligned_test_frame.buffer(webrtc::kYPlane);
frame_i420.u_plane = aligned_test_frame.buffer(webrtc::kUPlane);
frame_i420.v_plane = aligned_test_frame.buffer(webrtc::kVPlane);
frame_i420.y_pitch = y_pitch;
frame_i420.u_pitch = u_pitch;
frame_i420.v_pitch = v_pitch;
@ -532,9 +573,13 @@ TEST_F(VideoCaptureExternalTest , FrameRate) {
TickTime startTime = TickTime::Now();
while ((TickTime::Now() - startTime).Milliseconds() < testTime * 1000) {
EXPECT_EQ(0, capture_input_interface_->IncomingFrame(
test_frame_.Buffer(), test_frame_.Length(),
capture_callback_.capability(), 0));
unsigned int length = webrtc::CalcBufferSize(webrtc::kI420,
test_frame_.width(),
test_frame_.height());
webrtc::scoped_array<uint8_t> test_buffer(new uint8_t[length]);
webrtc::ExtractBuffer(test_frame_, length, test_buffer.get());
EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
length, capture_callback_.capability(), 0));
SleepMs(100);
}
EXPECT_TRUE(capture_feedback_.frame_rate() >= 8 &&
@ -544,9 +589,13 @@ TEST_F(VideoCaptureExternalTest , FrameRate) {
startTime = TickTime::Now();
while ((TickTime::Now() - startTime).Milliseconds() < testTime * 1000) {
EXPECT_EQ(0, capture_input_interface_->IncomingFrame(
test_frame_.Buffer(), test_frame_.Length(),
capture_callback_.capability(), 0));
unsigned int length = webrtc::CalcBufferSize(webrtc::kI420,
test_frame_.width(),
test_frame_.height());
webrtc::scoped_array<uint8_t> test_buffer(new uint8_t[length]);
webrtc::ExtractBuffer(test_frame_, length, test_buffer.get());
EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
length, capture_callback_.capability(), 0));
SleepMs(1000 / 30);
}
EXPECT_EQ(webrtc::Cleared, capture_feedback_.alarm());

View File

@ -47,7 +47,7 @@ public:
//
// Return value : WEBRTC_VIDEO_CODEC_OK if OK.
// <0 - Error
virtual int Encode(const VideoFrame& inputImage,
virtual int Encode(const I420VideoFrame& inputImage,
const CodecSpecificInfo* /*codecSpecificInfo*/,
const std::vector<VideoFrameType>* /*frame_types*/);
@ -138,7 +138,7 @@ public:
private:
VideoFrame _decodedImage;
I420VideoFrame _decodedImage;
int _width;
int _height;
bool _inited;

View File

@ -14,7 +14,6 @@
#include "common_video/libyuv/include/webrtc_libyuv.h"
namespace webrtc
{
@ -76,9 +75,9 @@ int I420Encoder::InitEncode(const VideoCodec* codecSettings,
int I420Encoder::Encode(const VideoFrame& inputImage,
const CodecSpecificInfo* /*codecSpecificInfo*/,
const std::vector<VideoFrameType>* /*frame_types*/) {
int I420Encoder::Encode(const I420VideoFrame& inputImage,
const CodecSpecificInfo* /*codecSpecificInfo*/,
const std::vector<VideoFrameType>* /*frame_types*/) {
if (!_inited) {
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}
@ -87,29 +86,32 @@ int I420Encoder::Encode(const VideoFrame& inputImage,
}
_encodedImage._frameType = kKeyFrame; // No coding.
_encodedImage._timeStamp = inputImage.TimeStamp();
_encodedImage._encodedHeight = inputImage.Height();
_encodedImage._encodedWidth = inputImage.Width();
if (inputImage.Length() > _encodedImage._size) {
_encodedImage._timeStamp = inputImage.timestamp();
_encodedImage._encodedHeight = inputImage.height();
_encodedImage._encodedWidth = inputImage.width();
int req_length = CalcBufferSize(kI420, inputImage.width(),
inputImage.height());
if (_encodedImage._size < static_cast<unsigned int>(req_length)) {
// Allocating encoded memory.
if (_encodedImage._buffer != NULL) {
delete [] _encodedImage._buffer;
_encodedImage._buffer = NULL;
_encodedImage._size = 0;
}
const uint32_t newSize = CalcBufferSize(kI420,
_encodedImage._encodedWidth,
_encodedImage._encodedHeight);
uint8_t* newBuffer = new uint8_t[newSize];
uint8_t* newBuffer = new uint8_t[req_length];
if (newBuffer == NULL) {
return WEBRTC_VIDEO_CODEC_MEMORY;
}
_encodedImage._size = newSize;
_encodedImage._size = req_length;
_encodedImage._buffer = newBuffer;
}
memcpy(_encodedImage._buffer, inputImage.Buffer(), inputImage.Length());
_encodedImage._length = inputImage.Length();
int ret_length = ExtractBuffer(inputImage, req_length, _encodedImage._buffer);
if (ret_length < 0)
return WEBRTC_VIDEO_CODEC_MEMORY;
_encodedImage._length = ret_length;
_encodedCompleteCallback->Encoded(_encodedImage);
return WEBRTC_VIDEO_CODEC_OK;
}
@ -174,12 +176,24 @@ I420Decoder::Decode(const EncodedImage& inputImage,
}
// Set decoded image parameters.
if (_decodedImage.CopyFrame(inputImage._length, inputImage._buffer) < 0) {
int half_width = (_width + 1) / 2;
int half_height = (_height + 1) / 2;
int size_y = _width * _height;
int size_uv = half_width * half_height;
const uint8_t* buffer_y = inputImage._buffer;
const uint8_t* buffer_u = buffer_y + size_y;
const uint8_t* buffer_v = buffer_u + size_uv;
// TODO(mikhal): Do we need an aligned stride?
int ret = _decodedImage.CreateFrame(size_y, buffer_y,
size_uv, buffer_u,
size_uv, buffer_v,
_width, _height,
_width, half_width, half_width);
if (ret < 0) {
return WEBRTC_VIDEO_CODEC_MEMORY;
}
_decodedImage.SetHeight(_height);
_decodedImage.SetWidth(_width);
_decodedImage.SetTimeStamp(inputImage._timeStamp);
_decodedImage.set_timestamp(inputImage._timeStamp);
_decodeCompleteCallback->Decoded(_decodedImage);
return WEBRTC_VIDEO_CODEC_OK;
@ -193,7 +207,6 @@ I420Decoder::RegisterDecodeCompleteCallback(DecodedImageCallback* callback) {
int
I420Decoder::Release() {
_decodedImage.Free();
_inited = false;
return WEBRTC_VIDEO_CODEC_OK;
}

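Since the I420 "encoder" performs no compression, Encode() reduces to sizing the output buffer and extracting the packed planes; a sketch under that reading (EncodeI420 is a hypothetical name, and the grow-only buffer check is an assumption):

    #include "common_video/interface/i420_video_frame.h"
    #include "common_video/interface/video_image.h"
    #include "common_video/libyuv/include/webrtc_libyuv.h"
    #include "typedefs.h"

    // "Encoding" I420 is a copy: size the output buffer, then extract the
    // packed planes into it.
    int EncodeI420(const webrtc::I420VideoFrame& input,
                   webrtc::EncodedImage* out) {
      int req_length =
          webrtc::CalcBufferSize(webrtc::kI420, input.width(), input.height());
      if (out->_size < static_cast<unsigned int>(req_length)) {
        delete [] out->_buffer;  // Deleting NULL is a no-op.
        out->_buffer = new uint8_t[req_length];
        out->_size = req_length;
      }
      int ret_length = webrtc::ExtractBuffer(input, req_length, out->_buffer);
      if (ret_length < 0)
        return -1;
      out->_length = ret_length;
      return 0;
    }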
View File

@ -37,7 +37,7 @@ class MockVideoEncoder : public VideoEncoder {
WebRtc_Word32 numberOfCores,
WebRtc_UWord32 maxPayloadSize));
MOCK_METHOD3(Encode,
WebRtc_Word32(const VideoFrame& inputImage,
WebRtc_Word32(const I420VideoFrame& inputImage,
const CodecSpecificInfo* codecSpecificInfo,
const std::vector<VideoFrameType>* frame_types));
MOCK_METHOD1(RegisterEncodeCompleteCallback,
@ -57,7 +57,7 @@ class MockVideoEncoder : public VideoEncoder {
class MockDecodedImageCallback : public DecodedImageCallback {
public:
MOCK_METHOD1(Decoded,
WebRtc_Word32(VideoFrame& decodedImage));
WebRtc_Word32(I420VideoFrame& decodedImage));
MOCK_METHOD1(ReceivedDecodedReferenceFrame,
WebRtc_Word32(const WebRtc_UWord64 pictureId));
MOCK_METHOD1(ReceivedDecodedFrame,

View File

@ -14,9 +14,11 @@
#include <vector>
#include "common_types.h"
#include "common_video/interface/i420_video_frame.h"
#include "modules/interface/module_common_types.h"
#include "modules/video_coding/codecs/interface/video_error_codes.h"
#include "common_video/interface/video_image.h"
#include "typedefs.h"
namespace webrtc
@ -102,7 +104,7 @@ public:
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0
// otherwise.
virtual WebRtc_Word32 Encode(
const VideoFrame& inputImage,
const I420VideoFrame& inputImage,
const CodecSpecificInfo* codecSpecificInfo,
const std::vector<VideoFrameType>* frame_types) = 0;
@ -167,7 +169,7 @@ public:
// - decodedImage : The decoded image.
//
// Return value : 0 if OK, < 0 otherwise.
virtual WebRtc_Word32 Decoded(VideoFrame& decodedImage) = 0;
virtual WebRtc_Word32 Decoded(I420VideoFrame& decodedImage) = 0;
virtual WebRtc_Word32 ReceivedDecodedReferenceFrame(const WebRtc_UWord64 pictureId) {return -1;}

View File

@ -58,18 +58,11 @@ bool VideoProcessorImpl::Init() {
// Calculate a factor used for bit rate calculations:
bit_rate_factor_ = config_.codec_settings->maxFramerate * 0.001 * 8; // bits
int frame_length_in_bytes = frame_reader_->FrameLength();
// Initialize data structures used by the encoder/decoder APIs
int frame_length_in_bytes = frame_reader_->FrameLength();
source_buffer_ = new WebRtc_UWord8[frame_length_in_bytes];
last_successful_frame_buffer_ = new WebRtc_UWord8[frame_length_in_bytes];
// Set fixed properties common for all frames:
source_frame_.SetWidth(config_.codec_settings->width);
source_frame_.SetHeight(config_.codec_settings->height);
source_frame_.VerifyAndAllocate(frame_length_in_bytes);
source_frame_.SetLength(frame_length_in_bytes);
// Set fixed properties common for all frames.
// To keep track of spatial resize actions by encoder.
last_encoder_frame_width_ = config_.codec_settings->width;
last_encoder_frame_height_ = config_.codec_settings->height;
@ -169,15 +162,24 @@ bool VideoProcessorImpl::ProcessFrame(int frame_number) {
}
if (frame_reader_->ReadFrame(source_buffer_)) {
// Copy the source frame to the newly read frame data.
// Length is common for all frames.
source_frame_.CopyFrame(source_frame_.Length(), source_buffer_);
int size_y = config_.codec_settings->width * config_.codec_settings->height;
int half_width = (config_.codec_settings->width + 1) / 2;
int half_height = (config_.codec_settings->height + 1) / 2;
int size_uv = half_width * half_height;
source_frame_.CreateFrame(size_y, source_buffer_,
size_uv, source_buffer_ + size_y,
size_uv, source_buffer_ + size_y + size_uv,
config_.codec_settings->width,
config_.codec_settings->height,
config_.codec_settings->width,
half_width, half_width);
// Ensure we have a new statistics data object we can fill:
FrameStatistic& stat = stats_->NewFrame(frame_number);
encode_start_ = TickTime::Now();
// Use the frame number as "timestamp" to identify frames
source_frame_.SetTimeStamp(frame_number);
source_frame_.set_timestamp(frame_number);
// Decide if we're going to force a keyframe:
std::vector<VideoFrameType> frame_types(1, kDeltaFrame);
@ -273,9 +275,9 @@ void VideoProcessorImpl::FrameEncoded(EncodedImage* encoded_image) {
last_frame_missing_ = encoded_image->_length == 0;
}
void VideoProcessorImpl::FrameDecoded(const VideoFrame& image) {
void VideoProcessorImpl::FrameDecoded(const I420VideoFrame& image) {
TickTime decode_stop = TickTime::Now();
int frame_number = image.TimeStamp();
int frame_number = image.timestamp();
// Report stats
FrameStatistic& stat = stats_->stats_[frame_number];
stat.decode_time_in_us = GetElapsedTimeMicroseconds(decode_start_,
@ -283,18 +285,18 @@ void VideoProcessorImpl::FrameDecoded(const VideoFrame& image) {
stat.decoding_successful = true;
// Check for resize action (either down or up):
if (static_cast<int>(image.Width()) != last_encoder_frame_width_ ||
static_cast<int>(image.Height()) != last_encoder_frame_height_ ) {
if (static_cast<int>(image.width()) != last_encoder_frame_width_ ||
static_cast<int>(image.height()) != last_encoder_frame_height_ ) {
++num_spatial_resizes_;
last_encoder_frame_width_ = image.Width();
last_encoder_frame_height_ = image.Height();
last_encoder_frame_width_ = image.width();
last_encoder_frame_height_ = image.height();
}
// Check if codec size is different from native/original size, and if so,
// upsample back to original size: needed for PSNR and SSIM computations.
if (image.Width() != config_.codec_settings->width ||
image.Height() != config_.codec_settings->height) {
VideoFrame up_image;
int ret_val = scaler_.Set(image.Width(), image.Height(),
if (image.width() != config_.codec_settings->width ||
image.height() != config_.codec_settings->height) {
I420VideoFrame up_image;
int ret_val = scaler_.Set(image.width(), image.height(),
config_.codec_settings->width,
config_.codec_settings->height,
kI420, kI420, kScaleBilinear);
@ -309,20 +311,27 @@ void VideoProcessorImpl::FrameDecoded(const VideoFrame& image) {
fprintf(stderr, "Failed to scale frame: %d, return code: %d\n",
frame_number, ret_val);
}
// TODO(mikhal): Extracting the buffer for now - need to update test.
int length = CalcBufferSize(kI420, up_image.width(), up_image.height());
scoped_array<uint8_t> image_buffer(new uint8_t[length]);
length = ExtractBuffer(up_image, length, image_buffer.get());
// Update our copy of the last successful frame:
memcpy(last_successful_frame_buffer_, up_image.Buffer(), up_image.Length());
bool write_success = frame_writer_->WriteFrame(up_image.Buffer());
memcpy(last_successful_frame_buffer_, image_buffer.get(), length);
bool write_success = frame_writer_->WriteFrame(image_buffer.get());
assert(write_success);
if (!write_success) {
fprintf(stderr, "Failed to write frame %d to disk!", frame_number);
}
up_image.Free();
} else { // No resize.
// Update our copy of the last successful frame:
memcpy(last_successful_frame_buffer_, image.Buffer(), image.Length());
// TODO(mikhal): Add as a member function, so won't be allocated per frame.
int length = CalcBufferSize(kI420, image.width(), image.height());
scoped_array<uint8_t> image_buffer(new uint8_t[length]);
length = ExtractBuffer(image, length, image_buffer.get());
assert(length > 0);
memcpy(last_successful_frame_buffer_, image_buffer.get(), length);
bool write_success = frame_writer_->WriteFrame(image.Buffer());
bool write_success = frame_writer_->WriteFrame(image_buffer.get());
assert(write_success);
if (!write_success) {
fprintf(stderr, "Failed to write frame %d to disk!", frame_number);
@ -379,7 +388,7 @@ VideoProcessorImpl::VideoProcessorEncodeCompleteCallback::Encoded(
}
WebRtc_Word32
VideoProcessorImpl::VideoProcessorDecodeCompleteCallback::Decoded(
VideoFrame& image) {
I420VideoFrame& image) {
video_processor_->FrameDecoded(image); // forward to parent class
return 0;
}

View File

@ -15,7 +15,7 @@
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "common_video/libyuv/include/scaler.h"
#include "modules/interface/module_common_types.h"
#include "common_video/interface/i420_video_frame.h"
#include "modules/video_coding/codecs/interface/video_codec_interface.h"
#include "modules/video_coding/codecs/test/packet_manipulator.h"
#include "modules/video_coding/codecs/test/stats.h"
@ -175,7 +175,7 @@ class VideoProcessorImpl : public VideoProcessor {
// Invoked by the callback when a frame has completed encoding.
void FrameEncoded(webrtc::EncodedImage* encodedImage);
// Invoked by the callback when a frame has completed decoding.
void FrameDecoded(const webrtc::VideoFrame& image);
void FrameDecoded(const webrtc::I420VideoFrame& image);
// Used for getting a 32-bit integer representing time
// (checks the size is within signed 32-bit bounds before casting it)
int GetElapsedTimeMicroseconds(const webrtc::TickTime& start,
@ -204,7 +204,7 @@ class VideoProcessorImpl : public VideoProcessor {
// Keep track of the last successful frame, since we need to write that
// when decoding fails:
WebRtc_UWord8* last_successful_frame_buffer_;
webrtc::VideoFrame source_frame_;
webrtc::I420VideoFrame source_frame_;
// To keep track of if we have excluded the first key frame from packet loss:
bool first_key_frame_has_been_excluded_;
// To tell the decoder previous frame have been dropped due to packet loss:
@ -247,7 +247,7 @@ class VideoProcessorImpl : public VideoProcessor {
explicit VideoProcessorDecodeCompleteCallback(VideoProcessorImpl* vp)
: video_processor_(vp) {
}
WebRtc_Word32 Decoded(webrtc::VideoFrame& image);
WebRtc_Word32 Decoded(webrtc::I420VideoFrame& image);
private:
VideoProcessorImpl* video_processor_;

View File

@ -156,7 +156,8 @@ class VideoProcessorIntegrationTest: public testing::Test {
webrtc::test::ResourcePath("foreman_cif", "yuv");
config_.output_filename = webrtc::test::OutputPath() +
"foreman_cif_short_video_codecs_test_framework_integrationtests.yuv";
config_.frame_length_in_bytes = 3 * kCIFWidth * kCIFHeight / 2;
config_.frame_length_in_bytes = CalcBufferSize(kI420,
kCIFWidth, kCIFHeight);
config_.verbose = false;
// Only allow encoder/decoder to use single core, for predictability.
config_.use_single_core = true;

View File

@ -64,7 +64,7 @@ class VideoProcessorTest: public testing::Test {
EXPECT_CALL(frame_reader_mock_, NumberOfFrames())
.WillOnce(Return(1));
EXPECT_CALL(frame_reader_mock_, FrameLength())
.WillOnce(Return(150000));
.WillOnce(Return(152064));
}
};

View File

@ -230,9 +230,6 @@ Benchmark::PerformNormalTest()
CodecSettings(_target->GetWidth(), _target->GetHeight(), _target->GetFrameRate(), _bitRate);
Setup();
EventWrapper* waitEvent = EventWrapper::Create();
_inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
_decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
_encoder->InitEncode(&_inst, 4, 1440);
CodecSpecific_InitBitrate();
_decoder->InitDecode(&_inst,1);
@ -282,9 +279,7 @@ Benchmark::PerformNormalTest()
waitEvent->Wait(5);
}
_inputVideoBuffer.Free();
_encodedVideoBuffer.Free();
_decodedVideoBuffer.Free();
_encoder->Release();
_decoder->Release();

View File

@ -16,6 +16,7 @@
#include <sstream>
#include <vector>
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "gtest/gtest.h"
#include "tick_util.h"
#include "testsupport/fileutils.h"
@ -262,16 +263,13 @@ WebRtc_UWord32 VideoDecodeCompleteCallback::DecodedBytes()
}
WebRtc_Word32
VideoDecodeCompleteCallback::Decoded(VideoFrame& image)
VideoDecodeCompleteCallback::Decoded(I420VideoFrame& image)
{
_test.Decoded(image);
_decodedBytes += image.Length();
_decodedBytes += CalcBufferSize(kI420, image.width(), image.height());
if (_decodedFile != NULL)
{
if (fwrite(image.Buffer(), 1, image.Length(),
_decodedFile) != image.Length()) {
return -1;
}
return PrintI420VideoFrame(image, _decodedFile);
}
return 0;
}
@ -300,14 +298,14 @@ NormalAsyncTest::Encoded(const EncodedImage& encodedImage)
}
void
NormalAsyncTest::Decoded(const VideoFrame& decodedImage)
NormalAsyncTest::Decoded(const I420VideoFrame& decodedImage)
{
_decodeCompleteTime = tGetTime();
_decFrameCnt++;
_totalDecodePipeTime += _decodeCompleteTime -
_decodeTimes[decodedImage.TimeStamp()];
_decodedWidth = decodedImage.Width();
_decodedHeight = decodedImage.Height();
_decodeTimes[decodedImage.timestamp()];
_decodedWidth = decodedImage.width();
_decodedHeight = decodedImage.height();
}
void
@ -316,8 +314,6 @@ NormalAsyncTest::Perform()
_inname = webrtc::test::ProjectRootPath() + "resources/foreman_cif.yuv";
CodecSettings(352, 288, 30, _bitRate);
Setup();
_inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
_decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
if(_encoder->InitEncode(&_inst, 1, 1440) < 0)
{
exit(EXIT_FAILURE);
@ -410,17 +406,19 @@ NormalAsyncTest::Encode()
{
_lengthEncFrame = 0;
EXPECT_GT(fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile), 0u);
_inputVideoBuffer.CopyFrame(_lengthSourceFrame, _sourceBuffer);
_inputVideoBuffer.SetTimeStamp((unsigned int)
_inputVideoBuffer.CreateFrame(_sizeY, _sourceBuffer,
_sizeUv, _sourceBuffer + _sizeY,
_sizeUv, _sourceBuffer + _sizeY + _sizeUv,
_width, _height,
_width, _halfWidth, _halfWidth);
_inputVideoBuffer.set_timestamp((unsigned int)
(_encFrameCnt * 9e4 / _inst.maxFramerate));
_inputVideoBuffer.SetWidth(_inst.width);
_inputVideoBuffer.SetHeight(_inst.height);
if (feof(_sourceFile) != 0)
{
return true;
}
_encodeCompleteTime = 0;
_encodeTimes[_inputVideoBuffer.TimeStamp()] = tGetTime();
_encodeTimes[_inputVideoBuffer.timestamp()] = tGetTime();
std::vector<VideoFrameType> frame_types(1, kDeltaFrame);
// check SLI queue
@ -474,12 +472,12 @@ NormalAsyncTest::Encode()
if (_encodeCompleteTime > 0)
{
_totalEncodeTime += _encodeCompleteTime -
_encodeTimes[_inputVideoBuffer.TimeStamp()];
_encodeTimes[_inputVideoBuffer.timestamp()];
}
else
{
_totalEncodeTime += tGetTime() -
_encodeTimes[_inputVideoBuffer.TimeStamp()];
_encodeTimes[_inputVideoBuffer.timestamp()];
}
assert(ret >= 0);
return false;

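The read-then-CreateFrame pattern above recurs throughout these tests; a self-contained sketch of it (ReadFrameFromFile is a hypothetical name):

    #include <stdio.h>

    #include "common_video/interface/i420_video_frame.h"

    // Read one tightly packed I420 image from |file| into |scratch| (which
    // must hold size_y + 2 * size_uv bytes) and wrap it in |frame|;
    // CreateFrame() copies the planes out of the flat buffer.
    bool ReadFrameFromFile(FILE* file, int width, int height,
                           unsigned char* scratch,
                           webrtc::I420VideoFrame* frame) {
      int half_width = (width + 1) / 2;
      int size_y = width * height;
      int size_uv = half_width * ((height + 1) / 2);
      size_t frame_length = size_y + 2 * size_uv;
      if (fread(scratch, 1, frame_length, file) != frame_length)
        return false;  // EOF or short read.
      return frame->CreateFrame(size_y, scratch,
                                size_uv, scratch + size_y,
                                size_uv, scratch + size_y + size_uv,
                                width, height,
                                width, half_width, half_width) == 0;
    }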
View File

@ -80,7 +80,7 @@ public:
virtual ~NormalAsyncTest() {};
virtual void Perform();
virtual void Encoded(const webrtc::EncodedImage& encodedImage);
virtual void Decoded(const webrtc::VideoFrame& decodedImage);
virtual void Decoded(const webrtc::I420VideoFrame& decodedImage);
virtual webrtc::CodecSpecificInfo*
CopyCodecSpecificInfo(
const webrtc::CodecSpecificInfo* codecSpecificInfo) const;
@ -172,7 +172,7 @@ public:
_decodedBytes(0)
{}
virtual WebRtc_Word32 Decoded(webrtc::VideoFrame& decodedImage);
virtual WebRtc_Word32 Decoded(webrtc::I420VideoFrame& decodedImage);
virtual WebRtc_Word32
ReceivedDecodedReferenceFrame(const WebRtc_UWord64 pictureId);
virtual WebRtc_Word32 ReceivedDecodedFrame(const WebRtc_UWord64 pictureId);

View File

@ -14,6 +14,7 @@
#include <sstream>
#include <string.h>
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "gtest/gtest.h"
#include "testsupport/fileutils.h"
@ -22,7 +23,13 @@ NormalTest::NormalTest()
CodecTest("Normal Test 1", "A test of normal execution of the codec"),
_testNo(1),
_lengthEncFrame(0),
_appendNext(false)
_appendNext(false),
_width(0),
_halfWidth(0),
_height(0),
_halfHeight(0),
_sizeY(0),
_sizeUv(0)
{
}
@ -33,7 +40,13 @@ CodecTest(name, description),
_requestKeyFrame(false),
_testNo(testNo),
_lengthEncFrame(0),
_appendNext(false)
_appendNext(false),
_width(0),
_halfWidth(0),
_height(0),
_halfHeight(0),
_sizeY(0),
_sizeUv(0)
{
}
@ -108,12 +121,22 @@ NormalTest::Teardown()
void
NormalTest::Perform()
{
_width = 352;
_halfWidth = (_width + 1) / 2;
_height = 288;
_halfHeight = (_height + 1) / 2;
_sizeY = _width * _height;
_sizeUv = _halfWidth * _halfHeight;
_inname = webrtc::test::ProjectRootPath() + "resources/foreman_cif.yuv";
CodecSettings(352, 288, 30, _bitRate);
CodecSettings(_width, _height, 30, _bitRate);
Setup();
_inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
_decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
_inputVideoBuffer.CreateEmptyFrame(_width, _height,
_width, _halfWidth, _halfWidth);
_decodedVideoBuffer.CreateEmptyFrame(_width, _height,
_width, _halfWidth, _halfWidth);
_encodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
_encoder->InitEncode(&_inst, 1, 1460);
@ -140,8 +163,7 @@ NormalTest::Perform()
fprintf(stderr,"\n\nError in decoder: %d\n\n", decodeLength);
exit(EXIT_FAILURE);
}
if (fwrite(_decodedVideoBuffer.Buffer(), 1, decodeLength,
_decodedFile) != static_cast<unsigned int>(decodeLength)) {
if (PrintI420VideoFrame(_decodedVideoBuffer, _decodedFile) < 0) {
return;
}
CodecSpecific_InitBitrate();
@ -157,8 +179,7 @@ NormalTest::Perform()
fprintf(stderr,"\n\nError in decoder: %d\n\n", decodeLength);
exit(EXIT_FAILURE);
}
if (fwrite(_decodedVideoBuffer.Buffer(), 1, decodeLength,
_decodedFile) != static_cast<unsigned int>(decodeLength)) {
if (PrintI420VideoFrame(_decodedVideoBuffer, _decodedFile) < 0) {
return;
}
}
@ -173,8 +194,6 @@ NormalTest::Perform()
(*_log) << "Average encode time: " << avgEncTime << " s" << std::endl;
(*_log) << "Average decode time: " << avgDecTime << " s" << std::endl;
_inputVideoBuffer.Free();
_encoder->Release();
_decoder->Release();
@ -190,8 +209,13 @@ NormalTest::Encode()
{
return true;
}
_inputVideoBuffer.CopyFrame(_lengthSourceFrame, _sourceBuffer);
_inputVideoBuffer.SetTimeStamp(_framecnt);
_inputVideoBuffer.CreateFrame(_sizeY, _sourceBuffer,
_sizeUv, _sourceBuffer + _sizeY,
_sizeUv, _sourceBuffer + _sizeY +
_sizeUv,
_width, _height,
_width, _halfWidth, _halfWidth);
_inputVideoBuffer.set_timestamp(_framecnt);
// This multiple attempt ridiculousness is to accommodate VP7:
// 1. The wrapper can unilaterally reduce the framerate for low bitrates.
@ -204,8 +228,8 @@ NormalTest::Encode()
{
starttime = clock()/(double)CLOCKS_PER_SEC;
_inputVideoBuffer.SetWidth(_inst.width);
_inputVideoBuffer.SetHeight(_inst.height);
_inputVideoBuffer.set_width(_inst.width);
_inputVideoBuffer.set_height(_inst.height);
//_lengthEncFrame = _encoder->Encode(_inputVideoBuffer, _encodedVideoBuffer, _frameInfo,
// _inst.frameRate, _requestKeyFrame && !(_framecnt%50));

View File

@ -40,6 +40,12 @@ protected:
unsigned int _testNo;
int _lengthEncFrame;
bool _appendNext;
int _width;
int _halfWidth;
int _height;
int _halfHeight;
int _sizeY;
int _sizeUv;
};
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_NORMAL_TEST_H_

View File

@ -64,11 +64,11 @@ PacketLossTest::Encoded(const EncodedImage& encodedImage)
}
void
PacketLossTest::Decoded(const VideoFrame& decodedImage)
PacketLossTest::Decoded(const I420VideoFrame& decodedImage)
{
// Check the frame queue to see whether any frames have gone missing.
assert(!_frameQueue.empty()); // decoded frame is not in the queue
while(_frameQueue.front() < decodedImage.TimeStamp())
while(_frameQueue.front() < decodedImage.timestamp())
{
// this frame is missing
// write previous decoded frame again (frame freeze)
@ -84,20 +84,23 @@ PacketLossTest::Decoded(const VideoFrame& decodedImage)
_frameQueue.pop_front();
}
// Decoded frame is not in the queue.
assert(_frameQueue.front() == decodedImage.TimeStamp());
assert(_frameQueue.front() == decodedImage.timestamp());
// pop the current frame
_frameQueue.pop_front();
// save image for future freeze-frame
if (_lastFrameLength < decodedImage.Length())
unsigned int length = CalcBufferSize(kI420, decodedImage.width(),
decodedImage.height());
if (_lastFrameLength < length)
{
if (_lastFrame) delete [] _lastFrame;
_lastFrame = new WebRtc_UWord8[decodedImage.Length()];
_lastFrame = new WebRtc_UWord8[length];
}
memcpy(_lastFrame, decodedImage.Buffer(), decodedImage.Length());
_lastFrameLength = decodedImage.Length();
// TODO(mikhal): Can't the last frame be an I420VideoFrame?
ExtractBuffer(decodedImage, length, _lastFrame);
_lastFrameLength = length;
NormalAsyncTest::Decoded(decodedImage);
}

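A sketch of the freeze-frame bookkeeping above, assuming the same CalcBufferSize()/ExtractBuffer() helpers (KeepLastFrame is a hypothetical name):

    #include "common_video/interface/i420_video_frame.h"
    #include "common_video/libyuv/include/webrtc_libyuv.h"
    #include "typedefs.h"

    // Keep a packed copy of the most recent decoded frame so it can be
    // written out again when a later frame turns out to be missing.
    void KeepLastFrame(const webrtc::I420VideoFrame& decoded,
                       WebRtc_UWord8** last_frame,
                       unsigned int* last_frame_length) {
      unsigned int length = webrtc::CalcBufferSize(
          webrtc::kI420, decoded.width(), decoded.height());
      if (*last_frame_length < length) {
        delete [] *last_frame;
        *last_frame = new WebRtc_UWord8[length];
      }
      webrtc::ExtractBuffer(decoded, length, *last_frame);
      *last_frame_length = length;
    }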
View File

@ -21,7 +21,7 @@ public:
PacketLossTest();
virtual ~PacketLossTest() {if(_lastFrame) {delete [] _lastFrame; _lastFrame = NULL;}}
virtual void Encoded(const webrtc::EncodedImage& encodedImage);
virtual void Decoded(const webrtc::VideoFrame& decodedImage);
virtual void Decoded(const webrtc::I420VideoFrame& decodedImage);
protected:
PacketLossTest(std::string name, std::string description);
PacketLossTest(std::string name,

View File

@ -49,11 +49,11 @@ protected:
WebRtc_UWord32 _bitRate;
unsigned int _lengthSourceFrame;
unsigned char* _sourceBuffer;
webrtc::VideoFrame _inputVideoBuffer;
webrtc::I420VideoFrame _inputVideoBuffer;
// TODO(mikhal): For now using VideoFrame for encodedBuffer, should use a
// designated class.
webrtc::VideoFrame _encodedVideoBuffer;
webrtc::VideoFrame _decodedVideoBuffer;
webrtc::I420VideoFrame _decodedVideoBuffer;
webrtc::VideoCodec _inst;
std::fstream* _log;
std::string _inname;

View File

@ -98,7 +98,8 @@ UnitTestEncodeCompleteCallback::Encoded(EncodedImage& encodedImage,
_encodedVideoBuffer->VerifyAndAllocate(encodedImage._size);
_encodedVideoBuffer->CopyFrame(encodedImage._size, encodedImage._buffer);
_encodedVideoBuffer->SetLength(encodedImage._length);
// _encodedVideoBuffer->SetFrameType(encodedImage._frameType);
// TODO(mikhal): Update frame type API.
// _encodedVideoBuffer->SetFrameType(encodedImage._frameType);
_encodedVideoBuffer->SetWidth(
(WebRtc_UWord16)encodedImage._encodedWidth);
_encodedVideoBuffer->SetHeight(
@ -109,12 +110,9 @@ UnitTestEncodeCompleteCallback::Encoded(EncodedImage& encodedImage,
return 0;
}
WebRtc_Word32 UnitTestDecodeCompleteCallback::Decoded(VideoFrame& image)
WebRtc_Word32 UnitTestDecodeCompleteCallback::Decoded(I420VideoFrame& image)
{
_decodedVideoBuffer->CopyFrame(image.Length(), image.Buffer());
_decodedVideoBuffer->SetWidth(image.Width());
_decodedVideoBuffer->SetHeight(image.Height());
_decodedVideoBuffer->SetTimeStamp(image.TimeStamp());
_decodedVideoBuffer->CopyFrame(image);
_decodeComplete = true;
return 0;
}
@ -155,7 +153,7 @@ UnitTest::WaitForEncodedFrame() const
{
if (_encodeCompleteCallback->EncodeComplete())
{
return _encodedVideoBuffer.Length();
}
}
return 0;
@ -169,7 +167,8 @@ UnitTest::WaitForDecodedFrame() const
{
if (_decodeCompleteCallback->DecodeComplete())
{
return _decodedVideoBuffer.Length();
return webrtc::CalcBufferSize(kI420, _decodedVideoBuffer.width(),
_decodedVideoBuffer.height());
}
}
return 0;
@ -224,12 +223,16 @@ UnitTest::Setup()
_inst.codecSpecific.VP8.denoisingOn = true;
// Get input frame.
_inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
ASSERT_TRUE(fread(_refFrame, 1, _lengthSourceFrame, _sourceFile)
== _lengthSourceFrame);
_inputVideoBuffer.CopyFrame(_lengthSourceFrame, _refFrame);
_inputVideoBuffer.SetWidth(_source->GetWidth());
_inputVideoBuffer.SetHeight(_source->GetHeight());
int size_y = _inst.width * _inst.height;
int size_uv = ((_inst.width + 1) / 2) * ((_inst.height + 1) / 2);
_inputVideoBuffer.CreateFrame(size_y, _refFrame,
size_uv, _refFrame + size_y,
size_uv, _refFrame + size_y + size_uv,
_inst.width, _inst.height,
_inst.width,
(_inst.width + 1) / 2, (_inst.width + 1) / 2);
rewind(_sourceFile);
// Get a reference encoded frame.
@ -244,7 +247,9 @@ UnitTest::Setup()
memcpy(_refEncFrame, _encodedVideoBuffer.Buffer(), _refEncFrameLength);
// Get a reference decoded frame.
_decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
_decodedVideoBuffer.CreateEmptyFrame(_inst.width, _inst.height, _inst.width,
(_inst.width + 1) / 2,
(_inst.width + 1) / 2);
EXPECT_TRUE(_decoder->InitDecode(&_inst, 1) == WEBRTC_VIDEO_CODEC_OK);
ASSERT_FALSE(SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK);
@ -255,12 +260,15 @@ UnitTest::Setup()
if (i > 0)
{
// Insert yet another frame
_inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
ASSERT_TRUE(fread(_refFrame, 1, _lengthSourceFrame,
_sourceFile) == _lengthSourceFrame);
_inputVideoBuffer.CopyFrame(_lengthSourceFrame, _refFrame);
_inputVideoBuffer.SetWidth(_source->GetWidth());
_inputVideoBuffer.SetHeight(_source->GetHeight());
_inputVideoBuffer.CreateFrame(size_y, _refFrame,
size_uv, _refFrame + size_y,
size_uv, _refFrame + size_y + size_uv,
_inst.width, _inst.height,
_inst.width,
(_inst.width + 1) / 2,
(_inst.width + 1) / 2);
_encoder->Encode(_inputVideoBuffer, NULL, NULL);
ASSERT_TRUE(WaitForEncodedFrame() > 0);
}
@ -274,7 +282,7 @@ UnitTest::Setup()
}
rewind(_sourceFile);
EXPECT_TRUE(frameLength == _lengthSourceFrame);
memcpy(_refDecFrame, _decodedVideoBuffer.Buffer(), _lengthSourceFrame);
ExtractBuffer(_decodedVideoBuffer, _lengthSourceFrame, _refDecFrame);
}
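ExtractBuffer above performs the inverse operation: it packs the frame's planes back into one contiguous I420 buffer so the byte-wise reference comparison still works. A usage sketch, assuming the helper returns the number of bytes written (negative on a too-small destination):

// Sketch; the return-value semantics are an assumption, see above.
int extracted = ExtractBuffer(_decodedVideoBuffer, _lengthSourceFrame,
                              _refDecFrame);
ASSERT_TRUE(extracted == static_cast<int>(_lengthSourceFrame));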
void
@ -342,7 +350,7 @@ UnitTest::Perform()
{
UnitTest::Setup();
int frameLength;
VideoFrame inputImage;
I420VideoFrame inputImage;
EncodedImage encodedImage;
//----- Encoder parameter tests -----
@ -409,17 +417,20 @@ UnitTest::Perform()
EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
//-- Encode() errors --
// inputVideoBuffer unallocated.
_inputVideoBuffer.Free();
inputImage.Free();
inputImage.ResetSize();
EXPECT_TRUE(_encoder->Encode(inputImage, NULL, NULL) ==
WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
_inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
_inputVideoBuffer.CopyFrame(_lengthSourceFrame, _refFrame);
_inputVideoBuffer.SetWidth(_source->GetWidth());
_inputVideoBuffer.SetHeight(_source->GetHeight());
int width = _source->GetWidth();
int half_width = (width + 1) / 2;
int height = _source->GetHeight();
int half_height = (height + 1) / 2;
int size_y = width * height;
int size_uv = half_width * half_height;
_inputVideoBuffer.CreateFrame(size_y, _refFrame,
size_uv, _refFrame + size_y,
size_uv, _refFrame + size_y + size_uv,
width, height,
width, half_width, half_width);
//----- Encoder stress tests -----
// Vary frame rate and I-frame request.
@ -539,8 +550,12 @@ UnitTest::Perform()
_decoder->Decode(encodedImage, false, NULL);
frameLength = WaitForDecodedFrame();
}
EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.Buffer(), frameLength,
_refDecFrame, _lengthSourceFrame) == true);
unsigned int length = CalcBufferSize(kI420, width, height);
scoped_array<uint8_t> decoded_buffer(new uint8_t[length]);
ExtractBuffer(_decodedVideoBuffer, _lengthSourceFrame,
decoded_buffer.get());
EXPECT_TRUE(CheckIfBitExact(decoded_buffer.get(), frameLength, _refDecFrame,
_lengthSourceFrame) == true);
// Reset then decode.
EXPECT_TRUE(_decoder->Reset() == WEBRTC_VIDEO_CODEC_OK);
@ -551,8 +566,10 @@ UnitTest::Perform()
_decoder->Decode(encodedImage, false, NULL);
frameLength = WaitForDecodedFrame();
}
EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.Buffer(), frameLength,
_refDecFrame, _lengthSourceFrame) == true);
ExtractBuffer(_decodedVideoBuffer, _lengthSourceFrame,
decoded_buffer.get());
EXPECT_TRUE(CheckIfBitExact(decoded_buffer.get(), frameLength,
_refDecFrame, _lengthSourceFrame) == true);
// Decode with another size, reset, then decode with the original size again
// to verify that the decoder is reset to a "fresh" state upon Reset().
@ -565,20 +582,25 @@ UnitTest::Perform()
memcpy(&tempInst, &_inst, sizeof(VideoCodec));
tempInst.width /= 2;
tempInst.height /= 2;
int tmpHalfWidth = (tempInst.width + 1) / 2;
int tmpHalfHeight = (tempInst.height + 1) / 2;
int tmpSizeY = tempInst.width * tempInst.height;
int tmpSizeUv = tmpHalfWidth * tmpHalfHeight;
// Encode reduced (quarter) frame size.
EXPECT_TRUE(_encoder->Release() == WEBRTC_VIDEO_CODEC_OK);
EXPECT_TRUE(_encoder->InitEncode(&tempInst, 1, 1440) ==
WEBRTC_VIDEO_CODEC_OK);
VideoFrame tempInput;
unsigned int tmpLength = _inputVideoBuffer.Length() / 4;
tempInput.CopyFrame(tmpLength, _inputVideoBuffer.Buffer());
tempInput.SetWidth(tempInst.width);
tempInput.SetHeight(tempInst.height);
webrtc::I420VideoFrame tempInput;
tempInput.CreateFrame(tmpSizeY, _inputVideoBuffer.buffer(kYPlane),
tmpSizeUv, _inputVideoBuffer.buffer(kUPlane),
tmpSizeUv, _inputVideoBuffer.buffer(kVPlane),
tempInst.width, tempInst.height,
tempInst.width, tmpHalfWidth, tmpHalfWidth);
_encoder->Encode(tempInput, NULL, NULL);
frameLength = WaitForEncodedFrame();
EXPECT_TRUE(frameLength > 0);
tempInput.Free();
// Reset then decode.
EXPECT_TRUE(_decoder->Reset() == WEBRTC_VIDEO_CODEC_OK);
frameLength = 0;
@ -608,9 +630,11 @@ UnitTest::Perform()
}
// Check that the decoded frame matches the reference.
EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.Buffer(), frameLength,
_refDecFrame, _lengthSourceFrame) == true);
unsigned int length = CalcBufferSize(kI420, width, height);
scoped_array<uint8_t> decoded_buffer(new uint8_t[length]);
ExtractBuffer(_decodedVideoBuffer, length, decoded_buffer.get());
EXPECT_TRUE(CheckIfBitExact(decoded_buffer.get(), length,
_refDecFrame, _lengthSourceFrame) == true);
}
// Release then decode.
@ -624,8 +648,9 @@ UnitTest::Perform()
_decoder->Decode(encodedImage, false, NULL);
frameLength = WaitForDecodedFrame();
}
EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.Buffer(), frameLength,
_refDecFrame, _lengthSourceFrame) == true);
ExtractBuffer(_decodedVideoBuffer, length, decoded_buffer.get());
EXPECT_TRUE(CheckIfBitExact(decoded_buffer.get(), frameLength,
_refDecFrame, _lengthSourceFrame) == true);
_encodedVideoBuffer.SetLength(0);
delete [] tmpBuf;
@ -644,19 +669,24 @@ UnitTest::Perform()
frames = 0;
int frameDelay = 0;
int encTimeStamp;
_decodedVideoBuffer.SetTimeStamp(0);
_decodedVideoBuffer.set_timestamp(0);
while (fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile) ==
_lengthSourceFrame)
{
_inputVideoBuffer.CopyFrame(_lengthSourceFrame, _sourceBuffer);
_inputVideoBuffer.SetTimeStamp(frames);
_inputVideoBuffer.CreateFrame(size_y, _sourceBuffer,
size_uv, _sourceBuffer + size_y,
size_uv, _sourceBuffer + size_y + size_uv,
width, height,
width, half_width, half_width);
_inputVideoBuffer.set_timestamp(frames);
ASSERT_TRUE(_encoder->Encode(_inputVideoBuffer, NULL, NULL) ==
WEBRTC_VIDEO_CODEC_OK);
frameLength = WaitForEncodedFrame();
//ASSERT_TRUE(frameLength);
EXPECT_TRUE(frameLength > 0);
encTimeStamp = _encodedVideoBuffer.TimeStamp();
EXPECT_TRUE(_inputVideoBuffer.TimeStamp() ==
EXPECT_TRUE(_inputVideoBuffer.timestamp() ==
static_cast<unsigned>(encTimeStamp));
frameLength = Decode();
@ -670,7 +700,7 @@ UnitTest::Perform()
{
encTimeStamp = 0;
}
EXPECT_TRUE(_decodedVideoBuffer.TimeStamp() ==
EXPECT_TRUE(_decodedVideoBuffer.timestamp() ==
static_cast<unsigned>(encTimeStamp));
frames++;
}
@ -678,7 +708,6 @@ UnitTest::Perform()
rewind(_sourceFile);
RateControlTests();
inputImage.Free();
Teardown();
}
@ -719,13 +748,22 @@ UnitTest::RateControlTests()
{
CodecSpecific_SetBitrate(_bitRate, _inst.maxFramerate);
}
int width = _source->GetWidth();
int half_width = (width + 1) / 2;
int height = _source->GetHeight();
int half_height = (height + 1) / 2;
int size_y = width * height;
int size_uv = half_width * half_height;
while (fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile) ==
_lengthSourceFrame)
{
_inputVideoBuffer.CopyFrame(_lengthSourceFrame, _sourceBuffer);
_inputVideoBuffer.SetTimeStamp(_inputVideoBuffer.TimeStamp() +
static_cast<WebRtc_UWord32>(9e4 /
_inputVideoBuffer.CreateFrame(size_y, _sourceBuffer,
size_uv, _sourceBuffer + size_y,
size_uv, _sourceBuffer + size_y +
size_uv,
width, height,
width, half_width, half_width);
_inputVideoBuffer.set_timestamp(static_cast<WebRtc_UWord32>(9e4 /
static_cast<float>(_inst.maxFramerate)));
ASSERT_EQ(_encoder->Encode(_inputVideoBuffer, NULL, NULL),
WEBRTC_VIDEO_CODEC_OK);
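The 9e4 factor in the timestamp arithmetic above is the 90 kHz RTP video clock: each frame advances the timestamp by 90000 / frame rate ticks, e.g. 3000 ticks per frame at 30 fps. In sketch form (values illustrative):

// 90 kHz RTP clock: ticks to advance per frame.
const float kRtpClockHz = 9e4f;
int max_framerate = 30;  // e.g. _inst.maxFramerate
WebRtc_UWord32 ticks_per_frame =
    static_cast<WebRtc_UWord32>(kRtpClockHz / max_framerate);  // == 3000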

View File

@ -94,12 +94,12 @@ private:
class UnitTestDecodeCompleteCallback : public webrtc::DecodedImageCallback
{
public:
UnitTestDecodeCompleteCallback(webrtc::VideoFrame* buffer) :
UnitTestDecodeCompleteCallback(webrtc::I420VideoFrame* buffer) :
_decodedVideoBuffer(buffer), _decodeComplete(false) {}
WebRtc_Word32 Decoded(webrtc::VideoFrame& image);
WebRtc_Word32 Decoded(webrtc::I420VideoFrame& image);
bool DecodeComplete();
private:
webrtc::VideoFrame* _decodedVideoBuffer;
webrtc::I420VideoFrame* _decodedVideoBuffer;
bool _decodeComplete;
};

View File

@ -36,8 +36,6 @@ VP8DualDecoderTest::~VP8DualDecoderTest()
_decoder2->Release();
delete _decoder2;
}
_decodedVideoBuffer2.Free();
}
void
@ -46,9 +44,6 @@ VP8DualDecoderTest::Perform()
_inname = webrtc::test::ProjectRootPath() + "resources/foreman_cif.yuv";
CodecSettings(352, 288, 30, _bitRate);
Setup();
_inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
_decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
_decodedVideoBuffer2.VerifyAndAllocate(_lengthSourceFrame);
if(_encoder->InitEncode(&_inst, 4, 1460) < 0)
{
exit(EXIT_FAILURE);
@ -171,9 +166,7 @@ VP8DualDecoderTest::Decode(int lossValue)
}
// compare decoded images
if (!CheckIfBitExact(_decodedVideoBuffer.Buffer(),
_decodedVideoBuffer.Length(),
_decodedVideoBuffer2.Buffer(), _decodedVideoBuffer.Length()))
if (!CheckIfBitExactFrames(_decodedVideoBuffer, _decodedVideoBuffer2))
{
fprintf(stderr,"\n\nClone output different from master.\n\n");
exit(EXIT_FAILURE);
@ -185,26 +178,10 @@ VP8DualDecoderTest::Decode(int lossValue)
return ret;
}
bool
VP8DualDecoderTest::CheckIfBitExact(const void* ptrA, unsigned int aLengthBytes,
const void* ptrB, unsigned int bLengthBytes)
WebRtc_Word32 DualDecoderCompleteCallback::Decoded(webrtc::I420VideoFrame&
image)
{
if (aLengthBytes != bLengthBytes)
{
return false;
}
return memcmp(ptrA, ptrB, aLengthBytes) == 0;
}
WebRtc_Word32 DualDecoderCompleteCallback::Decoded(webrtc::VideoFrame& image)
{
_decodedVideoBuffer->VerifyAndAllocate(image.Length());
_decodedVideoBuffer->CopyFrame(image.Length(), image.Buffer());
_decodedVideoBuffer->SetWidth(image.Width());
_decodedVideoBuffer->SetHeight(image.Height());
_decodedVideoBuffer->SetTimeStamp(image.TimeStamp());
_decodedVideoBuffer->CopyFrame(image);
_decodeComplete = true;
return 0;
}
@ -219,3 +196,20 @@ bool DualDecoderCompleteCallback::DecodeComplete()
return false;
}
bool
VP8DualDecoderTest::CheckIfBitExactFrames(const webrtc::I420VideoFrame& frame1,
const webrtc::I420VideoFrame& frame2) {
for (int plane = 0; plane < webrtc::kNumOfPlanes; plane++) {
webrtc::PlaneType plane_type = static_cast<webrtc::PlaneType>(plane);
int allocated_size1 = frame1.allocated_size(plane_type);
int allocated_size2 = frame2.allocated_size(plane_type);
if (allocated_size1 != allocated_size2)
return false;
const uint8_t* plane_buffer1 = frame1.buffer(plane_type);
const uint8_t* plane_buffer2 = frame2.buffer(plane_type);
if (memcmp(plane_buffer1, plane_buffer2, allocated_size1) != 0)
return false;
}
return true;
}
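CheckIfBitExactFrames compares per-plane allocations instead of the old flat memcmp, so two frames with different stride padding compare unequal even when their visible pixels match. Where that matters, a stride-aware variant could compare only the visible region; a sketch, not part of this patch, assuming the frame's stride() accessor:

// Sketch: compare only visible pixels, row by row (needs <string.h>).
bool VisiblePixelsEqual(const webrtc::I420VideoFrame& a,
                        const webrtc::I420VideoFrame& b) {
  if (a.width() != b.width() || a.height() != b.height())
    return false;
  for (int p = 0; p < webrtc::kNumOfPlanes; ++p) {
    webrtc::PlaneType t = static_cast<webrtc::PlaneType>(p);
    int w = (p == webrtc::kYPlane) ? a.width() : (a.width() + 1) / 2;
    int h = (p == webrtc::kYPlane) ? a.height() : (a.height() + 1) / 2;
    const uint8_t* pa = a.buffer(t);
    const uint8_t* pb = b.buffer(t);
    for (int y = 0; y < h; ++y, pa += a.stride(t), pb += b.stride(t)) {
      if (memcmp(pa, pb, w) != 0)
        return false;
    }
  }
  return true;
}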

View File

@ -30,21 +30,21 @@ protected:
virtual int Decode(int lossValue = 0);
webrtc::VP8Decoder* _decoder2;
webrtc::VideoFrame _decodedVideoBuffer2;
static bool CheckIfBitExact(const void *ptrA, unsigned int aLengthBytes,
const void *ptrB, unsigned int bLengthBytes);
webrtc::I420VideoFrame _decodedVideoBuffer2;
static bool CheckIfBitExactFrames(const webrtc::I420VideoFrame& frame1,
const webrtc::I420VideoFrame& frame2);
private:
};
class DualDecoderCompleteCallback : public webrtc::DecodedImageCallback
{
public:
DualDecoderCompleteCallback(webrtc::VideoFrame* buffer)
DualDecoderCompleteCallback(webrtc::I420VideoFrame* buffer)
: _decodedVideoBuffer(buffer), _decodeComplete(false) {}
WebRtc_Word32 Decoded(webrtc::VideoFrame& decodedImage);
WebRtc_Word32 Decoded(webrtc::I420VideoFrame& decodedImage);
bool DecodeComplete();
private:
webrtc::VideoFrame* _decodedVideoBuffer;
webrtc::I420VideoFrame* _decodedVideoBuffer;
bool _decodeComplete;
};

View File

@ -34,16 +34,12 @@ VP8RpsTest::~VP8RpsTest() {
decoder2_->Release();
delete decoder2_;
}
decoded_frame2_.Free();
}
void VP8RpsTest::Perform() {
_inname = "test/testFiles/foreman_cif.yuv";
CodecSettings(352, 288, 30, _bitRate);
Setup();
_inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
_decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
decoded_frame2_.VerifyAndAllocate(_lengthSourceFrame);
// Enable RPS functionality
_inst.codecSpecific.VP8.pictureLossIndicationOn = true;
@ -137,16 +133,22 @@ bool VP8RpsTest::EncodeRps(RpsDecodeCompleteCallback* decodeCallback) {
size_t bytes_read = fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile);
if (bytes_read < _lengthSourceFrame)
return true;
_inputVideoBuffer.CopyFrame(_lengthSourceFrame, _sourceBuffer);
_inputVideoBuffer.SetTimeStamp((unsigned int)
(_encFrameCnt * 9e4 / _inst.maxFramerate));
_inputVideoBuffer.SetWidth(_inst.width);
_inputVideoBuffer.SetHeight(_inst.height);
int half_width = (_inst.width + 1) / 2;
int half_height = (_inst.height + 1) / 2;
int size_y = _inst.width * _inst.height;
int size_uv = half_width * half_height;
_inputVideoBuffer.CreateFrame(size_y, _sourceBuffer,
size_uv, _sourceBuffer + size_y,
size_uv, _sourceBuffer + size_y + size_uv,
_inst.width, _inst.height,
_inst.width, half_width, half_width);
_inputVideoBuffer.set_timestamp((unsigned int)
(_encFrameCnt * 9e4 / _inst.maxFramerate));
if (feof(_sourceFile) != 0) {
return true;
}
_encodeCompleteTime = 0;
_encodeTimes[_inputVideoBuffer.TimeStamp()] = tGetTime();
_encodeTimes[_inputVideoBuffer.timestamp()] = tGetTime();
webrtc::CodecSpecificInfo* codecSpecificInfo = CreateEncoderSpecificInfo();
codecSpecificInfo->codecSpecific.VP8.pictureIdRPSI =
@ -169,11 +171,11 @@ bool VP8RpsTest::EncodeRps(RpsDecodeCompleteCallback* decodeCallback) {
}
if (_encodeCompleteTime > 0) {
_totalEncodeTime += _encodeCompleteTime -
_encodeTimes[_inputVideoBuffer.TimeStamp()];
_encodeTimes[_inputVideoBuffer.timestamp()];
}
else {
_totalEncodeTime += tGetTime() -
_encodeTimes[_inputVideoBuffer.TimeStamp()];
_encodeTimes[_inputVideoBuffer.timestamp()];
}
return false;
}
@ -219,9 +221,8 @@ int VP8RpsTest::Decode(int lossValue) {
// compare decoded images
#if FRAME_LOSS
if (!_missingFrames) {
if (!CheckIfBitExact(_decodedVideoBuffer.GetBuffer(),
_decodedVideoBuffer.GetLength(),
decoded_frame2_.GetBuffer(), _decodedVideoBuffer.GetLength())) {
if (!CheckIfBitExactFrames(_decodedVideoBuffer,
decoded_frame2_)) {
fprintf(stderr,"\n\nRPS decoder different from master: %u\n\n",
_framecnt);
return -1;
@ -229,9 +230,7 @@ int VP8RpsTest::Decode(int lossValue) {
}
#else
if (_framecnt > 0 && _framecnt % 10 != 0) {
if (!CheckIfBitExact(_decodedVideoBuffer.Buffer(),
_decodedVideoBuffer.Length(),
decoded_frame2_.Buffer(), _decodedVideoBuffer.Length())) {
if (!CheckIfBitExactFrames(_decodedVideoBuffer, decoded_frame2_)) {
fprintf(stderr,"\n\nRPS decoder different from master: %u\n\n",
_framecnt);
return -1;
@ -247,24 +246,30 @@ int VP8RpsTest::Decode(int lossValue) {
return 0;
}
bool
VP8RpsTest::CheckIfBitExact(const void* ptrA, unsigned int aLengthBytes,
const void* ptrB, unsigned int bLengthBytes) {
if (aLengthBytes != bLengthBytes)
return false;
return memcmp(ptrA, ptrB, aLengthBytes) == 0;
VP8RpsTest::CheckIfBitExactFrames(const webrtc::I420VideoFrame& frame1,
const webrtc::I420VideoFrame& frame2) {
for (int plane = 0; plane < webrtc::kNumOfPlanes; plane++) {
webrtc::PlaneType plane_type = static_cast<webrtc::PlaneType>(plane);
int allocated_size1 = frame1.allocated_size(plane_type);
int allocated_size2 = frame2.allocated_size(plane_type);
if (allocated_size1 != allocated_size2)
return false;
const uint8_t* plane_buffer1 = frame1.buffer(plane_type);
const uint8_t* plane_buffer2 = frame2.buffer(plane_type);
if (memcmp(plane_buffer1, plane_buffer2, allocated_size1) != 0)
return false;
}
return true;
}
RpsDecodeCompleteCallback::RpsDecodeCompleteCallback(webrtc::VideoFrame* buffer)
RpsDecodeCompleteCallback::RpsDecodeCompleteCallback(webrtc::I420VideoFrame*
buffer)
: decoded_frame_(buffer),
decode_complete_(false),
last_decoded_picture_id_(0),
last_decoded_ref_picture_id_(0),
updated_ref_picture_id_(false) {
}
decode_complete_(false) {}
WebRtc_Word32 RpsDecodeCompleteCallback::Decoded(webrtc::VideoFrame& image) {
WebRtc_Word32 RpsDecodeCompleteCallback::Decoded(webrtc::I420VideoFrame&
image) {
decode_complete_ = true;
return decoded_frame_->CopyFrame(image);
}

View File

@ -11,6 +11,7 @@
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_RPS_TEST_H_
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_RPS_TEST_H_
#include "common_video/interface/i420_video_frame.h"
#include "vp8.h"
#include "normal_async_test.h"
@ -28,18 +29,18 @@ class VP8RpsTest : public VP8NormalAsyncTest {
virtual bool EncodeRps(RpsDecodeCompleteCallback* decodeCallback);
virtual int Decode(int lossValue = 0);
static bool CheckIfBitExact(const void *ptrA, unsigned int aLengthBytes,
const void *ptrB, unsigned int bLengthBytes);
static bool CheckIfBitExactFrames(const webrtc::I420VideoFrame& frame1,
const webrtc::I420VideoFrame& frame2);
webrtc::VP8Decoder* decoder2_;
webrtc::VideoFrame decoded_frame2_;
webrtc::I420VideoFrame decoded_frame2_;
bool sli_;
};
class RpsDecodeCompleteCallback : public webrtc::DecodedImageCallback {
public:
RpsDecodeCompleteCallback(webrtc::VideoFrame* buffer);
WebRtc_Word32 Decoded(webrtc::VideoFrame& decodedImage);
RpsDecodeCompleteCallback(webrtc::I420VideoFrame* buffer);
WebRtc_Word32 Decoded(webrtc::I420VideoFrame& decodedImage);
bool DecodeComplete();
WebRtc_Word32 ReceivedDecodedReferenceFrame(const WebRtc_UWord64 picture_id);
WebRtc_Word32 ReceivedDecodedFrame(const WebRtc_UWord64 picture_id);
@ -47,7 +48,7 @@ class RpsDecodeCompleteCallback : public webrtc::DecodedImageCallback {
WebRtc_UWord64 LastDecodedRefPictureId(bool *updated);
private:
webrtc::VideoFrame* decoded_frame_;
webrtc::I420VideoFrame* decoded_frame_;
bool decode_complete_;
WebRtc_UWord64 last_decoded_picture_id_;
WebRtc_UWord64 last_decoded_ref_picture_id_;

View File

@ -324,13 +324,13 @@ uint32_t VP8EncoderImpl::MaxIntraTarget(uint32_t optimalBuffersize) {
return (targetPct < minIntraTh) ? minIntraTh: targetPct;
}
int VP8EncoderImpl::Encode(const VideoFrame& input_image,
int VP8EncoderImpl::Encode(const I420VideoFrame& input_image,
const CodecSpecificInfo* codec_specific_info,
const std::vector<VideoFrameType>* frame_types) {
if (!inited_) {
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}
if (input_image.Buffer() == NULL) {
if (input_image.IsZeroSize()) {
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
if (encoded_complete_callback_ == NULL) {
@ -344,20 +344,18 @@ int VP8EncoderImpl::Encode(const VideoFrame& input_image,
}
// Check for change in frame size.
if (input_image.Width() != codec_.width ||
input_image.Height() != codec_.height) {
int ret = UpdateCodecFrameSize(input_image.Width(), input_image.Height());
if (input_image.width() != codec_.width ||
input_image.height() != codec_.height) {
int ret = UpdateCodecFrameSize(input_image.width(), input_image.height());
if (ret < 0) {
return ret;
}
}
// Image in vpx_image_t format.
uint8_t* buffer = input_image.Buffer();
uint32_t v_plane_loc = codec_.height * codec_.width +
((codec_.width + 1) >> 1) * ((codec_.height + 1) >> 1);
raw_->planes[PLANE_Y] = buffer;
raw_->planes[PLANE_U] = &buffer[codec_.width * codec_.height];
raw_->planes[PLANE_V] = &buffer[v_plane_loc];
// Input image is const. VP8's raw image is not defined as const.
raw_->planes[PLANE_Y] = const_cast<uint8_t*>(input_image.buffer(kYPlane));
raw_->planes[PLANE_U] = const_cast<uint8_t*>(input_image.buffer(kUPlane));
raw_->planes[PLANE_V] = const_cast<uint8_t*>(input_image.buffer(kVPlane));
int flags = 0;
#if WEBRTC_LIBVPX_VERSION >= 971
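Since Encode() now receives a const frame, the plane pointers are const_cast for libvpx, whose vpx_image_t planes are mutable. If the frame's strides can differ from what the raw image was allocated with, the strides need forwarding too; a sketch (the stride[] lines are an assumption, only planes[] appear in the hunk above):

// Sketch: zero-copy wiring of an I420VideoFrame into the vpx raw image.
raw_->planes[PLANE_Y] = const_cast<uint8_t*>(input_image.buffer(kYPlane));
raw_->planes[PLANE_U] = const_cast<uint8_t*>(input_image.buffer(kUPlane));
raw_->planes[PLANE_V] = const_cast<uint8_t*>(input_image.buffer(kVPlane));
raw_->stride[PLANE_Y] = input_image.stride(kYPlane);  // assumption
raw_->stride[PLANE_U] = input_image.stride(kUPlane);  // assumption
raw_->stride[PLANE_V] = input_image.stride(kVPlane);  // assumption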
@ -379,11 +377,11 @@ int VP8EncoderImpl::Encode(const VideoFrame& input_image,
codec_specific_info->codecSpecific.VP8.pictureIdRPSI);
}
if (codec_specific_info->codecSpecific.VP8.hasReceivedSLI) {
sendRefresh = rps_->ReceivedSLI(input_image.TimeStamp());
sendRefresh = rps_->ReceivedSLI(input_image.timestamp());
}
}
flags = rps_->EncodeFlags(picture_id_, sendRefresh,
input_image.TimeStamp());
input_image.timestamp());
}
// TODO(holmer): Ideally the duration should be the timestamp diff of this
@ -456,7 +454,7 @@ void VP8EncoderImpl::PopulateCodecSpecific(CodecSpecificInfo* codec_specific,
picture_id_ = (picture_id_ + 1) & 0x7FFF; // prepare next
}
int VP8EncoderImpl::GetEncodedFrame(const VideoFrame& input_image) {
int VP8EncoderImpl::GetEncodedFrame(const I420VideoFrame& input_image) {
vpx_codec_iter_t iter = NULL;
encoded_image_._frameType = kDeltaFrame;
const vpx_codec_cx_pkt_t* pkt = vpx_codec_get_cx_data(encoder_, &iter);
@ -469,7 +467,7 @@ int VP8EncoderImpl::GetEncodedFrame(const VideoFrame& input_image) {
}
} else if (pkt->kind == VPX_CODEC_CX_FRAME_PKT) {
CodecSpecificInfo codecSpecific;
PopulateCodecSpecific(&codecSpecific, *pkt, input_image.TimeStamp());
PopulateCodecSpecific(&codecSpecific, *pkt, input_image.timestamp());
assert(pkt->data.frame.sz <= encoded_image_._size);
memcpy(encoded_image_._buffer, pkt->data.frame.buf, pkt->data.frame.sz);
@ -484,9 +482,9 @@ int VP8EncoderImpl::GetEncodedFrame(const VideoFrame& input_image) {
}
if (encoded_image_._length > 0) {
encoded_image_._timeStamp = input_image.TimeStamp();
encoded_image_._timeStamp = input_image.timestamp();
// TODO(mikhal): Resolve confusion in terms.
encoded_image_.capture_time_ms_ = input_image.RenderTimeMs();
encoded_image_.capture_time_ms_ = input_image.render_time_ms();
// Figure out where partition boundaries are located.
RTPFragmentationHeader fragInfo;
@ -518,7 +516,7 @@ int VP8EncoderImpl::GetEncodedFrame(const VideoFrame& input_image) {
}
#if WEBRTC_LIBVPX_VERSION >= 971
int VP8EncoderImpl::GetEncodedPartitions(const VideoFrame& input_image) {
int VP8EncoderImpl::GetEncodedPartitions(const I420VideoFrame& input_image) {
vpx_codec_iter_t iter = NULL;
int part_idx = 0;
encoded_image_._length = 0;
@ -554,13 +552,13 @@ int VP8EncoderImpl::GetEncodedPartitions(const VideoFrame& input_image) {
encoded_image_._frameType = kKeyFrame;
rps_->EncodedKeyFrame(picture_id_);
}
PopulateCodecSpecific(&codec_specific, *pkt, input_image.TimeStamp());
PopulateCodecSpecific(&codec_specific, *pkt, input_image.timestamp());
break;
}
}
if (encoded_image_._length > 0) {
encoded_image_._timeStamp = input_image.TimeStamp();
encoded_image_.capture_time_ms_ = input_image.RenderTimeMs();
encoded_image_._timeStamp = input_image.timestamp();
encoded_image_.capture_time_ms_ = input_image.render_time_ms();
encoded_image_._encodedHeight = raw_->h;
encoded_image_._encodedWidth = raw_->w;
encoded_complete_callback_->Encoded(encoded_image_, &codec_specific,
@ -873,30 +871,18 @@ int VP8DecoderImpl::ReturnFrame(const vpx_image_t* img, uint32_t timestamp) {
// Decoder OK and NULL image => No show frame
return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
}
uint32_t required_size = CalcBufferSize(kI420, img->d_w, img->d_h);
decoded_image_.VerifyAndAllocate(required_size);
uint8_t* buf;
uint32_t pos = 0;
uint32_t plane, y;
uint8_t* buffer = decoded_image_.Buffer();
for (plane = 0; plane < 3; plane++) {
unsigned int width = (plane ? (img->d_w + 1) >> 1 : img->d_w);
unsigned int height = (plane ? (img->d_h + 1) >> 1 : img->d_h);
buf = img->planes[plane];
for (y = 0; y < height; y++) {
memcpy(&buffer[pos], buf, width);
pos += width;
buf += img->stride[plane];
}
}
// Set decoded image parameters.
decoded_image_.SetHeight(img->d_h);
decoded_image_.SetWidth(img->d_w);
decoded_image_.SetLength(CalcBufferSize(kI420, img->d_w, img->d_h));
decoded_image_.SetTimeStamp(timestamp);
int size_y = img->stride[VPX_PLANE_Y] * img->d_h;
int size_u = img->stride[VPX_PLANE_U] * ((img->d_h + 1) / 2);
int size_v = img->stride[VPX_PLANE_V] * ((img->d_h + 1) / 2);
// TODO(mikhal): This does a copy - need to SwapBuffers.
decoded_image_.CreateFrame(size_y, img->planes[VPX_PLANE_Y],
size_u, img->planes[VPX_PLANE_U],
size_v, img->planes[VPX_PLANE_V],
img->d_w, img->d_h,
img->stride[VPX_PLANE_Y],
img->stride[VPX_PLANE_U],
img->stride[VPX_PLANE_V]);
decoded_image_.set_timestamp(timestamp);
int ret = decode_complete_callback_->Decoded(decoded_image_);
if (ret != 0)
return ret;
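Note that the plane sizes above are computed from the decoder's strides rather than the display width, so any row padding libvpx allocated is copied along and the stated strides remain valid. Illustrative arithmetic (example values, not from this patch):

// Example: d_w = 100, d_h = 90, stride_y = 128, stride_u = stride_v = 64.
int size_y = 128 * 90;             // 11520 bytes, includes 28 padding columns
int size_u = 64 * ((90 + 1) / 2);  // 64 * 45 = 2880 bytes
int size_v = size_u;               // V has the same layout as U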
@ -913,7 +899,6 @@ int VP8DecoderImpl::RegisterDecodeCompleteCallback(
}
int VP8DecoderImpl::Release() {
decoded_image_.Free();
if (last_keyframe_._buffer != NULL) {
delete [] last_keyframe_._buffer;
last_keyframe_._buffer = NULL;
@ -941,7 +926,7 @@ VideoDecoder* VP8DecoderImpl::Copy() {
assert(false);
return NULL;
}
if (decoded_image_.Buffer() == NULL) {
if (decoded_image_.IsZeroSize()) {
// Nothing has been decoded before; cannot clone.
return NULL;
}
@ -964,13 +949,13 @@ VideoDecoder* VP8DecoderImpl::Copy() {
return NULL;
}
// Allocate memory for reference image copy
assert(decoded_image_.Width() > 0);
assert(decoded_image_.Height() > 0);
assert(decoded_image_.width() > 0);
assert(decoded_image_.height() > 0);
assert(image_format_ > VPX_IMG_FMT_NONE);
// Check if frame format has changed.
if (ref_frame_ &&
(decoded_image_.Width() != ref_frame_->img.d_w ||
decoded_image_.Height() != ref_frame_->img.d_h ||
(decoded_image_.width() != static_cast<int>(ref_frame_->img.d_w) ||
decoded_image_.height() != static_cast<int>(ref_frame_->img.d_h) ||
image_format_ != ref_frame_->img.fmt)) {
vpx_img_free(&ref_frame_->img);
delete ref_frame_;
@ -982,12 +967,12 @@ VideoDecoder* VP8DecoderImpl::Copy() {
ref_frame_ = new vpx_ref_frame_t;
unsigned int align = 1;
if (decoded_image_.Width() % 32 == 0) {
if (decoded_image_.width() % 32 == 0) {
align = 32;
}
if (!vpx_img_alloc(&ref_frame_->img,
static_cast<vpx_img_fmt_t>(image_format_),
decoded_image_.Width(), decoded_image_.Height(),
decoded_image_.width(), decoded_image_.height(),
align)) {
assert(false);
delete copy;

View File

@ -72,7 +72,7 @@ class VP8EncoderImpl : public VP8Encoder {
// WEBRTC_VIDEO_CODEC_ERROR
// WEBRTC_VIDEO_CODEC_TIMEOUT
virtual int Encode(const VideoFrame& input_image,
virtual int Encode(const I420VideoFrame& input_image,
const CodecSpecificInfo* codec_specific_info,
const std::vector<VideoFrameType>* frame_types);
@ -115,9 +115,9 @@ class VP8EncoderImpl : public VP8Encoder {
const vpx_codec_cx_pkt& pkt,
uint32_t timestamp);
int GetEncodedFrame(const VideoFrame& input_image);
int GetEncodedFrame(const I420VideoFrame& input_image);
int GetEncodedPartitions(const VideoFrame& input_image);
int GetEncodedPartitions(const I420VideoFrame& input_image);
// Determine maximum target for Intra frames
//
@ -219,7 +219,7 @@ class VP8DecoderImpl : public VP8Decoder {
int ReturnFrame(const vpx_image_t* img, uint32_t timeStamp);
VideoFrame decoded_image_;
I420VideoFrame decoded_image_;
DecodedImageCallback* decode_complete_callback_;
bool inited_;
bool feedback_mode_;

View File

@ -11,6 +11,7 @@
#ifndef WEBRTC_MODULES_INTERFACE_VIDEO_CODING_H_
#define WEBRTC_MODULES_INTERFACE_VIDEO_CODING_H_
#include "common_video/interface/i420_video_frame.h"
#include "modules/interface/module.h"
#include "modules/interface/module_common_types.h"
#include "modules/video_coding/main/interface/video_coding_defines.h"
@ -252,7 +253,7 @@ public:
// Return value : VCM_OK, on success.
// < 0, on error.
virtual WebRtc_Word32 AddVideoFrame(
const VideoFrame& videoFrame,
const I420VideoFrame& videoFrame,
const VideoContentMetrics* contentMetrics = NULL,
const CodecSpecificInfo* codecSpecificInfo = NULL) = 0;

View File

@ -12,6 +12,7 @@
#define WEBRTC_MODULES_INTERFACE_VIDEO_CODING_DEFINES_H_
#include "typedefs.h"
#include "common_video/interface/i420_video_frame.h"
#include "modules/interface/module_common_types.h"
namespace webrtc {
@ -96,7 +97,7 @@ class VCMFrameStorageCallback {
// Callback class used for passing decoded frames which are ready to be rendered.
class VCMReceiveCallback {
public:
virtual WebRtc_Word32 FrameToRender(VideoFrame& videoFrame) = 0;
virtual WebRtc_Word32 FrameToRender(I420VideoFrame& videoFrame) = 0;
virtual WebRtc_Word32 ReceivedDecodedReferenceFrame(
const WebRtc_UWord64 pictureId) {
return -1;

View File

@ -40,13 +40,13 @@ void VCMDecodedFrameCallback::SetUserReceiveCallback(
_receiveCallback = receiveCallback;
}
WebRtc_Word32 VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage)
WebRtc_Word32 VCMDecodedFrameCallback::Decoded(I420VideoFrame& decodedImage)
{
// TODO(holmer): We should improve this so that we can handle multiple
// callbacks from one call to Decode().
CriticalSectionScoped cs(_critSect);
VCMFrameInformation* frameInfo = static_cast<VCMFrameInformation*>(
_timestampMap.Pop(decodedImage.TimeStamp()));
_timestampMap.Pop(decodedImage.timestamp()));
if (frameInfo == NULL)
{
// The map should never be empty or full if this callback is called.
@ -54,14 +54,14 @@ WebRtc_Word32 VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage)
}
_timing.StopDecodeTimer(
decodedImage.TimeStamp(),
decodedImage.timestamp(),
frameInfo->decodeStartTimeMs,
_clock->MillisecondTimestamp());
if (_receiveCallback != NULL)
{
_frame.SwapFrame(decodedImage);
_frame.SetRenderTime(frameInfo->renderTimeMs);
_frame.SwapFrame(&decodedImage);
_frame.set_render_time_ms(frameInfo->renderTimeMs);
WebRtc_Word32 callbackReturn = _receiveCallback->FrameToRender(_frame);
if (callbackReturn < 0)
{
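SwapFrame(&decodedImage) hands the decoded planes to the cached _frame without a deep copy, assuming the implementation exchanges the underlying plane buffers. A sketch of those assumed semantics:

// Sketch of assumed SwapFrame semantics (pointer exchange, no copy);
// uses assert() from <assert.h>.
webrtc::I420VideoFrame a, b;
a.CreateEmptyFrame(320, 240, 320, 160, 160);
b.CreateEmptyFrame(320, 240, 320, 160, 160);
const uint8_t* a_y = a.buffer(webrtc::kYPlane);
a.SwapFrame(&b);
assert(b.buffer(webrtc::kYPlane) == a_y);  // b now owns a's old buffer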

View File

@ -38,7 +38,7 @@ public:
virtual ~VCMDecodedFrameCallback();
void SetUserReceiveCallback(VCMReceiveCallback* receiveCallback);
virtual WebRtc_Word32 Decoded(VideoFrame& decodedImage);
virtual WebRtc_Word32 Decoded(I420VideoFrame& decodedImage);
virtual WebRtc_Word32 ReceivedDecodedReferenceFrame(const WebRtc_UWord64 pictureId);
virtual WebRtc_Word32 ReceivedDecodedFrame(const WebRtc_UWord64 pictureId);
@ -50,7 +50,7 @@ public:
private:
CriticalSectionWrapper* _critSect;
TickTimeBase* _clock;
VideoFrame _frame;
I420VideoFrame _frame;
VCMReceiveCallback* _receiveCallback;
VCMTiming& _timing;
VCMTimestampMap _timestampMap;

View File

@ -57,7 +57,7 @@ VCMGenericEncoder::InitEncode(const VideoCodec* settings,
}
WebRtc_Word32
VCMGenericEncoder::Encode(const VideoFrame& inputFrame,
VCMGenericEncoder::Encode(const I420VideoFrame& inputFrame,
const CodecSpecificInfo* codecSpecificInfo,
const std::vector<FrameType>* frameTypes) {
std::vector<VideoFrameType> video_frame_types(frameTypes->size(),
@ -119,7 +119,7 @@ WebRtc_Word32 VCMGenericEncoder::RequestFrame(
if (!frame_types) {
return 0;
}
VideoFrame image;
I420VideoFrame image;
std::vector<VideoFrameType> video_frame_types(kVideoFrameDelta);
if (frame_types) {
VCMEncodedFrame::ConvertFrameTypes(*frame_types, &video_frame_types);

View File

@ -99,7 +99,7 @@ public:
* cameraFrameRate : request or information from the remote side
* frameType : The requested frame type to encode
*/
WebRtc_Word32 Encode(const VideoFrame& inputFrame,
WebRtc_Word32 Encode(const I420VideoFrame& inputFrame,
const CodecSpecificInfo* codecSpecificInfo,
const std::vector<FrameType>* frameTypes);
/**

View File

@ -9,6 +9,7 @@
*/
#include "video_coding_impl.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "common_types.h"
#include "encoded_frame.h"
#include "jitter_buffer.h"
@ -652,7 +653,7 @@ VideoCodingModuleImpl::SetVideoProtection(VCMVideoProtection videoProtection,
// Add one raw video frame to the encoder, blocking.
WebRtc_Word32
VideoCodingModuleImpl::AddVideoFrame(const VideoFrame& videoFrame,
VideoCodingModuleImpl::AddVideoFrame(const I420VideoFrame& videoFrame,
const VideoContentMetrics* contentMetrics,
const CodecSpecificInfo* codecSpecificInfo)
{
@ -685,10 +686,10 @@ VideoCodingModuleImpl::AddVideoFrame(const VideoFrame& videoFrame,
&_nextFrameTypes);
if (_encoderInputFile != NULL)
{
if (fwrite(videoFrame.Buffer(), 1, videoFrame.Length(),
_encoderInputFile) != videoFrame.Length()) {
return -1;
}
if (PrintI420VideoFrame(videoFrame, _encoderInputFile) < 0)
{
return -1;
}
}
if (ret < 0)
{
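PrintI420VideoFrame replaces the flat fwrite because the frame's planes may be strided and are no longer one contiguous buffer. A sketch of the equivalent plane-by-plane write (the library's exact loop may differ; the accessors follow those used elsewhere in this change):

// Sketch: write the visible pixels of each plane, skipping stride padding.
int WritePackedI420(const webrtc::I420VideoFrame& frame, FILE* file) {
  for (int p = 0; p < webrtc::kNumOfPlanes; ++p) {
    webrtc::PlaneType t = static_cast<webrtc::PlaneType>(p);
    int w = (p == webrtc::kYPlane) ? frame.width()
                                   : (frame.width() + 1) / 2;
    int h = (p == webrtc::kYPlane) ? frame.height()
                                   : (frame.height() + 1) / 2;
    const uint8_t* plane = frame.buffer(t);
    for (int y = 0; y < h; ++y, plane += frame.stride(t)) {
      if (fwrite(plane, 1, w, file) != static_cast<size_t>(w))
        return -1;
    }
  }
  return 0;
}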

View File

@ -144,7 +144,7 @@ public:
// Add one raw video frame to the encoder, blocking.
virtual WebRtc_Word32 AddVideoFrame(
const VideoFrame& videoFrame,
const I420VideoFrame& videoFrame,
const VideoContentMetrics* _contentMetrics = NULL,
const CodecSpecificInfo* codecSpecificInfo = NULL);

View File

@ -54,7 +54,6 @@ class TestVideoCodingModule : public ::testing::Test {
virtual void TearDown() {
VideoCodingModule::Destroy(vcm_);
input_frame_.Free();
}
void ExpectIntraRequest(int stream) {
@ -88,7 +87,7 @@ class TestVideoCodingModule : public ::testing::Test {
VideoCodingModule* vcm_;
NiceMock<MockVideoEncoder> encoder_;
VideoFrame input_frame_;
I420VideoFrame input_frame_;
VideoCodec settings_;
};

View File

@ -114,15 +114,20 @@ CodecDataBaseTest::Perform(CmdArgs& args)
// Register the callback; encode and decode with the same VCM (could be changed later).
_encodeCompleteCallback->RegisterReceiverVCM(_vcm);
// preparing a frame to be encoded
VideoFrame sourceFrame;
sourceFrame.VerifyAndAllocate(_lengthSourceFrame);
WebRtc_UWord8* tmpBuffer = new WebRtc_UWord8[_lengthSourceFrame];
TEST(fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile) > 0);
sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer);
sourceFrame.SetHeight(_height);
sourceFrame.SetWidth(_width);
I420VideoFrame sourceFrame;
int half_width = (_width + 1) / 2;
int half_height = (_height + 1) / 2;
int size_y = _width * _height;
int size_uv = half_width * half_height;
sourceFrame.CreateFrame(size_y, tmpBuffer,
size_uv, tmpBuffer + size_y,
size_uv, tmpBuffer + size_y + size_uv,
_width, _height,
_width, half_width, half_width);
_timeStamp += (WebRtc_UWord32)(9e4 / _frameRate);
sourceFrame.SetTimeStamp(_timeStamp);
sourceFrame.set_timestamp(_timeStamp);
// Encoder registration
TEST (VideoCodingModule::NumberOfCodecs() > 0);
TEST(VideoCodingModule::Codec(-1, &sendCodec) < 0);
@ -199,7 +204,7 @@ CodecDataBaseTest::Perform(CmdArgs& args)
TEST(_vcm->Decode() == VCM_OK);
waitEvent->Wait(33);
_timeStamp += (WebRtc_UWord32)(9e4 / _frameRate);
sourceFrame.SetTimeStamp(_timeStamp);
sourceFrame.set_timestamp(_timeStamp);
TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
TEST(_vcm->Decode() == VCM_OK);
@ -234,14 +239,14 @@ CodecDataBaseTest::Perform(CmdArgs& args)
TEST(_vcm->ResetDecoder() == VCM_OK);
waitEvent->Wait(33);
_timeStamp += (WebRtc_UWord32)(9e4 / _frameRate);
sourceFrame.SetTimeStamp(_timeStamp);
sourceFrame.set_timestamp(_timeStamp);
TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
// Try to decode a delta frame. Should get a warning since the
// "require key frame" setting is enabled and no frame type request
// callback has been registered.
TEST(_vcm->Decode() == VCM_MISSING_CALLBACK);
TEST(_vcm->IntraFrameRequest(0) == VCM_OK);
_timeStamp += (WebRtc_UWord32)(9e4 / _frameRate);
sourceFrame.SetTimeStamp(_timeStamp);
sourceFrame.set_timestamp(_timeStamp);
TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
TEST(_vcm->Decode() == VCM_OK);
@ -254,13 +259,13 @@ CodecDataBaseTest::Perform(CmdArgs& args)
TEST(_vcm->IntraFrameRequest(0) == VCM_OK);
waitEvent->Wait(33);
_timeStamp += (WebRtc_UWord32)(9e4 / _frameRate);
sourceFrame.SetTimeStamp(_timeStamp);
sourceFrame.set_timestamp(_timeStamp);
TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
TEST(_vcm->Decode() == VCM_OK);
TEST(_vcm->RegisterReceiveCodec(&sendCodec, 1) == VCM_OK);
waitEvent->Wait(33);
_timeStamp += (WebRtc_UWord32)(9e4 / _frameRate);
sourceFrame.SetTimeStamp(_timeStamp);
sourceFrame.set_timestamp(_timeStamp);
TEST(_vcm->IntraFrameRequest(0) == VCM_OK);
TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
TEST(_vcm->Decode() == VCM_OK);
@ -280,7 +285,6 @@ CodecDataBaseTest::Perform(CmdArgs& args)
rewind(_sourceFile);
_vcm->InitializeReceiver();
_vcm->InitializeSender();
sourceFrame.Free();
VCMDecodeCompleteCallback* decodeCallCDT = new VCMDecodeCompleteCallback(_decodedFile);
VCMEncodeCompleteCallback* encodeCallCDT = new VCMEncodeCompleteCallback(_encodedFile);
_vcm->RegisterReceiveCallback(decodeCallCDT);
@ -290,8 +294,8 @@ CodecDataBaseTest::Perform(CmdArgs& args)
{
// Register all available decoders.
int i, j;
//double psnr;
sourceFrame.VerifyAndAllocate(_lengthSourceFrame);
sourceFrame.CreateEmptyFrame(_width, _height, _width,
(_width + 1) / 2, (_width + 1) / 2);
_vcm->RegisterReceiveCallback(decodeCallCDT);
for (i=0; i < VideoCodingModule::NumberOfCodecs(); i++)
{
@ -326,17 +330,18 @@ CodecDataBaseTest::Perform(CmdArgs& args)
_vcm->EnableFrameDropper(false);
printf("Encoding with %s \n\n", sendCodec.plName);
for (j=0; j < int(300/VideoCodingModule::NumberOfCodecs()); j++)// assuming 300 frames, NumberOfCodecs <= 10
// Assuming 300 frames, NumberOfCodecs <= 10.
for (j=0; j < int(300/VideoCodingModule::NumberOfCodecs()); j++)
{
frameCnt++;
TEST(fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile) > 0);
// building source frame
sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer);
sourceFrame.SetHeight(_height);
sourceFrame.SetWidth(_width);
sourceFrame.SetLength(_lengthSourceFrame);
sourceFrame.CreateFrame(size_y, tmpBuffer,
size_uv, tmpBuffer + size_y,
size_uv, tmpBuffer + size_y + size_uv,
_width, _height,
_width, half_width, half_width);
_timeStamp += (WebRtc_UWord32)(9e4 / _frameRate);
sourceFrame.SetTimeStamp(_timeStamp);
sourceFrame.set_timestamp(_timeStamp);
// send frame to the encoder
TEST (_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
waitEvent->Wait(33); // was 100
@ -373,7 +378,6 @@ CodecDataBaseTest::Perform(CmdArgs& args)
}
} // end: iterate codecs
rewind(_sourceFile);
sourceFrame.Free();
delete [] tmpBuffer;
delete decodeCallCDT;
delete encodeCallCDT;

View File

@ -13,7 +13,7 @@
#include <stdio.h>
#include "../source/event.h"
#include "rtp_rtcp.h"
#include "module_common_types.h"
#include "common_video/interface/i420_video_frame.h"
#include "test_macros.h"
#include "modules/video_coding/main/source/mock/fake_tick_time.h"
@ -122,8 +122,7 @@ GenericCodecTest::Perform(CmdArgs& args)
_vcm->Codec(0, &_sendCodec);
TEST(_vcm->RegisterSendCodec(&_sendCodec, 4, 1440) == VCM_OK);
// sanity on encoder registration
VideoFrame sourceFrame;
sourceFrame.VerifyAndAllocate(_lengthSourceFrame);
I420VideoFrame sourceFrame;
_vcm->InitializeSender();
TEST(_vcm->Codec(kVideoCodecVP8, &sendCodec) == 0);
TEST(_vcm->RegisterSendCodec(&sendCodec, -1, 1440) < 0); // bad number of cores
@ -147,12 +146,16 @@ GenericCodecTest::Perform(CmdArgs& args)
}
WebRtc_UWord8* tmpBuffer = new WebRtc_UWord8[_lengthSourceFrame];
TEST(fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile) > 0);
// building source frame
sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer);
sourceFrame.SetHeight(_height);
sourceFrame.SetWidth(_width);
sourceFrame.SetTimeStamp(_timeStamp++);
// encode/decode
int half_width = (_width + 1) / 2;
int half_height = (_height + 1) / 2;
int size_y = _width * _height;
int size_uv = half_width * half_height;
sourceFrame.CreateFrame(size_y, tmpBuffer,
size_uv, tmpBuffer + size_y,
size_uv, tmpBuffer + size_y + size_uv,
_width, _height,
_width, half_width, half_width);
sourceFrame.set_timestamp(_timeStamp++);
TEST(_vcm->AddVideoFrame(sourceFrame) < 0 ); // encoder uninitialized
_vcm->InitializeReceiver();
TEST(_vcm->SetChannelParameters(100, 0, 0) < 0);// setting rtt when receiver uninitialized
@ -162,7 +165,6 @@ GenericCodecTest::Perform(CmdArgs& args)
/**************************************/
// Register both encoder and decoder; reset the decoder and encode, set up the decoder, reset the encoder and decode.
rewind(_sourceFile);
sourceFrame.Free();
_vcm->InitializeReceiver();
_vcm->InitializeSender();
NumberOfCodecs = _vcm->NumberOfCodecs();
@ -195,11 +197,13 @@ GenericCodecTest::Perform(CmdArgs& args)
for (i = 0; i < _frameRate; i++)
{
TEST(fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile) > 0);
sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer);
sourceFrame.SetHeight(_height);
sourceFrame.SetWidth(_width);
sourceFrame.CreateFrame(size_y, tmpBuffer,
size_uv, tmpBuffer + size_y,
size_uv, tmpBuffer + size_y + size_uv,
_width, _height,
_width, half_width, half_width);
_timeStamp += (WebRtc_UWord32)(9e4 / static_cast<float>(_frameRate));
sourceFrame.SetTimeStamp(_timeStamp);
sourceFrame.set_timestamp(_timeStamp);
TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
IncrementDebugClock(_frameRate);
_vcm->Process();
@ -245,7 +249,7 @@ GenericCodecTest::Perform(CmdArgs& args)
TEST(_vcm->RegisterReceiveCodec(&sendCodec, 1) == VCM_OK);
TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
_timeStamp += (WebRtc_UWord32)(9e4 / static_cast<float>(_frameRate));
sourceFrame.SetTimeStamp(_timeStamp);
sourceFrame.set_timestamp(_timeStamp);
// First packet of a subsequent frame required before the jitter buffer
// will allow decoding an incomplete frame.
TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
@ -269,8 +273,8 @@ GenericCodecTest::Perform(CmdArgs& args)
_vcm->InitializeSender();
_vcm->InitializeReceiver();
rewind(_sourceFile);
sourceFrame.Free();
sourceFrame.VerifyAndAllocate(_lengthSourceFrame);
sourceFrame.CreateEmptyFrame(_width, _height, _width,
(_width + 1) / 2, (_width + 1) / 2);
const float bitRate[] = {100, 400, 600, 1000, 2000};
const float nBitrates = sizeof(bitRate)/sizeof(*bitRate);
float _bitRate = 0;
@ -315,11 +319,14 @@ GenericCodecTest::Perform(CmdArgs& args)
_lengthSourceFrame)
{
_frameCnt++;
sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer);
sourceFrame.SetHeight(_height);
sourceFrame.SetWidth(_width);
sourceFrame.CreateFrame(size_y, tmpBuffer,
size_uv, tmpBuffer + size_y,
size_uv, tmpBuffer + size_y + size_uv,
_width, _height,
_width, (_width + 1) / 2,
(_width + 1) / 2);
_timeStamp += (WebRtc_UWord32)(9e4 / static_cast<float>(_frameRate));
sourceFrame.SetTimeStamp(_timeStamp);
sourceFrame.set_timestamp(_timeStamp);
ret = _vcm->AddVideoFrame(sourceFrame);
IncrementDebugClock(_frameRate);
@ -364,8 +371,6 @@ GenericCodecTest::Perform(CmdArgs& args)
/* Encoder Pipeline Delay Test */
/******************************/
_vcm->InitializeSender();
sourceFrame.Free();
sourceFrame.VerifyAndAllocate(_lengthSourceFrame);
NumberOfCodecs = _vcm->NumberOfCodecs();
bool encodeComplete = false;
// going over all available codecs
@ -383,11 +388,13 @@ GenericCodecTest::Perform(CmdArgs& args)
{
TEST(fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile) > 0);
_frameCnt++;
sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer);
sourceFrame.SetHeight(_height);
sourceFrame.SetWidth(_width);
sourceFrame.CreateFrame(size_y, tmpBuffer,
size_uv, tmpBuffer + size_y,
size_uv, tmpBuffer + size_y + size_uv,
_width, _height,
_width, half_width, half_width);
_timeStamp += (WebRtc_UWord32)(9e4 / static_cast<float>(_frameRate));
sourceFrame.SetTimeStamp(_timeStamp);
sourceFrame.set_timestamp(_timeStamp);
_vcm->AddVideoFrame(sourceFrame);
encodeComplete = _encodeCompleteCallback->EncodeComplete();
} // first frame encoded
@ -410,47 +417,6 @@ GenericCodecTest::Perform(CmdArgs& args)
VCMRTPEncodeCompleteCallback encCompleteCallback(&rtpModule);
_vcm->InitializeSender();
// TEST DISABLED FOR NOW SINCE VP8 DOESN'T HAVE THIS FEATURE
// sourceFrame.Free();
// sourceFrame.VerifyAndAllocate(_lengthSourceFrame);
// NumberOfCodecs = _vcm->NumberOfCodecs();
// WebRtc_UWord32 targetPayloadSize = 500;
// rtpModule.SetMaxTransferUnit(targetPayloadSize);
// // going over all available codecs
// for (int k = 0; k < NumberOfCodecs; k++)
// {
// _vcm->Codec(k, &_sendCodec);
// if (strncmp(_sendCodec.plName, "VP8", 3) == 0)
// {
// // Only test with VP8
// continue;
// }
// rtpModule.RegisterSendPayload(_sendCodec.plName, _sendCodec.plType);
// // Make sure we only get one NAL unit per packet
// _vcm->InitializeSender();
// _vcm->RegisterSendCodec(&_sendCodec, 4, targetPayloadSize);
// sendCallback.SetMaxPayloadSize(targetPayloadSize);
// _vcm->RegisterTransportCallback(&encCompleteCallback);
// sendCallback.Reset();
// _frameCnt = 0;
// rewind(_sourceFile);
// while (!feof(_sourceFile))
// {
// fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile);
// _frameCnt++;
// sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer);
// sourceFrame.SetHeight(_height);
// sourceFrame.SetWidth(_width);
// _timeStamp += (WebRtc_UWord32)(9e4 / static_cast<float>(_frameRate));
// sourceFrame.SetTimeStamp(_timeStamp);
// ret = _vcm->AddVideoFrame(sourceFrame);
// } // first frame encoded
// printf ("\n Codec type = %s \n",_sendCodec.plName);
// printf(" Average payload size = %f bytes, target = %u bytes\n", sendCallback.AveragePayloadSize(), targetPayloadSize);
// } // end for all codecs
// Test temporal decimation settings
for (int k = 0; k < NumberOfCodecs; k++)
{
@ -474,13 +440,14 @@ GenericCodecTest::Perform(CmdArgs& args)
_vcm->RegisterSendStatisticsCallback(&sendStats);
rewind(_sourceFile);
while (fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile) ==
_lengthSourceFrame)
{
sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer);
sourceFrame.SetHeight(_height);
sourceFrame.SetWidth(_width);
_lengthSourceFrame) {
sourceFrame.CreateFrame(size_y, tmpBuffer,
size_uv, tmpBuffer + size_y,
size_uv, tmpBuffer + size_y + size_uv,
_width, _height,
_width, half_width, half_width);
_timeStamp += (WebRtc_UWord32)(9e4 / static_cast<float>(_frameRate));
sourceFrame.SetTimeStamp(_timeStamp);
sourceFrame.set_timestamp(_timeStamp);
ret = _vcm->AddVideoFrame(sourceFrame);
if (_vcm->TimeUntilNextProcess() <= 0)
{

View File

@ -290,8 +290,7 @@ MediaOptTest::Perform()
}
// START TEST
VideoFrame sourceFrame;
sourceFrame.VerifyAndAllocate(_lengthSourceFrame);
I420VideoFrame sourceFrame;
WebRtc_UWord8* tmpBuffer = new WebRtc_UWord8[_lengthSourceFrame];
_vcm->SetChannelParameters((WebRtc_UWord32)_bitRate, (WebRtc_UWord8)_lossRate, _rttMS);
_vcm->RegisterReceiveCallback(&receiveCallback);
@ -299,17 +298,22 @@ MediaOptTest::Perform()
_frameCnt = 0;
_sumEncBytes = 0.0;
_numFramesDropped = 0;
int half_width = (_width + 1) / 2;
int half_height = (_height + 1) / 2;
int size_y = _width * _height;
int size_uv = half_width * half_height;
while (feof(_sourceFile)== 0)
{
TEST(fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile) > 0);
_frameCnt++;
sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer);
sourceFrame.SetHeight(_height);
sourceFrame.SetWidth(_width);
sourceFrame.CreateFrame(size_y, tmpBuffer,
size_uv, tmpBuffer + size_y,
size_uv, tmpBuffer + size_y + size_uv,
_width, _height,
_width, half_width, half_width);
_timeStamp += (WebRtc_UWord32)(9e4 / static_cast<float>(_frameRate));
sourceFrame.SetTimeStamp(_timeStamp);
sourceFrame.set_timestamp(_timeStamp);
TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
// inform RTP Module of error resilience features
//_rtp->SetFECCodeRate(protectionCallback.FECKeyRate(),protectionCallback.FECDeltaRate());
@ -331,8 +335,7 @@ MediaOptTest::Perform()
else
{
// write frame to file
if (fwrite(sourceFrame.Buffer(), 1, sourceFrame.Length(),
_actualSourceFile) != sourceFrame.Length()) {
if (PrintI420VideoFrame(sourceFrame, _actualSourceFile) < 0) {
return -1;
}
}

View File

@ -34,12 +34,11 @@ MainSenderThread(void* obj)
SendSharedState* state = static_cast<SendSharedState*>(obj);
EventWrapper& waitEvent = *EventWrapper::Create();
// preparing a frame for encoding
VideoFrame sourceFrame;
I420VideoFrame sourceFrame;
WebRtc_Word32 width = state->_args.width;
WebRtc_Word32 height = state->_args.height;
float frameRate = state->_args.frameRate;
WebRtc_Word32 lengthSourceFrame = 3*width*height/2;
sourceFrame.VerifyAndAllocate(lengthSourceFrame);
WebRtc_UWord8* tmpBuffer = new WebRtc_UWord8[lengthSourceFrame];
if (state->_sourceFile == NULL)
@ -58,11 +57,17 @@ MainSenderThread(void* obj)
TEST(fread(tmpBuffer, 1, lengthSourceFrame,state->_sourceFile) > 0 ||
feof(state->_sourceFile));
state->_frameCnt++;
sourceFrame.CopyFrame(lengthSourceFrame, tmpBuffer);
sourceFrame.SetHeight(height);
sourceFrame.SetWidth(width);
int size_y = width * height;
int half_width = (width + 1) / 2;
int half_height = (height + 1) / 2;
int size_uv = half_width * half_height;
sourceFrame.CreateFrame(size_y, tmpBuffer,
size_uv, tmpBuffer + size_y,
size_uv, tmpBuffer + size_y + size_uv,
width, height,
width, half_width, half_width);
state->_timestamp += (WebRtc_UWord32)(9e4 / frameRate);
sourceFrame.SetTimeStamp(state->_timestamp);
sourceFrame.set_timestamp(state->_timestamp);
WebRtc_Word32 ret = state->_vcm.AddVideoFrame(sourceFrame);
if (ret < 0)

View File

@ -16,6 +16,7 @@
#include <time.h>
#include "../source/event.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "common_types.h"
#include "modules/video_coding/main/source/mock/fake_tick_time.h"
#include "test_callbacks.h"
@ -152,13 +153,13 @@ VCMNTDecodeCompleCallback::~VCMNTDecodeCompleCallback()
fclose(_decodedFile);
}
WebRtc_Word32
VCMNTDecodeCompleCallback::FrameToRender(webrtc::VideoFrame& videoFrame)
VCMNTDecodeCompleCallback::FrameToRender(webrtc::I420VideoFrame& videoFrame)
{
if (videoFrame.Width() != _currentWidth ||
videoFrame.Height() != _currentHeight)
if (videoFrame.width() != _currentWidth ||
videoFrame.height() != _currentHeight)
{
_currentWidth = videoFrame.Width();
_currentHeight = videoFrame.Height();
_currentWidth = videoFrame.width();
_currentHeight = videoFrame.height();
if (_decodedFile != NULL)
{
fclose(_decodedFile);
@ -166,11 +167,11 @@ VCMNTDecodeCompleCallback::FrameToRender(webrtc::VideoFrame& videoFrame)
}
_decodedFile = fopen(_outname.c_str(), "wb");
}
if (fwrite(videoFrame.Buffer(), 1, videoFrame.Length(),
_decodedFile) != videoFrame.Length()) {
if (PrintI420VideoFrame(videoFrame, _decodedFile) < 0) {
return -1;
}
_decodedBytes+= videoFrame.Length();
_decodedBytes += webrtc::CalcBufferSize(webrtc::kI420,
videoFrame.width(), videoFrame.height());
return VCM_OK;
}
@ -270,8 +271,13 @@ NormalTest::Perform(CmdArgs& args)
///////////////////////
/// Start Test
///////////////////////
VideoFrame sourceFrame;
sourceFrame.VerifyAndAllocate(_lengthSourceFrame);
I420VideoFrame sourceFrame;
int size_y = _width * _height;
int half_width = (_width + 1) / 2;
int half_height = (_height + 1) / 2;
int size_uv = half_width * half_height;
sourceFrame.CreateEmptyFrame(_width, _height,
_width, half_width, half_width);
WebRtc_UWord8* tmpBuffer = new WebRtc_UWord8[_lengthSourceFrame];
double startTime = clock()/(double)CLOCKS_PER_SEC;
_vcm->SetChannelParameters((WebRtc_UWord32)_bitRate, 0, 0);
@ -288,23 +294,29 @@ NormalTest::Perform(CmdArgs& args)
TEST(fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile) > 0 ||
feof(_sourceFile));
_frameCnt++;
sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer);
sourceFrame.SetHeight(_height);
sourceFrame.SetWidth(_width);
sourceFrame.CreateFrame(size_y, tmpBuffer,
size_uv, tmpBuffer + size_y,
size_uv, tmpBuffer + size_y + size_uv,
_width, _height,
_width, half_width, half_width);
_timeStamp += (WebRtc_UWord32)(9e4 / static_cast<float>(_sendCodec.maxFramerate));
sourceFrame.SetTimeStamp(_timeStamp);
_encodeTimes[int(sourceFrame.TimeStamp())] = clock()/(double)CLOCKS_PER_SEC;
sourceFrame.set_timestamp(_timeStamp);
_encodeTimes[int(sourceFrame.timestamp())] =
clock()/(double)CLOCKS_PER_SEC;
WebRtc_Word32 ret = _vcm->AddVideoFrame(sourceFrame);
double encodeTime = clock()/(double)CLOCKS_PER_SEC - _encodeTimes[int(sourceFrame.TimeStamp())];
double encodeTime = clock()/(double)CLOCKS_PER_SEC -
_encodeTimes[int(sourceFrame.timestamp())];
_totalEncodeTime += encodeTime;
if (ret < 0)
{
printf("Error in AddFrame: %d\n", ret);
//exit(1);
}
_decodeTimes[int(sourceFrame.TimeStamp())] = clock()/(double)CLOCKS_PER_SEC; // same timestamp value for encode and decode
_decodeTimes[int(sourceFrame.timestamp())] =
clock()/(double)CLOCKS_PER_SEC;
ret = _vcm->Decode();
_totalDecodeTime += clock()/(double)CLOCKS_PER_SEC - _decodeTimes[int(sourceFrame.TimeStamp())];
_totalDecodeTime += clock()/(double)CLOCKS_PER_SEC -
_decodeTimes[int(sourceFrame.timestamp())];
if (ret < 0)
{
printf("Error in Decode: %d\n", ret);

View File

@ -68,14 +68,14 @@ public:
virtual ~VCMNTDecodeCompleCallback();
void SetUserReceiveCallback(webrtc::VCMReceiveCallback* receiveCallback);
// will write decoded frame into file
WebRtc_Word32 FrameToRender(webrtc::VideoFrame& videoFrame);
WebRtc_Word32 FrameToRender(webrtc::I420VideoFrame& videoFrame);
WebRtc_Word32 DecodedBytes();
private:
FILE* _decodedFile;
std::string _outname;
WebRtc_UWord32 _decodedBytes;
WebRtc_UWord32 _currentWidth;
WebRtc_UWord32 _currentHeight;
int _decodedBytes;
int _currentWidth;
int _currentHeight;
}; // end of VCMDecodeCompleCallback class
@ -89,8 +89,8 @@ public:
static int RunTest(CmdArgs& args);
WebRtc_Word32 Perform(CmdArgs& args);
// Option: turn into private and call from Perform.
WebRtc_UWord32 Width() const { return _width; };
WebRtc_UWord32 Height() const { return _height; };
int Width() const { return _width; };
int Height() const { return _height; };
webrtc::VideoCodecType VideoType() const { return _videoType; };
@ -118,8 +118,8 @@ protected:
FILE* _decodedFile;
FILE* _encodedFile;
std::fstream _log;
WebRtc_UWord32 _width;
WebRtc_UWord32 _height;
int _width;
int _height;
float _frameRate;
float _bitRate;
WebRtc_UWord32 _lengthSourceFrame;

View File

@ -15,11 +15,11 @@
#include <time.h>
#include "../source/event.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "modules/video_coding/main/source/tick_time_base.h"
#include "test_callbacks.h"
#include "test_macros.h"
#include "testsupport/metrics/video_metrics.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
using namespace webrtc;
@ -215,9 +215,8 @@ QualityModesTest::Perform()
// disabling internal VCM frame dropper
_vcm->EnableFrameDropper(false);
VideoFrame sourceFrame;
VideoFrame *decimatedFrame = NULL;
sourceFrame.VerifyAndAllocate(_lengthSourceFrame);
I420VideoFrame sourceFrame;
I420VideoFrame *decimatedFrame = NULL;
WebRtc_UWord8* tmpBuffer = new WebRtc_UWord8[_lengthSourceFrame];
double startTime = clock()/(double)CLOCKS_PER_SEC;
_vcm->SetChannelParameters((WebRtc_UWord32)_bitRate, 0, 0);
@ -238,18 +237,22 @@ QualityModesTest::Perform()
WebRtc_Word32 ret = 0;
_numFramesDroppedVPM = 0;
_numFramesDroppedVPM = 0;
while (feof(_sourceFile)== 0)
{
TEST(fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile) > 0);
_frameCnt++;
sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer);
sourceFrame.SetHeight(_nativeHeight);
sourceFrame.SetWidth(_nativeWidth);
int size_y = _nativeWidth * _nativeHeight;
int size_uv = ((_nativeWidth + 1) / 2) * ((_nativeHeight + 1) / 2);
sourceFrame.CreateFrame(size_y, tmpBuffer,
size_uv, tmpBuffer + size_y,
size_uv, tmpBuffer + size_y + size_uv,
_nativeWidth, _nativeHeight,
_nativeWidth, (_nativeWidth + 1) / 2,
(_nativeWidth + 1) / 2);
_timeStamp += (WebRtc_UWord32)(9e4 / static_cast<float>(codec.maxFramerate));
sourceFrame.SetTimeStamp(_timeStamp);
sourceFrame.set_timestamp(_timeStamp);
ret = _vpm->PreprocessFrame(sourceFrame, &decimatedFrame);
if (ret == 1)
@ -270,20 +273,24 @@ QualityModesTest::Perform()
}
// counting only encoding time
_encodeTimes[int(sourceFrame.TimeStamp())] = clock()/(double)CLOCKS_PER_SEC;
_encodeTimes[int(sourceFrame.timestamp())] =
clock()/(double)CLOCKS_PER_SEC;
WebRtc_Word32 ret = _vcm->AddVideoFrame(*decimatedFrame, contentMetrics);
_totalEncodeTime += clock()/(double)CLOCKS_PER_SEC - _encodeTimes[int(sourceFrame.TimeStamp())];
_totalEncodeTime += clock()/(double)CLOCKS_PER_SEC -
_encodeTimes[int(sourceFrame.timestamp())];
if (ret < 0)
{
printf("Error in AddFrame: %d\n", ret);
//exit(1);
}
_decodeTimes[int(sourceFrame.TimeStamp())] = clock()/(double)CLOCKS_PER_SEC; // same timestamp value for encode and decode
_decodeTimes[int(sourceFrame.timestamp())] =
clock()/(double)CLOCKS_PER_SEC;
ret = _vcm->Decode();
_totalDecodeTime += clock()/(double)CLOCKS_PER_SEC - _decodeTimes[int(sourceFrame.TimeStamp())];
_totalDecodeTime += clock()/(double)CLOCKS_PER_SEC -
_decodeTimes[int(sourceFrame.timestamp())];
if (ret < 0)
{
printf("Error in Decode: %d\n", ret);
@ -308,7 +315,7 @@ QualityModesTest::Perform()
_frameRate = frameRateUpdate[change];
codec.startBitrate = (int)_bitRate;
codec.maxFramerate = (WebRtc_UWord8) _frameRate;
TEST(_vcm->RegisterSendCodec(&codec, 2, 1440) == VCM_OK);// will also set and init the desired codec
TEST(_vcm->RegisterSendCodec(&codec, 2, 1440) == VCM_OK);
change++;
}
}
@ -326,8 +333,6 @@ QualityModesTest::Perform()
return 0;
}
// Implements the callback called from the VCM to update the VPM with frame rate and size.
QMTestVideoSettingsCallback::QMTestVideoSettingsCallback():
_vpm(NULL),
_vcm(NULL)
@ -415,48 +420,32 @@ VCMQMDecodeCompleCallback::~VCMQMDecodeCompleCallback()
}
}
WebRtc_Word32
VCMQMDecodeCompleCallback::FrameToRender(VideoFrame& videoFrame)
VCMQMDecodeCompleCallback::FrameToRender(I420VideoFrame& videoFrame)
{
if ((_origWidth == videoFrame.Width()) && (_origHeight == videoFrame.Height()))
if ((_origWidth == videoFrame.width()) &&
(_origHeight == videoFrame.height()))
{
if (fwrite(videoFrame.Buffer(), 1, videoFrame.Length(),
_decodedFile) != videoFrame.Length()) {
if (PrintI420VideoFrame(videoFrame, _decodedFile) < 0) {
return -1;
}
_frameCnt++;
//printf("frame dec # %d", _frameCnt);
// no need for interpolator and decBuffer
if (_decBuffer != NULL)
{
delete [] _decBuffer;
_decBuffer = NULL;
}
// if (_interpolator != NULL)
// {
// deleteInterpolator(_interpolator);
// _interpolator = NULL;
// }
_decWidth = 0;
_decHeight = 0;
}
else
{
if ((_decWidth != videoFrame.Width()) || (_decHeight != videoFrame.Height()))
{
_decWidth = videoFrame.Width();
_decHeight = videoFrame.Height();
buildInterpolator();
}
// interpolateFrame(_interpolator, videoFrame.Buffer(),_decBuffer);
if (fwrite(_decBuffer, 1, _origWidth*_origHeight * 3/2,
_decodedFile) != _origWidth*_origHeight * 3/2) {
return -1;
}
_frameCnt++;
// TODO(mikhal): Add support for scaling.
return -1;
}
_decodedBytes += videoFrame.Length();
_decodedBytes += CalcBufferSize(kI420, videoFrame.width(),
videoFrame.height());
return VCM_OK;
}
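PrintI420VideoFrame itself is not shown in this diff; the call sites only establish that it takes a frame and a FILE* and returns a negative value on failure. A plausible sketch of the plane-by-plane write that replaces the old single fwrite, assuming the frame also exposes a stride(PlaneType) accessor:

// Hypothetical sketch, not the actual webrtc_libyuv implementation.
int PrintI420VideoFrameSketch(const webrtc::I420VideoFrame& frame, FILE* file) {
  const webrtc::PlaneType planes[3] = {webrtc::kYPlane, webrtc::kUPlane,
                                       webrtc::kVPlane};
  const int half_width = (frame.width() + 1) / 2;
  const int half_height = (frame.height() + 1) / 2;
  const int widths[3] = {frame.width(), half_width, half_width};
  const int heights[3] = {frame.height(), half_height, half_height};
  for (int p = 0; p < 3; ++p) {
    const uint8_t* src = frame.buffer(planes[p]);
    for (int row = 0; row < heights[p]; ++row) {
      // Write one row of pixels, then skip any stride padding.
      if (fwrite(src, 1, widths[p], file) != static_cast<size_t>(widths[p]))
        return -1;
      src += frame.stride(planes[p]);
    }
  }
  return 0;
}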
@@ -467,7 +456,8 @@ VCMQMDecodeCompleCallback::DecodedBytes()
}
void
VCMQMDecodeCompleCallback::SetOriginalFrameDimensions(WebRtc_Word32 width, WebRtc_Word32 height)
VCMQMDecodeCompleCallback::SetOriginalFrameDimensions(WebRtc_Word32 width,
WebRtc_Word32 height)
{
_origWidth = width;
_origHeight = height;

View File

@@ -34,11 +34,11 @@ private:
webrtc::VideoProcessingModule* _vpm;
WebRtc_UWord32 _width;
WebRtc_UWord32 _height;
int _width;
int _height;
float _frameRate;
WebRtc_UWord32 _nativeWidth;
WebRtc_UWord32 _nativeHeight;
int _nativeWidth;
int _nativeHeight;
float _nativeFrameRate;
WebRtc_UWord32 _numFramesDroppedVPM;
@@ -54,7 +54,7 @@ public:
virtual ~VCMQMDecodeCompleCallback();
void SetUserReceiveCallback(webrtc::VCMReceiveCallback* receiveCallback);
// will write decoded frame into file
WebRtc_Word32 FrameToRender(webrtc::VideoFrame& videoFrame);
WebRtc_Word32 FrameToRender(webrtc::I420VideoFrame& videoFrame);
WebRtc_Word32 DecodedBytes();
void SetOriginalFrameDimensions(WebRtc_Word32 width, WebRtc_Word32 height);
WebRtc_Word32 buildInterpolator();
@@ -62,10 +62,10 @@ private:
FILE* _decodedFile;
WebRtc_UWord32 _decodedBytes;
// QualityModesTest& _test;
WebRtc_UWord32 _origWidth;
WebRtc_UWord32 _origHeight;
WebRtc_UWord32 _decWidth;
WebRtc_UWord32 _decHeight;
int _origWidth;
int _origHeight;
int _decWidth;
int _decHeight;
// VideoInterpolator* _interpolator;
WebRtc_UWord8* _decBuffer;
WebRtc_UWord32 _frameCnt; // debug

View File

@@ -47,7 +47,7 @@ public:
virtual ~FrameReceiveCallback();
WebRtc_Word32 FrameToRender(webrtc::VideoFrame& videoFrame);
WebRtc_Word32 FrameToRender(webrtc::I420VideoFrame& videoFrame);
private:
static void SplitFilename(std::string filename, std::string* basename,

View File

@@ -12,6 +12,7 @@
#include <cmath>
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "modules/video_coding/main/source/tick_time_base.h"
#include "rtp_dump.h"
#include "test_macros.h"
@@ -187,13 +188,13 @@ VCMRTPEncodeCompleteCallback::EncodeComplete()
// Decoded Frame Callback Implementation
WebRtc_Word32
VCMDecodeCompleteCallback::FrameToRender(VideoFrame& videoFrame)
VCMDecodeCompleteCallback::FrameToRender(I420VideoFrame& videoFrame)
{
if (fwrite(videoFrame.Buffer(), 1, videoFrame.Length(),
_decodedFile) != videoFrame.Length()) {
if (PrintI420VideoFrame(videoFrame, _decodedFile) < 0) {
return -1;
}
_decodedBytes+= videoFrame.Length();
_decodedBytes+= CalcBufferSize(kI420, videoFrame.width(),
videoFrame.height());
return VCM_OK;
}

View File

@@ -142,7 +142,7 @@ public:
_decodedFile(decodedFile), _decodedBytes(0) {}
virtual ~VCMDecodeCompleteCallback() {}
// Write decoded frame into file
WebRtc_Word32 FrameToRender(webrtc::VideoFrame& videoFrame);
WebRtc_Word32 FrameToRender(webrtc::I420VideoFrame& videoFrame);
WebRtc_Word32 DecodedBytes();
private:
FILE* _decodedFile;

View File

@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "receiver_tests.h"
#include "video_coding.h"
#include "rtp_rtcp.h"
@@ -45,7 +46,7 @@ FrameReceiveCallback::~FrameReceiveCallback()
}
WebRtc_Word32
FrameReceiveCallback::FrameToRender(VideoFrame& videoFrame)
FrameReceiveCallback::FrameToRender(I420VideoFrame& videoFrame)
{
if (_timingFile == NULL)
{
@@ -56,15 +57,16 @@ FrameReceiveCallback::FrameToRender(VideoFrame& videoFrame)
return -1;
}
}
if (_outFile == NULL || videoFrame.Width() != width_ ||
videoFrame.Height() != height_)
if (_outFile == NULL ||
videoFrame.width() != static_cast<int>(width_) ||
videoFrame.height() != static_cast<int>(height_))
{
if (_outFile) {
fclose(_outFile);
}
printf("New size: %ux%u\n", videoFrame.Width(), videoFrame.Height());
width_ = videoFrame.Width();
height_ = videoFrame.Height();
printf("New size: %ux%u\n", videoFrame.width(), videoFrame.height());
width_ = videoFrame.width();
height_ = videoFrame.height();
std::string filename_with_width_height = AppendWidthAndHeight(
_outFilename, width_, height_);
_outFile = fopen(filename_with_width_height.c_str(), "wb");
@@ -74,10 +76,9 @@ FrameReceiveCallback::FrameToRender(VideoFrame& videoFrame)
}
}
fprintf(_timingFile, "%u, %u\n",
videoFrame.TimeStamp(),
MaskWord64ToUWord32(videoFrame.RenderTimeMs()));
if (fwrite(videoFrame.Buffer(), 1, videoFrame.Length(),
_outFile) != videoFrame.Length()) {
videoFrame.timestamp(),
MaskWord64ToUWord32(videoFrame.render_time_ms()));
if (PrintI420VideoFrame(videoFrame, _outFile) < 0) {
return -1;
}
return 0;

View File

@@ -18,6 +18,7 @@
#ifndef WEBRTC_MODULES_INTERFACE_VIDEO_PROCESSING_H
#define WEBRTC_MODULES_INTERFACE_VIDEO_PROCESSING_H
#include "common_video/interface/i420_video_frame.h"
#include "module.h"
#include "module_common_types.h"
#include "video_processing_defines.h"
@@ -120,7 +121,7 @@ public:
\return 0 on success, -1 on failure.
*/
static WebRtc_Word32 GetFrameStats(FrameStats* stats,
const VideoFrame& frame);
const I420VideoFrame& frame);
/**
Checks the validity of a FrameStats struct. Currently, valid implies only
@@ -148,7 +149,7 @@ public:
\param[in,out] frame
Pointer to the video frame.
*/
static WebRtc_Word32 ColorEnhancement(VideoFrame* frame);
static WebRtc_Word32 ColorEnhancement(I420VideoFrame* frame);
/**
Increases/decreases the luminance value.
@@ -162,7 +163,7 @@ public:
\return 0 on success, -1 on failure.
*/
static WebRtc_Word32 Brighten(VideoFrame* frame, int delta);
static WebRtc_Word32 Brighten(I420VideoFrame* frame, int delta);
/**
Detects and removes camera flicker from a video stream. Every frame from
@@ -179,7 +180,7 @@ public:
\return 0 on success, -1 on failure.
*/
virtual WebRtc_Word32 Deflickering(VideoFrame* frame,
virtual WebRtc_Word32 Deflickering(I420VideoFrame* frame,
FrameStats* stats) = 0;
/**
@@ -191,7 +192,7 @@ public:
\return The number of modified pixels on success, -1 on failure.
*/
virtual WebRtc_Word32 Denoising(VideoFrame* frame) = 0;
virtual WebRtc_Word32 Denoising(I420VideoFrame* frame) = 0;
/**
Detects if a video frame is excessively bright or dark. Returns a
@@ -206,7 +207,7 @@ public:
\return A member of BrightnessWarning on success, -1 on error
*/
virtual WebRtc_Word32 BrightnessDetection(const VideoFrame& frame,
virtual WebRtc_Word32 BrightnessDetection(const I420VideoFrame& frame,
const FrameStats& stats) = 0;
/**
@@ -283,8 +284,8 @@ public:
\return VPM_OK on success, a negative value on error (see error codes)
*/
virtual WebRtc_Word32 PreprocessFrame(const VideoFrame& frame,
VideoFrame** processedFrame) = 0;
virtual WebRtc_Word32 PreprocessFrame(const I420VideoFrame& frame,
I420VideoFrame** processedFrame) = 0;
/**
Return content metrics for the last processed frame

View File

@@ -17,21 +17,21 @@
namespace webrtc {
namespace VideoProcessing {
WebRtc_Word32 Brighten(VideoFrame* frame, int delta) {
WebRtc_Word32 Brighten(I420VideoFrame* frame, int delta) {
assert(frame);
if (frame->Buffer() == NULL) {
if (frame->IsZeroSize()) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1,
"Null frame pointer");
"zero size frame");
return VPM_PARAMETER_ERROR;
}
if (frame->Width() <= 0 || frame->Height() <= 0) {
if (frame->width() <= 0 || frame->height() <= 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1,
"Invalid frame size");
return VPM_PARAMETER_ERROR;
}
int numPixels = frame->Width() * frame->Height();
int numPixels = frame->width() * frame->height();
int lookUp[256];
for (int i = 0; i < 256; i++) {
@@ -39,7 +39,7 @@ WebRtc_Word32 Brighten(VideoFrame* frame, int delta) {
lookUp[i] = (val < 0) ? 0 : ((val > 255) ? 255 : val);
}
WebRtc_UWord8* tempPtr = frame->Buffer();
WebRtc_UWord8* tempPtr = frame->buffer(kYPlane);
for (int i = 0; i < numPixels; i++) {
*tempPtr = static_cast<WebRtc_UWord8>(lookUp[*tempPtr]);
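Brighten now touches only the Y plane, which suffices because I420 keeps brightness entirely in luma; the chroma planes are left untouched. A minimal usage sketch with illustrative dimensions (CreateEmptyFrame takes width, height, and the three strides, per the new I420VideoFrame API):

webrtc::I420VideoFrame frame;
// 320x240 frame; chroma strides are half the luma stride, rounded up.
frame.CreateEmptyFrame(320, 240, 320, 160, 160);
// ... fill the planes with video ...
// Raise luminance by 15; the lookup table clamps results to [0, 255].
webrtc::VideoProcessingModule::Brighten(&frame, 15);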

View File

@@ -17,7 +17,7 @@
namespace webrtc {
namespace VideoProcessing {
WebRtc_Word32 Brighten(VideoFrame* frame, int delta);
WebRtc_Word32 Brighten(I420VideoFrame* frame, int delta);
} // namespace VideoProcessing
} // namespace webrtc

View File

@@ -41,25 +41,18 @@ VPMBrightnessDetection::Reset()
}
WebRtc_Word32
VPMBrightnessDetection::ProcessFrame(const VideoFrame& frame,
VPMBrightnessDetection::ProcessFrame(const I420VideoFrame& frame,
const VideoProcessingModule::FrameStats&
stats)
{
if (frame.Buffer() == NULL)
if (frame.IsZeroSize())
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
"Null frame pointer");
return VPM_PARAMETER_ERROR;
}
int width = frame.Width();
int height = frame.Height();
if (width == 0 || height == 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
"Invalid frame size");
return VPM_PARAMETER_ERROR;
}
int width = frame.width();
int height = frame.height();
if (!VideoProcessingModule::ValidFrameStats(stats))
{
@@ -93,11 +86,11 @@ VPMBrightnessDetection::ProcessFrame(const VideoFrame& frame,
if (stats.mean < 90 || stats.mean > 170)
{
// Standard deviation of Y
const uint8_t* buffer = frame.buffer(kYPlane);
float stdY = 0;
uint8_t* buffer = frame.Buffer();
for (int h = 0; h < height; h += (1 << stats.subSamplHeight))
{
WebRtc_UWord32 row = h*width;
int row = h*width;
for (int w = 0; w < width; w += (1 << stats.subSamplWidth))
{
stdY += (buffer[w + row] - stats.mean) * (buffer[w + row] -
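The accumulation above is cut off in this view; it sums squared deviations of sub-sampled luma samples from stats.mean. A self-contained sketch of the estimate, assuming a contiguous Y plane (stride equal to width) and the sqrt-and-normalize step the full function presumably applies afterwards:

#include <cmath>

// Hypothetical standalone version of the sub-sampled luma std-dev estimate.
float SubsampledLumaStd(const uint8_t* y_plane, int width, int height,
                        int subSamplWidth, int subSamplHeight, float mean) {
  float sum_sq = 0;
  int count = 0;
  // subSamplWidth/subSamplHeight are power-of-two step exponents,
  // matching the FrameStats fields used above.
  for (int h = 0; h < height; h += (1 << subSamplHeight)) {
    const int row = h * width;
    for (int w = 0; w < width; w += (1 << subSamplWidth)) {
      const float d = y_plane[w + row] - mean;
      sum_sq += d * d;
      ++count;
    }
  }
  return count > 0 ? std::sqrt(sum_sq / count) : 0.0f;
}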

View File

@@ -29,7 +29,7 @@ public:
void Reset();
WebRtc_Word32 ProcessFrame(const VideoFrame& frame,
WebRtc_Word32 ProcessFrame(const I420VideoFrame& frame,
const VideoProcessingModule::FrameStats& stats);
private:

View File

@@ -18,26 +18,22 @@ namespace webrtc {
namespace VideoProcessing
{
WebRtc_Word32
ColorEnhancement(VideoFrame* frame)
ColorEnhancement(I420VideoFrame* frame)
{
assert(frame);
// pointers to U and V color pixels
WebRtc_UWord8* ptrU;
WebRtc_UWord8* ptrV;
WebRtc_UWord8 tempChroma;
const unsigned int size_y = frame->Width() * frame->Height();
const unsigned int size_uv = ((frame->Width() + 1) / 2) *
((frame->Height() + 1 ) / 2);
if (frame->Buffer() == NULL)
if (frame->IsZeroSize())
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing,
-1, "Null frame pointer");
return VPM_GENERAL_ERROR;
}
if (frame->Width() == 0 || frame->Height() == 0)
if (frame->width() == 0 || frame->height() == 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing,
-1, "Invalid frame size");
@@ -45,11 +41,12 @@ namespace VideoProcessing
}
// Set pointers to the first U and V pixels.
ptrU = frame->Buffer() + size_y;
ptrV = ptrU + size_uv;
ptrU = frame->buffer(kUPlane);
ptrV = frame->buffer(kVPlane);
int size_uv = ((frame->width() + 1) / 2) * ((frame->height() + 1) / 2);
// loop through all chrominance pixels and modify color
for (unsigned int ix = 0; ix < size_uv; ix++)
for (int ix = 0; ix < size_uv; ix++)
{
tempChroma = colorTable[*ptrU][*ptrV];
*ptrV = colorTable[*ptrV][*ptrU];
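The loop above is truncated here; the original presumably writes the remapped U value back and advances both pointers each iteration. A sketch of the complete chroma swap, assuming contiguous chroma planes (stride equal to (width + 1) / 2):

uint8_t* ptrU = frame->buffer(webrtc::kUPlane);
uint8_t* ptrV = frame->buffer(webrtc::kVPlane);
const int size_uv = ((frame->width() + 1) / 2) * ((frame->height() + 1) / 2);
for (int ix = 0; ix < size_uv; ix++) {
  // colorTable is a 256x256 remap; each chroma component indexes the other.
  const uint8_t tempChroma = colorTable[*ptrU][*ptrV];
  *ptrV = colorTable[*ptrV][*ptrU];
  *ptrU = tempChroma;
  ptrU++;
  ptrV++;
}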

View File

@@ -21,7 +21,7 @@ namespace webrtc {
namespace VideoProcessing
{
WebRtc_Word32 ColorEnhancement(VideoFrame* frame);
WebRtc_Word32 ColorEnhancement(I420VideoFrame* frame);
}
} //namespace

View File

@@ -56,25 +56,23 @@ VPMContentAnalysis::~VPMContentAnalysis()
VideoContentMetrics*
VPMContentAnalysis::ComputeContentMetrics(const VideoFrame& inputFrame)
VPMContentAnalysis::ComputeContentMetrics(const I420VideoFrame& inputFrame)
{
if (inputFrame.Buffer() == NULL)
if (inputFrame.IsZeroSize())
{
return NULL;
}
// Init if needed (native dimension change)
if (_width != static_cast<int>(inputFrame.Width()) ||
_height != static_cast<int>(inputFrame.Height()))
if (_width != inputFrame.width() || _height != inputFrame.height())
{
if (VPM_OK != Initialize(static_cast<int>(inputFrame.Width()),
static_cast<int>(inputFrame.Height())))
if (VPM_OK != Initialize(inputFrame.width(), inputFrame.height()))
{
return NULL;
}
}
_origFrame = inputFrame.Buffer();
// Only interested in the Y plane.
_origFrame = inputFrame.buffer(kYPlane);
// compute spatial metrics: 3 spatial prediction errors
(this->*ComputeSpatialMetrics)();

View File

@@ -11,6 +11,7 @@
#ifndef VPM_CONTENT_ANALYSIS_H
#define VPM_CONTENT_ANALYSIS_H
#include "common_video/interface/i420_video_frame.h"
#include "typedefs.h"
#include "module_common_types.h"
#include "video_processing_defines.h"
@@ -35,7 +36,8 @@ public:
// Input: new frame
// Return value: pointer to structure containing content Analysis
// metrics or NULL value upon error
VideoContentMetrics* ComputeContentMetrics(const VideoFrame& inputFrame);
VideoContentMetrics* ComputeContentMetrics(const I420VideoFrame&
inputFrame);
// Release all allocated memory
// Output: 0 if OK, negative value upon error

View File

@@ -89,7 +89,7 @@ VPMDeflickering::Reset()
}
WebRtc_Word32
VPMDeflickering::ProcessFrame(VideoFrame* frame,
VPMDeflickering::ProcessFrame(I420VideoFrame* frame,
VideoProcessingModule::FrameStats* stats)
{
assert(frame);
@@ -103,10 +103,10 @@ VPMDeflickering::ProcessFrame(VideoFrame* frame,
WebRtc_UWord16 tmpUW16;
WebRtc_UWord32 tmpUW32;
int width = frame->Width();
int height = frame->Height();
int width = frame->width();
int height = frame->height();
if (frame->Buffer() == NULL)
if (frame->IsZeroSize())
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
"Null frame pointer");
@@ -114,7 +114,7 @@ VPMDeflickering::ProcessFrame(VideoFrame* frame,
}
// Stricter height check due to subsampling size calculation below.
if (width == 0 || height < 2)
if (height < 2)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
"Invalid frame size");
@@ -128,7 +128,7 @@ VPMDeflickering::ProcessFrame(VideoFrame* frame,
return VPM_GENERAL_ERROR;
}
if (PreDetection(frame->TimeStamp(), *stats) == -1)
if (PreDetection(frame->timestamp(), *stats) == -1)
{
return VPM_GENERAL_ERROR;
}
@@ -154,7 +154,7 @@ VPMDeflickering::ProcessFrame(VideoFrame* frame,
for (int i = 0; i < height; i += kDownsamplingFactor)
{
memcpy(ySorted + sortRowIdx * width,
frame->Buffer() + i * width, width);
frame->buffer(kYPlane) + i * width, width);
sortRowIdx++;
}
@@ -258,7 +258,7 @@ VPMDeflickering::ProcessFrame(VideoFrame* frame,
}
// Map to the output frame.
uint8_t* buffer = frame->Buffer();
uint8_t* buffer = frame->buffer(kYPlane);
for (WebRtc_UWord32 i = 0; i < ySize; i++)
{
buffer[i] = mapUW8[buffer[i]];

View File

@@ -32,7 +32,7 @@ public:
void Reset();
WebRtc_Word32 ProcessFrame(VideoFrame* frame,
WebRtc_Word32 ProcessFrame(I420VideoFrame* frame,
VideoProcessingModule::FrameStats* stats);
private:
WebRtc_Word32 PreDetection(WebRtc_UWord32 timestamp,

View File

@@ -72,7 +72,7 @@ VPMDenoising::Reset()
}
WebRtc_Word32
VPMDenoising::ProcessFrame(VideoFrame* frame)
VPMDenoising::ProcessFrame(I420VideoFrame* frame)
{
assert(frame);
WebRtc_Word32 thevar;
@@ -84,21 +84,15 @@ VPMDenoising::ProcessFrame(VideoFrame* frame)
WebRtc_UWord32 tmp;
WebRtc_Word32 numPixelsChanged = 0;
if (frame->Buffer() == NULL)
if (frame->IsZeroSize())
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
"Null frame pointer");
"zero size frame");
return VPM_GENERAL_ERROR;
}
int width = frame->Width();
int height = frame->Height();
if (width == 0 || height == 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
"Invalid frame size");
return VPM_GENERAL_ERROR;
}
int width = frame->width();
int height = frame->height();
/* Size of luminance component */
const WebRtc_UWord32 ysize = height * width;
@@ -127,7 +121,7 @@ VPMDenoising::ProcessFrame(VideoFrame* frame)
}
/* Apply de-noising on each pixel, but update variance sub-sampled */
uint8_t* buffer = frame->Buffer();
uint8_t* buffer = frame->buffer(kYPlane);
for (int i = 0; i < height; i++)
{ // Collect over height
k = i * width;

View File

@@ -29,7 +29,7 @@ public:
void Reset();
WebRtc_Word32 ProcessFrame(VideoFrame* frame);
WebRtc_Word32 ProcessFrame(I420VideoFrame* frame);
private:
WebRtc_Word32 _id;

View File

@@ -32,7 +32,6 @@ VPMFramePreprocessor::~VPMFramePreprocessor()
delete _spatialResampler;
delete _ca;
delete _vd;
_resampledFrame.Free(); // is this needed?
}
WebRtc_Word32
@@ -136,10 +135,10 @@ VPMFramePreprocessor::DecimatedHeight() const
WebRtc_Word32
VPMFramePreprocessor::PreprocessFrame(const VideoFrame& frame,
VideoFrame** processedFrame)
VPMFramePreprocessor::PreprocessFrame(const I420VideoFrame& frame,
I420VideoFrame** processedFrame)
{
if (frame.Buffer() == NULL || frame.Height() == 0 || frame.Width() == 0)
if (frame.IsZeroSize())
{
return VPM_PARAMETER_ERROR;
}
@@ -157,9 +156,9 @@ VPMFramePreprocessor::PreprocessFrame(const VideoFrame& frame,
// Note that we must make a copy of it.
// We are not allowed to resample the input frame.
*processedFrame = NULL;
if (_spatialResampler->ApplyResample(frame.Width(), frame.Height())) {
if (_spatialResampler->ApplyResample(frame.width(), frame.height())) {
WebRtc_Word32 ret = _spatialResampler->ResampleFrame(frame,
_resampledFrame);
&_resampledFrame);
if (ret != VPM_OK)
return ret;
*processedFrame = &_resampledFrame;
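The contract carried over from the old interface: *processedFrame stays NULL when no resampling takes place, so callers fall back to the input frame; QualityModesTest earlier in this diff also treats a return value of 1 as a decimated (dropped) frame. A minimal caller-side sketch under those assumptions:

webrtc::I420VideoFrame* processed = NULL;
WebRtc_Word32 ret = vpm->PreprocessFrame(input_frame, &processed);
if (ret == 1)
  return 0;   // frame dropped by the decimator; nothing to encode
if (ret != VPM_OK)
  return -1;
// NULL means no resampling happened; encode the original frame,
// otherwise encode the resampled copy owned by the preprocessor.
const webrtc::I420VideoFrame& to_encode =
    (processed != NULL) ? *processed : input_frame;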

View File

@@ -62,8 +62,8 @@ public:
WebRtc_UWord32 DecimatedHeight() const;
//Preprocess output:
WebRtc_Word32 PreprocessFrame(const VideoFrame& frame,
VideoFrame** processedFrame);
WebRtc_Word32 PreprocessFrame(const I420VideoFrame& frame,
I420VideoFrame** processedFrame);
VideoContentMetrics* ContentMetrics() const;
private:
@@ -74,7 +74,7 @@ private:
WebRtc_Word32 _id;
VideoContentMetrics* _contentMetrics;
WebRtc_UWord32 _maxFrameRate;
VideoFrame _resampledFrame;
I420VideoFrame _resampledFrame;
VPMSpatialResampler* _spatialResampler;
VPMContentAnalysis* _ca;

View File


@@ -62,32 +62,32 @@ VPMSimpleSpatialResampler::Reset()
}
WebRtc_Word32
VPMSimpleSpatialResampler::ResampleFrame(const VideoFrame& inFrame,
VideoFrame& outFrame)
VPMSimpleSpatialResampler::ResampleFrame(const I420VideoFrame& inFrame,
I420VideoFrame* outFrame)
{
if (_resamplingMode == kNoRescaling)
return outFrame.CopyFrame(inFrame);
return outFrame->CopyFrame(inFrame);
// Check if re-sampling is needed
if ((inFrame.Width() == (WebRtc_UWord32)_targetWidth) &&
(inFrame.Height() == (WebRtc_UWord32)_targetHeight)) {
return outFrame.CopyFrame(inFrame);
if ((inFrame.width() == _targetWidth) &&
(inFrame.height() == _targetHeight)) {
return outFrame->CopyFrame(inFrame);
}
// Setting scaler
// TODO(mikhal/marpan): Should we allow for setting the filter mode in
// _scale.Set() with |_resamplingMode|?
int retVal = 0;
retVal = _scaler.Set(inFrame.Width(), inFrame.Height(),
retVal = _scaler.Set(inFrame.width(), inFrame.height(),
_targetWidth, _targetHeight, kI420, kI420, kScaleBox);
if (retVal < 0)
return retVal;
// Setting time parameters to the output frame - all the rest will be
// set by the scaler.
outFrame.SetTimeStamp(inFrame.TimeStamp());
outFrame.SetRenderTime(inFrame.RenderTimeMs());
outFrame->set_timestamp(inFrame.timestamp());
outFrame->set_render_time_ms(inFrame.render_time_ms());
retVal = _scaler.Scale(inFrame, &outFrame);
retVal = _scaler.Scale(inFrame, outFrame);
if (retVal == 0)
return VPM_OK;
else
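The hunk is truncated above; the dangling else presumably returns an error code. The two-step Scaler usage it wraps, condensed into a standalone sketch with illustrative dimensions:

webrtc::Scaler scaler;
// Configure once per resolution change: source size, target size,
// input/output formats, and the scaling method.
if (scaler.Set(640, 480, 320, 240, webrtc::kI420, webrtc::kI420,
               webrtc::kScaleBox) < 0)
  return VPM_GENERAL_ERROR;
webrtc::I420VideoFrame scaled;
if (scaler.Scale(input_frame, &scaled) != 0)  // 0 indicates success
  return VPM_GENERAL_ERROR;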

View File

@@ -34,8 +34,8 @@ public:
virtual void SetInputFrameResampleMode(VideoFrameResampling
resamplingMode) = 0;
virtual void Reset() = 0;
virtual WebRtc_Word32 ResampleFrame(const VideoFrame& inFrame,
VideoFrame& outFrame) = 0;
virtual WebRtc_Word32 ResampleFrame(const I420VideoFrame& inFrame,
I420VideoFrame* outFrame) = 0;
virtual WebRtc_Word32 TargetWidth() = 0;
virtual WebRtc_Word32 TargetHeight() = 0;
virtual bool ApplyResample(WebRtc_Word32 width, WebRtc_Word32 height) = 0;
@@ -50,8 +50,8 @@ public:
WebRtc_Word32 height);
virtual void SetInputFrameResampleMode(VideoFrameResampling resamplingMode);
virtual void Reset();
virtual WebRtc_Word32 ResampleFrame(const VideoFrame& inFrame,
VideoFrame& outFrame);
virtual WebRtc_Word32 ResampleFrame(const I420VideoFrame& inFrame,
I420VideoFrame* outFrame);
virtual WebRtc_Word32 TargetWidth();
virtual WebRtc_Word32 TargetHeight();

View File


@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include "video_processing_impl.h"
#include "critical_section_wrapper.h"
#include "trace.h"
@@ -115,29 +116,22 @@ VideoProcessingModuleImpl::Reset()
WebRtc_Word32
VideoProcessingModule::GetFrameStats(FrameStats* stats,
const VideoFrame& frame)
const I420VideoFrame& frame)
{
if (frame.Buffer() == NULL)
if (frame.IsZeroSize())
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1,
"Null frame pointer");
"zero size frame");
return VPM_PARAMETER_ERROR;
}
int width = frame.Width();
int height = frame.Height();
if (width == 0 || height == 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1,
"Invalid frame size");
return VPM_PARAMETER_ERROR;
}
int width = frame.width();
int height = frame.height();
ClearFrameStats(stats); // The histogram needs to be zeroed out.
SetSubSampling(stats, width, height);
uint8_t* buffer = frame.Buffer();
const uint8_t* buffer = frame.buffer(kYPlane);
// Compute histogram and sum of frame
for (int i = 0; i < height; i += (1 << stats->subSamplHeight))
{
@@ -182,33 +176,34 @@ VideoProcessingModule::ClearFrameStats(FrameStats* stats)
}
WebRtc_Word32
VideoProcessingModule::ColorEnhancement(VideoFrame* frame)
VideoProcessingModule::ColorEnhancement(I420VideoFrame* frame)
{
return VideoProcessing::ColorEnhancement(frame);
}
WebRtc_Word32
VideoProcessingModule::Brighten(VideoFrame* frame, int delta)
VideoProcessingModule::Brighten(I420VideoFrame* frame, int delta)
{
return VideoProcessing::Brighten(frame, delta);
}
WebRtc_Word32
VideoProcessingModuleImpl::Deflickering(VideoFrame* frame, FrameStats* stats)
VideoProcessingModuleImpl::Deflickering(I420VideoFrame* frame,
FrameStats* stats)
{
CriticalSectionScoped mutex(&_mutex);
return _deflickering.ProcessFrame(frame, stats);
}
WebRtc_Word32
VideoProcessingModuleImpl::Denoising(VideoFrame* frame)
VideoProcessingModuleImpl::Denoising(I420VideoFrame* frame)
{
CriticalSectionScoped mutex(&_mutex);
return _denoising.ProcessFrame(frame);
}
WebRtc_Word32
VideoProcessingModuleImpl::BrightnessDetection(const VideoFrame& frame,
VideoProcessingModuleImpl::BrightnessDetection(const I420VideoFrame& frame,
const FrameStats& stats)
{
CriticalSectionScoped mutex(&_mutex);
@@ -273,8 +268,8 @@ VideoProcessingModuleImpl::DecimatedHeight() const
}
WebRtc_Word32
VideoProcessingModuleImpl::PreprocessFrame(const VideoFrame& frame,
VideoFrame **processedFrame)
VideoProcessingModuleImpl::PreprocessFrame(const I420VideoFrame& frame,
I420VideoFrame **processedFrame)
{
CriticalSectionScoped mutex(&_mutex);
return _framePreProcessor.PreprocessFrame(frame, processedFrame);
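Taken together, these entry points imply the usual calling order: compute per-frame stats, validate them, then hand the same stats to Deflickering or BrightnessDetection. A minimal sketch of that sequence:

webrtc::VideoProcessingModule::FrameStats stats;
if (webrtc::VideoProcessingModule::GetFrameStats(&stats, frame) != 0)
  return -1;
if (!webrtc::VideoProcessingModule::ValidFrameStats(stats))
  return -1;
// Deflickering modifies the frame (and its stats) in place.
vpm->Deflickering(&frame, &stats);
// Returns a member of BrightnessWarning on success, -1 on error.
WebRtc_Word32 warning = vpm->BrightnessDetection(frame, stats);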

View File

@@ -36,12 +36,12 @@ public:
virtual void Reset();
virtual WebRtc_Word32 Deflickering(VideoFrame* frame,
virtual WebRtc_Word32 Deflickering(I420VideoFrame* frame,
FrameStats* stats);
virtual WebRtc_Word32 Denoising(VideoFrame* frame);
virtual WebRtc_Word32 Denoising(I420VideoFrame* frame);
virtual WebRtc_Word32 BrightnessDetection(const VideoFrame& frame,
virtual WebRtc_Word32 BrightnessDetection(const I420VideoFrame& frame,
const FrameStats& stats);
//Frame pre-processor functions
@@ -72,8 +72,8 @@ public:
// Pre-process incoming frame: Sample when needed and compute content
// metrics when enabled.
// If no resampling takes place - processedFrame is set to NULL.
virtual WebRtc_Word32 PreprocessFrame(const VideoFrame& frame,
VideoFrame** processedFrame);
virtual WebRtc_Word32 PreprocessFrame(const I420VideoFrame& frame,
I420VideoFrame** processedFrame);
virtual VideoContentMetrics* ContentMetrics() const;
private:

View File

@@ -18,9 +18,16 @@ TEST_F(VideoProcessingModuleTest, BrightnessDetection)
WebRtc_UWord32 frameNum = 0;
WebRtc_Word32 brightnessWarning = 0;
WebRtc_UWord32 warningCount = 0;
while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) ==
_frameLength)
scoped_array<uint8_t> video_buffer(new uint8_t[_frame_length]);
while (fread(video_buffer.get(), 1, _frame_length, _sourceFile) ==
_frame_length)
{
_videoFrame.CreateFrame(_size_y, video_buffer.get(),
_size_uv, video_buffer.get() + _size_y,
_size_uv, video_buffer.get() + _size_y +
_size_uv,
_width, _height,
_width, _half_width, _half_width);
frameNum++;
VideoProcessingModule::FrameStats stats;
ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
@@ -42,15 +49,21 @@ TEST_F(VideoProcessingModuleTest, BrightnessDetection)
rewind(_sourceFile);
frameNum = 0;
warningCount = 0;
while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) ==
_frameLength &&
while (fread(video_buffer.get(), 1, _frame_length, _sourceFile) ==
_frame_length &&
frameNum < 300)
{
_videoFrame.CreateFrame(_size_y, video_buffer.get(),
_size_uv, video_buffer.get() + _size_y,
_size_uv, video_buffer.get() + _size_y +
_size_uv,
_width, _height,
_width, _half_width, _half_width);
frameNum++;
WebRtc_UWord8* frame = _videoFrame.Buffer();
WebRtc_UWord8* frame = _videoFrame.buffer(kYPlane);
WebRtc_UWord32 yTmp = 0;
for (WebRtc_UWord32 yIdx = 0; yIdx < _width * _height; yIdx++)
for (int yIdx = 0; yIdx < _width * _height; yIdx++)
{
yTmp = frame[yIdx] << 1;
if (yTmp > 255)
@@ -80,17 +93,23 @@ TEST_F(VideoProcessingModuleTest, BrightnessDetection)
rewind(_sourceFile);
frameNum = 0;
warningCount = 0;
while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) == _frameLength &&
frameNum < 300)
while (fread(video_buffer.get(), 1, _frame_length, _sourceFile) ==
_frame_length && frameNum < 300)
{
_videoFrame.CreateFrame(_size_y, video_buffer.get(),
_size_uv, video_buffer.get() + _size_y,
_size_uv, video_buffer.get() + _size_y +
_size_uv,
_width, _height,
_width, _half_width, _half_width);
frameNum++;
WebRtc_UWord8* frame = _videoFrame.Buffer();
WebRtc_UWord8* y_plane = _videoFrame.buffer(kYPlane);
WebRtc_Word32 yTmp = 0;
for (WebRtc_UWord32 yIdx = 0; yIdx < _width * _height; yIdx++)
for (int yIdx = 0; yIdx < _width * _height; yIdx++)
{
yTmp = frame[yIdx] >> 1;
frame[yIdx] = static_cast<WebRtc_UWord8>(yTmp);
yTmp = y_plane[yIdx] >> 1;
y_plane[yIdx] = static_cast<WebRtc_UWord8>(yTmp);
}
VideoProcessingModule::FrameStats stats;
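Each test in this diff now repeats the same fread-into-buffer followed by CreateFrame-from-pointers sequence. A small helper consolidating that pattern (the name ReadI420FrameFromFile is illustrative, not part of the change):

// Reads one raw I420 frame from |file| into |frame|; returns false at EOF
// or on a short read. Sizes follow the I420 layout used throughout this
// diff; chroma strides are half the luma stride, rounded up.
bool ReadI420FrameFromFile(FILE* file, int width, int height,
                           uint8_t* buffer, webrtc::I420VideoFrame* frame) {
  const int size_y = width * height;
  const int half_width = (width + 1) / 2;
  const int size_uv = half_width * ((height + 1) / 2);
  const size_t frame_length = size_y + 2 * size_uv;
  if (fread(buffer, 1, frame_length, file) != frame_length)
    return false;
  frame->CreateFrame(size_y, buffer,
                     size_uv, buffer + size_y,
                     size_uv, buffer + size_y + size_uv,
                     width, height,
                     width, half_width, half_width);
  return true;
}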

View File

@@ -39,15 +39,22 @@ TEST_F(VideoProcessingModuleTest, ColorEnhancement)
ASSERT_TRUE(modFile != NULL) << "Could not open output file.\n";
WebRtc_UWord32 frameNum = 0;
while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) == _frameLength)
scoped_array<uint8_t> video_buffer(new uint8_t[_frame_length]);
while (fread(video_buffer.get(), 1, _frame_length, _sourceFile) ==
_frame_length)
{
_videoFrame.CreateFrame(_size_y, video_buffer.get(),
_size_uv, video_buffer.get() + _size_y,
_size_uv, video_buffer.get() + _size_y +
_size_uv,
_width, _height,
_width, _half_width, _half_width);
frameNum++;
t0 = TickTime::Now();
ASSERT_EQ(0, VideoProcessingModule::ColorEnhancement(&_videoFrame));
t1 = TickTime::Now();
accTicks += t1 - t0;
if (fwrite(_videoFrame.Buffer(), 1, _frameLength,
modFile) != _frameLength) {
if (PrintI420VideoFrame(_videoFrame, modFile) < 0) {
return;
}
}
@@ -76,44 +83,70 @@ TEST_F(VideoProcessingModuleTest, ColorEnhancement)
rewind(modFile);
ASSERT_EQ(refLen, testLen) << "File lengths differ.";
VideoFrame refVideoFrame;
refVideoFrame.VerifyAndAllocate(_frameLength);
refVideoFrame.SetWidth(_width);
refVideoFrame.SetHeight(_height);
I420VideoFrame refVideoFrame;
// Compare frame-by-frame.
while (fread(_videoFrame.Buffer(), 1, _frameLength, modFile) == _frameLength)
scoped_array<uint8_t> ref_buffer(new uint8_t[_frame_length]);
while (fread(video_buffer.get(), 1, _frame_length, modFile) ==
_frame_length)
{
ASSERT_EQ(_frameLength, fread(refVideoFrame.Buffer(), 1, _frameLength, refFile));
EXPECT_EQ(0, memcmp(_videoFrame.Buffer(), refVideoFrame.Buffer(), _frameLength));
_videoFrame.CreateFrame(_size_y, video_buffer.get(),
_size_uv, video_buffer.get() + _size_y,
_size_uv, video_buffer.get() + _size_y +
_size_uv,
_width, _height,
_width, _half_width, _half_width);
ASSERT_EQ(_frame_length, fread(ref_buffer.get(), 1, _frame_length,
refFile));
refVideoFrame.CreateFrame(_size_y, ref_buffer.get(),
_size_uv, ref_buffer.get() + _size_y,
_size_uv, ref_buffer.get() + _size_y +
_size_uv,
_width, _height,
_width, _half_width, _half_width);
EXPECT_EQ(0, memcmp(_videoFrame.buffer(kYPlane),
refVideoFrame.buffer(kYPlane),
_size_y));
EXPECT_EQ(0, memcmp(_videoFrame.buffer(kUPlane),
refVideoFrame.buffer(kUPlane),
_size_uv));
EXPECT_EQ(0, memcmp(_videoFrame.buffer(kVPlane),
refVideoFrame.buffer(kVPlane),
_size_uv));
}
ASSERT_NE(0, feof(_sourceFile)) << "Error reading source file";
// Verify that all color pixels are enhanced, and no luminance values are
// altered.
WebRtc_UWord8 *testFrame = new WebRtc_UWord8[_frameLength];
scoped_array<uint8_t> testFrame(new WebRtc_UWord8[_frame_length]);
// Use value 128 as probe value, since we know that this will be changed
// in the enhancement.
memset(testFrame, 128, _frameLength);
memset(testFrame.get(), 128, _frame_length);
I420VideoFrame testVideoFrame;
testVideoFrame.CreateFrame(_size_y, testFrame.get(),
_size_uv, testFrame.get() + _size_y,
_size_uv, testFrame.get() + _size_y + _size_uv,
_width, _height,
_width, _half_width, _half_width);
VideoFrame testVideoFrame;
testVideoFrame.CopyFrame(_frameLength, testFrame);
testVideoFrame.SetWidth(_width);
testVideoFrame.SetHeight(_height);
ASSERT_EQ(0, VideoProcessingModule::ColorEnhancement(&testVideoFrame));
EXPECT_EQ(0, memcmp(testVideoFrame.Buffer(), testFrame, _width * _height))
EXPECT_EQ(0, memcmp(testVideoFrame.buffer(kYPlane), testFrame.get(),
_size_y))
<< "Function is modifying the luminance.";
EXPECT_NE(0, memcmp(testVideoFrame.Buffer() + _width * _height,
&testFrame[_width * _height], _width * _height / 2)) <<
"Function is not modifying all chrominance pixels";
EXPECT_NE(0, memcmp(testVideoFrame.buffer(kUPlane),
testFrame.get() + _size_y, _size_uv)) <<
"Function is not modifying all chrominance pixels";
EXPECT_NE(0, memcmp(testVideoFrame.buffer(kVPlane),
testFrame.get() + _size_y + _size_uv, _size_uv)) <<
"Function is not modifying all chrominance pixels";
ASSERT_EQ(0, fclose(refFile));
ASSERT_EQ(0, fclose(modFile));
delete [] testFrame;
}
} // namespace webrtc

View File

@@ -23,9 +23,16 @@ TEST_F(VideoProcessingModuleTest, ContentAnalysis)
_ca_c.Initialize(_width,_height);
_ca_sse.Initialize(_width,_height);
while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile)
== _frameLength)
scoped_array<uint8_t> video_buffer(new uint8_t[_frame_length]);
while (fread(video_buffer.get(), 1, _frame_length, _sourceFile)
== _frame_length)
{
_videoFrame.CreateFrame(_size_y, video_buffer.get(),
_size_uv, video_buffer.get() + _size_y,
_size_uv, video_buffer.get() + _size_y +
_size_uv,
_width, _height,
_width, _half_width, _half_width);
_cM_c = _ca_c.ComputeContentMetrics(_videoFrame);
_cM_SSE = _ca_sse.ComputeContentMetrics(_videoFrame);

View File

@@ -11,6 +11,7 @@
#include <cstdio>
#include <cstdlib>
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "modules/video_processing/main/interface/video_processing.h"
#include "modules/video_processing/main/test/unit_test/unit_test.h"
#include "system_wrappers/interface/tick_util.h"
@@ -42,6 +43,7 @@ TEST_F(VideoProcessingModuleTest, Deflickering)
"Could not open output file: " << output_file << "\n";
printf("\nRun time [us / frame]:\n");
scoped_array<uint8_t> video_buffer(new uint8_t[_frame_length]);
for (WebRtc_UWord32 runIdx = 0; runIdx < NumRuns; runIdx++)
{
TickTime t0;
@@ -50,10 +52,17 @@ TEST_F(VideoProcessingModuleTest, Deflickering)
WebRtc_UWord32 timeStamp = 1;
frameNum = 0;
while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) == _frameLength)
while (fread(video_buffer.get(), 1, _frame_length, _sourceFile) ==
_frame_length)
{
frameNum++;
_videoFrame.SetTimeStamp(timeStamp);
_videoFrame.CreateFrame(_size_y, video_buffer.get(),
_size_uv, video_buffer.get() + _size_y,
_size_uv, video_buffer.get() + _size_y +
_size_uv,
_width, _height,
_width, _half_width, _half_width);
_videoFrame.set_timestamp(timeStamp);
t0 = TickTime::Now();
VideoProcessingModule::FrameStats stats;
@@ -64,8 +73,7 @@ TEST_F(VideoProcessingModuleTest, Deflickering)
if (runIdx == 0)
{
if (fwrite(_videoFrame.Buffer(), 1, _frameLength,
deflickerFile) != _frameLength) {
if (PrintI420VideoFrame(_videoFrame, deflickerFile) < 0) {
return;
}
}

View File

@@ -11,6 +11,7 @@
#include <cstdio>
#include <cstdlib>
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "modules/video_processing/main/interface/video_processing.h"
#include "modules/video_processing/main/test/unit_test/unit_test.h"
#include "system_wrappers/interface/tick_util.h"
@@ -47,21 +48,27 @@ TEST_F(VideoProcessingModuleTest, Denoising)
WebRtc_Word32 modifiedPixels = 0;
frameNum = 0;
while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) == _frameLength)
scoped_array<uint8_t> video_buffer(new uint8_t[_frame_length]);
while (fread(video_buffer.get(), 1, _frame_length, _sourceFile) ==
_frame_length)
{
_videoFrame.CreateFrame(_size_y, video_buffer.get(),
_size_uv, video_buffer.get() + _size_y,
_size_uv,
video_buffer.get() + _size_y + _size_uv,
_width, _height,
_width, _half_width, _half_width);
frameNum++;
WebRtc_UWord8* sourceBuffer = _videoFrame.Buffer();
WebRtc_UWord8* sourceBuffer = _videoFrame.buffer(kYPlane);
// Add noise to part of the video stream
// Random noise
// TODO: investigate the effectiveness of this test.
//for(WebRtc_UWord32 ir = 0; ir < _frameLength; ir++)
// sourceBuffer[ir] = 128
for (WebRtc_UWord32 ir = 0; ir < _height; ir++)
for (int ir = 0; ir < _height; ir++)
{
WebRtc_UWord32 ik = ir * _width;
for (WebRtc_UWord32 ic = 0; ic < _width; ic++)
for (int ic = 0; ic < _width; ic++)
{
WebRtc_UWord8 r = rand() % 16;
r -= 8;
@@ -92,8 +99,7 @@ TEST_F(VideoProcessingModuleTest, Denoising)
if (runIdx == 0)
{
if (fwrite(_videoFrame.Buffer(), 1, _frameLength,
noiseFile) != _frameLength) {
if (PrintI420VideoFrame(_videoFrame, noiseFile) < 0) {
return;
}
}
@@ -105,8 +111,7 @@ TEST_F(VideoProcessingModuleTest, Denoising)
if (runIdx == 0)
{
if (fwrite(_videoFrame.Buffer(), 1, _frameLength,
denoiseFile) != _frameLength) {
if (PrintI420VideoFrame(_videoFrame, denoiseFile) < 0) {
return;
}
}
Some files were not shown because too many files have changed in this diff.