diff --git a/webrtc/common_video/common_video.gyp b/webrtc/common_video/common_video.gyp index 67db5d120..d8f2a3f56 100644 --- a/webrtc/common_video/common_video.gyp +++ b/webrtc/common_video/common_video.gyp @@ -28,6 +28,9 @@ 'jpeg/include', 'libyuv/include', ], + 'dependencies': [ + '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers', + ], 'direct_dependent_settings': { 'include_dirs': [ 'interface', diff --git a/webrtc/common_video/i420_video_frame.cc b/webrtc/common_video/i420_video_frame.cc index def6452da..25893acd9 100644 --- a/webrtc/common_video/i420_video_frame.cc +++ b/webrtc/common_video/i420_video_frame.cc @@ -35,6 +35,9 @@ int I420VideoFrame::CreateEmptyFrame(int width, int height, y_plane_.CreateEmptyPlane(size_y, stride_y, size_y); u_plane_.CreateEmptyPlane(size_u, stride_u, size_u); v_plane_.CreateEmptyPlane(size_v, stride_v, size_v); + // Creating empty frame - reset all values. + timestamp_ = 0; + render_time_ms_ = 0; return 0; } @@ -128,7 +131,7 @@ int I420VideoFrame::set_height(int height) { return 0; } -bool I420VideoFrame::IsZeroSize() { +bool I420VideoFrame::IsZeroSize() const { return (y_plane_.IsZeroSize() && u_plane_.IsZeroSize() && v_plane_.IsZeroSize()); } diff --git a/webrtc/common_video/interface/i420_video_frame.h b/webrtc/common_video/interface/i420_video_frame.h index 8d2a0509d..0e528c1e4 100644 --- a/webrtc/common_video/interface/i420_video_frame.h +++ b/webrtc/common_video/interface/i420_video_frame.h @@ -24,7 +24,7 @@ enum PlaneType { kYPlane = 0, kUPlane = 1, kVPlane = 2, - KNumOfPlanes = 3 + kNumOfPlanes = 3 }; class I420VideoFrame { @@ -94,7 +94,7 @@ class I420VideoFrame { int64_t render_time_ms() const {return render_time_ms_;} // Return true if underlying plane buffers are of zero size, false if not. - bool IsZeroSize(); + bool IsZeroSize() const; // Reset underlying plane buffers sizes to 0. This function doesn't // clear memory. diff --git a/webrtc/common_video/jpeg/include/jpeg.h b/webrtc/common_video/jpeg/include/jpeg.h index 3bb10931c..f65c8f8f3 100644 --- a/webrtc/common_video/jpeg/include/jpeg.h +++ b/webrtc/common_video/jpeg/include/jpeg.h @@ -12,7 +12,7 @@ #define WEBRTC_COMMON_VIDEO_JPEG #include "typedefs.h" -#include "modules/interface/module_common_types.h" // VideoFrame +#include "common_video/interface/i420_video_frame.h" #include "common_video/interface/video_image.h" // EncodedImage // jpeg forward declaration @@ -21,7 +21,7 @@ struct jpeg_compress_struct; namespace webrtc { -// TODO(mikhal): Move this to LibYuv wrappar, when LibYuv will have a JPG +// TODO(mikhal): Move this to LibYuv wrapper, when LibYuv will have a JPG // Encode. 
class JpegEncoder { @@ -46,7 +46,7 @@ public: // Output: // - 0 : OK // - (-1) : Error - WebRtc_Word32 Encode(const VideoFrame& inputImage); + WebRtc_Word32 Encode(const I420VideoFrame& inputImage); private: @@ -67,6 +67,6 @@ private: // - (-1) : Error // - (-2) : Unsupported format int ConvertJpegToI420(const EncodedImage& input_image, - VideoFrame* output_image); + I420VideoFrame* output_image); } #endif /* WEBRTC_COMMON_VIDEO_JPEG */ diff --git a/webrtc/common_video/jpeg/jpeg.cc b/webrtc/common_video/jpeg/jpeg.cc index b0d3a6264..a6a346b57 100644 --- a/webrtc/common_video/jpeg/jpeg.cc +++ b/webrtc/common_video/jpeg/jpeg.cc @@ -84,21 +84,21 @@ JpegEncoder::SetFileName(const char* fileName) WebRtc_Word32 -JpegEncoder::Encode(const VideoFrame& inputImage) +JpegEncoder::Encode(const I420VideoFrame& inputImage) { - if (inputImage.Buffer() == NULL || inputImage.Size() == 0) + if (inputImage.IsZeroSize()) { return -1; } - if (inputImage.Width() < 1 || inputImage.Height() < 1) + if (inputImage.width() < 1 || inputImage.height() < 1) { return -1; } FILE* outFile = NULL; - const WebRtc_UWord32 width = inputImage.Width(); - const WebRtc_UWord32 height = inputImage.Height(); + const int width = inputImage.width(); + const int height = inputImage.height(); // Set error handler myErrorMgr jerr; @@ -141,9 +141,15 @@ JpegEncoder::Encode(const VideoFrame& inputImage) _cinfo->comp_info[2].h_samp_factor = 1; // V _cinfo->comp_info[2].v_samp_factor = 1; _cinfo->raw_data_in = TRUE; + // Converting to a buffer + // TODO(mikhal): This is a tmp implementation. Will update to use LibYuv + // Encode when that becomes available. + unsigned int length = CalcBufferSize(kI420, width, height); + scoped_array<uint8_t> image_buffer(new uint8_t[length]); + ExtractBuffer(inputImage, length, image_buffer.get()); + int height16 = (height + 15) & ~15; + WebRtc_UWord8* imgPtr = image_buffer.get(); - WebRtc_UWord32 height16 = (height + 15) & ~15; - WebRtc_UWord8* imgPtr = inputImage.Buffer(); WebRtc_UWord8* origImagePtr = NULL; if (height16 != height) { @@ -151,7 +157,7 @@ JpegEncoder::Encode(const VideoFrame& inputImage) WebRtc_UWord32 requiredSize = CalcBufferSize(kI420, width, height16); origImagePtr = new WebRtc_UWord8[requiredSize]; memset(origImagePtr, 0, requiredSize); - memcpy(origImagePtr, inputImage.Buffer(), inputImage.Length()); + memcpy(origImagePtr, image_buffer.get(), length); imgPtr = origImagePtr; } @@ -164,7 +170,7 @@ JpegEncoder::Encode(const VideoFrame& inputImage) data[1] = u; data[2] = v; - WebRtc_UWord32 i, j; + int i, j; for (j = 0; j < height; j += 16) { @@ -197,7 +203,7 @@ JpegEncoder::Encode(const VideoFrame& inputImage) } int ConvertJpegToI420(const EncodedImage& input_image, - VideoFrame* output_image) { + I420VideoFrame* output_image) { if (output_image == NULL) return -1; @@ -211,11 +217,8 @@ int ConvertJpegToI420(const EncodedImage& input_image, return -2; // not supported.
int width = jpeg_decoder.GetWidth(); int height = jpeg_decoder.GetHeight(); - int req_size = CalcBufferSize(kI420, width, height); - output_image->VerifyAndAllocate(req_size); - output_image->SetWidth(width); - output_image->SetHeight(height); - output_image->SetLength(req_size); + output_image->CreateEmptyFrame(width, height, width, + (width + 1) / 2, (width + 1) / 2); return ConvertToI420(kMJPG, input_image._buffer, 0, 0, // no cropping diff --git a/webrtc/common_video/jpeg/jpeg_unittest.cc b/webrtc/common_video/jpeg/jpeg_unittest.cc index a7c912fc5..4d2890724 100644 --- a/webrtc/common_video/jpeg/jpeg_unittest.cc +++ b/webrtc/common_video/jpeg/jpeg_unittest.cc @@ -11,16 +11,17 @@ #include <cstdio> #include <string> -#include "common_video/jpeg/include/jpeg.h" #include "common_video/interface/video_image.h" +#include "common_video/jpeg/include/jpeg.h" +#include "common_video/libyuv/include/webrtc_libyuv.h" #include "gtest/gtest.h" #include "testsupport/fileutils.h" #include "modules/interface/module_common_types.h" namespace webrtc { -const unsigned int kImageWidth = 640; -const unsigned int kImageHeight = 480; +const int kImageWidth = 640; +const int kImageHeight = 480; class JpegTest: public testing::Test { protected: @@ -72,38 +73,30 @@ class JpegTest: public testing::Test { TEST_F(JpegTest, Decode) { encoded_buffer_ = ReadEncodedImage(input_filename_); - VideoFrame image_buffer; + I420VideoFrame image_buffer; EXPECT_EQ(0, ConvertJpegToI420(*encoded_buffer_, &image_buffer)); - EXPECT_GT(image_buffer.Length(), 0u); - EXPECT_EQ(kImageWidth, image_buffer.Width()); - EXPECT_EQ(kImageHeight, image_buffer.Height()); - image_buffer.Free(); + EXPECT_FALSE(image_buffer.IsZeroSize()); + EXPECT_EQ(kImageWidth, image_buffer.width()); + EXPECT_EQ(kImageHeight, image_buffer.height()); } TEST_F(JpegTest, EncodeInvalidInputs) { - VideoFrame empty; - empty.SetWidth(164); - empty.SetHeight(164); + I420VideoFrame empty; + empty.set_width(164); + empty.set_height(164); EXPECT_EQ(-1, encoder_->SetFileName(0)); + // Test empty (null) frame. EXPECT_EQ(-1, encoder_->Encode(empty)); - - empty.VerifyAndAllocate(0); + // Create empty frame (allocate memory) - arbitrary dimensions. + empty.CreateEmptyFrame(10, 10, 10, 5, 5); + empty.ResetSize(); EXPECT_EQ(-1, encoder_->Encode(empty)); - - empty.VerifyAndAllocate(10); - empty.SetHeight(0); - EXPECT_EQ(-1, encoder_->Encode(empty)); - - empty.SetHeight(164); - empty.SetWidth(0); - EXPECT_EQ(-1, encoder_->Encode(empty)); - empty.Free(); } TEST_F(JpegTest, Encode) { // Decode our input image then encode it again to a new file: encoded_buffer_ = ReadEncodedImage(input_filename_); - VideoFrame image_buffer; + I420VideoFrame image_buffer; EXPECT_EQ(0, ConvertJpegToI420(*encoded_buffer_, &image_buffer)); EXPECT_EQ(0, encoder_->SetFileName(encoded_filename_.c_str())); @@ -111,13 +104,11 @@ TEST_F(JpegTest, Encode) { // Save decoded image to file.
FILE* save_file = fopen(decoded_filename_.c_str(), "wb"); - if (fwrite(image_buffer.Buffer(), 1, - image_buffer.Length(), save_file) != image_buffer.Length()) { + if (PrintI420VideoFrame(image_buffer, save_file)) { return; } fclose(save_file); - image_buffer.Free(); } } // namespace webrtc diff --git a/webrtc/common_video/libyuv/include/scaler.h b/webrtc/common_video/libyuv/include/scaler.h index 8838844b6..37d441349 100644 --- a/webrtc/common_video/libyuv/include/scaler.h +++ b/webrtc/common_video/libyuv/include/scaler.h @@ -15,6 +15,7 @@ #ifndef WEBRTC_COMMON_VIDEO_LIBYUV_INCLUDE_SCALER_H_ #define WEBRTC_COMMON_VIDEO_LIBYUV_INCLUDE_SCALER_H_ +#include "common_video/interface/i420_video_frame.h" #include "common_video/libyuv/include/webrtc_libyuv.h" #include "typedefs.h" @@ -47,8 +48,8 @@ class Scaler { // Return value: 0 - OK, // -1 - parameter error // -2 - scaler not set - int Scale(const VideoFrame& src_frame, - VideoFrame* dst_frame); + int Scale(const I420VideoFrame& src_frame, + I420VideoFrame* dst_frame); private: // Determine if the VideoTypes are currently supported. diff --git a/webrtc/common_video/libyuv/include/webrtc_libyuv.h b/webrtc/common_video/libyuv/include/webrtc_libyuv.h index bd8ff0bd3..4b1b2a58f 100644 --- a/webrtc/common_video/libyuv/include/webrtc_libyuv.h +++ b/webrtc/common_video/libyuv/include/webrtc_libyuv.h @@ -15,15 +15,14 @@ #ifndef WEBRTC_COMMON_VIDEO_LIBYUV_INCLUDE_WEBRTC_LIBYUV_H_ #define WEBRTC_COMMON_VIDEO_LIBYUV_INCLUDE_WEBRTC_LIBYUV_H_ +#include <stdio.h> + #include "common_types.h" // RawVideoTypes. -#include "modules/interface/module_common_types.h" // VideoFrame +#include "common_video/interface/i420_video_frame.h" #include "typedefs.h" namespace webrtc { -// TODO(mikhal): 1. Sync libyuv and WebRtc meaning of stride. -// 2. Reorder parameters for consistency. - // Supported video types. enum VideoType { kUnknown, @@ -73,6 +72,24 @@ int AlignInt(int value, int alignment); // video frame or -1 in case of an error . int CalcBufferSize(VideoType type, int width, int height); +// TODO(mikhal): Add unit test for these two functions and determine location. +// Print I420VideoFrame to file +// Input: +// - frame : Reference to video frame. +// - file : pointer to file object. It is assumed that the file is +// already open for writing. +// Return value: 0 if OK, < 0 otherwise. +int PrintI420VideoFrame(const I420VideoFrame& frame, FILE* file); + +// Extract buffer from I420VideoFrame (consecutive planes, no stride) +// Input: +// - frame : Reference to video frame. +// - size : size of the allocated buffer. If size is insufficient, +// an error will be returned. +// - buffer : Pointer to buffer +// Return value: length of buffer if OK, < 0 otherwise. +int ExtractBuffer(const I420VideoFrame& input_frame, + int size, uint8_t* buffer); // Convert To I420 // Input: // - src_video_type : Type of input video. @@ -92,25 +109,23 @@ int ConvertToI420(VideoType src_video_type, int src_width, int src_height, int sample_size, VideoRotationMode rotation, - VideoFrame* dst_frame); + I420VideoFrame* dst_frame); // Convert From I420 // Input: -// - src_frame : Pointer to a source frame. -// - src_stride : Number of bytes in a row of the src Y plane. +// - src_frame : Reference to a source frame. // - dst_video_type : Type of output video. // - dst_sample_size : Required only for the parsing of MJPG. // - dst_frame : Pointer to a destination frame. // Return value: 0 if OK, < 0 otherwise. // It is assumed that source and destination have equal height.
-int ConvertFromI420(const VideoFrame& src_frame, int src_stride, +int ConvertFromI420(const I420VideoFrame& src_frame, VideoType dst_video_type, int dst_sample_size, uint8_t* dst_frame); // ConvertFrom YV12. // Interface - same as above. -int ConvertFromYV12(const uint8_t* src_frame, int src_stride, +int ConvertFromYV12(const I420VideoFrame& src_frame, VideoType dst_video_type, int dst_sample_size, - int width, int height, uint8_t* dst_frame); // The following list describes designated conversion functions which @@ -133,17 +148,17 @@ int ConvertNV12ToRGB565(const uint8_t* src_frame, // - dst_frame : Pointer to a destination frame. // Return value: 0 if OK, < 0 otherwise. // It is assumed that src and dst frames have equal dimensions. -int MirrorI420LeftRight(const VideoFrame* src_frame, - VideoFrame* dst_frame); -int MirrorI420UpDown(const VideoFrame* src_frame, - VideoFrame* dst_frame); +int MirrorI420LeftRight(const I420VideoFrame* src_frame, + I420VideoFrame* dst_frame); +int MirrorI420UpDown(const I420VideoFrame* src_frame, + I420VideoFrame* dst_frame); // Compute PSNR for an I420 frame (all planes). -double I420PSNR(const VideoFrame* ref_frame, - const VideoFrame* test_frame); +double I420PSNR(const I420VideoFrame* ref_frame, + const I420VideoFrame* test_frame); // Compute SSIM for an I420 frame (all planes). -double I420SSIM(const VideoFrame* ref_frame, - const VideoFrame* test_frame); +double I420SSIM(const I420VideoFrame* ref_frame, + const I420VideoFrame* test_frame); // TODO(mikhal): Remove these functions and keep only the above functionality. // Compute PSNR for an I420 buffer (all planes). diff --git a/webrtc/common_video/libyuv/libyuv_unittest.cc b/webrtc/common_video/libyuv/libyuv_unittest.cc index 6c2bba87c..24e34aca6 100644 --- a/webrtc/common_video/libyuv/libyuv_unittest.cc +++ b/webrtc/common_video/libyuv/libyuv_unittest.cc @@ -11,22 +11,26 @@ #include <math.h> #include <string.h> +#include "common_video/interface/i420_video_frame.h" #include "common_video/libyuv/include/webrtc_libyuv.h" #include "gtest/gtest.h" -#include "modules/interface/module_common_types.h" // VideoFrame #include "system_wrappers/interface/tick_util.h" +#include "system_wrappers/interface/scoped_ptr.h" #include "testsupport/fileutils.h" namespace webrtc { -int PrintBuffer(const uint8_t* buffer, int width, int height) { +int PrintBuffer(const uint8_t* buffer, int width, int height, int stride) { if (buffer == NULL) return -1; - int k = 0; + int k; + const uint8_t* tmp_buffer = buffer; for (int i = 0; i < height; i++) { + k = 0; for (int j = 0; j < width; j++) { - printf("%d ", buffer[k++]); + printf("%d ", tmp_buffer[k++]); } + tmp_buffer += stride; printf(" \n"); } printf(" \n"); @@ -34,54 +38,40 @@ int PrintBuffer(const uint8_t* buffer, int width, int height) { } -int PrintFrame(const VideoFrame* frame, const char* str) { +int PrintFrame(const I420VideoFrame* frame, const char* str) { if (frame == NULL) return -1; - printf("%s %dx%d \n", str, frame->Width(), frame->Height()); + printf("%s %dx%d \n", str, frame->width(), frame->height()); int ret = 0; - int width = frame->Width(); - int height = frame->Height(); - ret += PrintBuffer(frame->Buffer(), width, height); - int half_width = (frame->Width() + 1) / 2; - int half_height = (frame->Height() + 1) / 2; - ret += PrintBuffer(frame->Buffer() + width * height, half_width, half_height); - ret += PrintBuffer(frame->Buffer() + width * height + - half_width * half_height, half_width, half_height); + for (int plane_num = 0; plane_num < kNumOfPlanes; ++plane_num) { +
PlaneType plane_type = static_cast<PlaneType>(plane_num); + int width = (plane_num ? (frame->width() + 1) / 2 : frame->width()); + int height = (plane_num ? (frame->height() + 1) / 2 : frame->height()); + ret += PrintBuffer(frame->buffer(plane_type), width, height, + frame->stride(plane_type)); + } return ret; } // Create an image from on a YUV frame. Every plane value starts with a start // value, and will be set to increasing values. -// plane_offset - prep for PlaneType. -void CreateImage(VideoFrame* frame, int plane_offset[3]) { +void CreateImage(I420VideoFrame* frame, int plane_offset[kNumOfPlanes]) { if (frame == NULL) return; - int width = frame->Width(); - int height = frame->Height(); - int half_width = (frame->Width() + 1) / 2; - int half_height = (frame->Height() + 1) / 2; - uint8_t *data = frame->Buffer(); - // Y plane. - for (int i = 0; i < height; i++) { - for (int j = 0; j < width; j++) { - *data = static_cast((i + plane_offset[0]) + j); - data++; - } - } - // U plane. - for (int i = 0; i < half_height; i++) { - for (int j = 0; j < half_width; j++) { - *data = static_cast((i + plane_offset[1]) + j); - data++; - } - } - // V Plane. - for (int i = 0; i < half_height; i++) { - for (int j = 0; j < half_width; j++) { - *data = static_cast((i + plane_offset[2]) + j); - data++; + for (int plane_num = 0; plane_num < kNumOfPlanes; ++plane_num) { + int width = (plane_num != kYPlane ? (frame->width() + 1) / 2 : + frame->width()); + int height = (plane_num != kYPlane ? (frame->height() + 1) / 2 : + frame->height()); + PlaneType plane_type = static_cast<PlaneType>(plane_num); + uint8_t *data = frame->buffer(plane_type); + for (int i = 0; i < height; i++) { + for (int j = 0; j < width; j++) { + data[j] = static_cast<uint8_t>(i + plane_offset[plane_num] + j); + } + data += frame->stride(plane_type); } } } @@ -98,7 +88,6 @@ class TestLibYuv : public ::testing::Test { const int frame_length_; }; -// TODO (mikhal): Use scoped_ptr when handling buffers.
TestLibYuv::TestLibYuv() : source_file_(NULL), width_(352), height_(288), frame_length_(CalcBufferSize(kI420, 352, 288)) { } @@ -135,179 +124,162 @@ TEST_F(TestLibYuv, ConvertTest) { double psnr = 0; - VideoFrame orig_frame; - orig_frame.VerifyAndAllocate(frame_length_); - orig_frame.SetWidth(width_); - orig_frame.SetHeight(height_); - EXPECT_GT(fread(orig_frame.Buffer(), 1, frame_length_, source_file_), 0U); - orig_frame.SetLength(frame_length_); + I420VideoFrame orig_frame; + scoped_array<uint8_t> orig_buffer(new uint8_t[frame_length_]); + EXPECT_EQ(fread(orig_buffer.get(), 1, frame_length_, source_file_), + static_cast<unsigned int>(frame_length_)); + int size_y = width_ * height_; + int size_uv = ((width_ + 1) / 2) * ((height_ + 1) / 2); + orig_frame.CreateFrame(size_y, orig_buffer.get(), + size_uv, orig_buffer.get() + size_y, + size_uv, orig_buffer.get() + size_y + size_uv, + width_, height_, + width_, (width_ + 1) / 2, (width_ + 1) / 2); + printf("\nConvert #%d I420 <-> RGB24\n", j); + scoped_array<uint8_t> res_rgb_buffer2(new uint8_t[width_ * height_ * 3]); + I420VideoFrame res_i420_frame; + res_i420_frame.CreateEmptyFrame(width_, height_, width_, + (width_ + 1) / 2, (width_ + 1) / 2); + EXPECT_EQ(0, ConvertFromI420(orig_frame, kRGB24, 0, res_rgb_buffer2.get())); - // printf("\nConvert #%d I420 <-> RGB24\n", j); - uint8_t* res_rgb_buffer2 = new uint8_t[width_ * height_ * 3]; - VideoFrame res_i420_frame; - res_i420_frame.VerifyAndAllocate(frame_length_); - res_i420_frame.SetHeight(height_); - res_i420_frame.SetWidth(width_); - EXPECT_EQ(0, ConvertFromI420(orig_frame, width_, kRGB24, 0, - res_rgb_buffer2)); + EXPECT_EQ(0, ConvertToI420(kRGB24, res_rgb_buffer2.get(), 0, 0, width_, + height_, 0, kRotateNone, &res_i420_frame)); - EXPECT_EQ(0, ConvertToI420(kRGB24, res_rgb_buffer2, 0, 0, width_, height_, - 0, kRotateNone, &res_i420_frame)); - - if (fwrite(res_i420_frame.Buffer(), 1, frame_length_, - output_file) != static_cast<unsigned int>(frame_length_)) { + if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) { return; } psnr = I420PSNR(&orig_frame, &res_i420_frame); + // Optimization speed/quality trade-off => 45 dB only (platform dependent).
EXPECT_GT(ceil(psnr), 44); j++; - delete [] res_rgb_buffer2; - // printf("\nConvert #%d I420 <-> UYVY\n", j); - uint8_t* out_uyvy_buffer = new uint8_t[width_ * height_ * 2]; - EXPECT_EQ(0, ConvertFromI420(orig_frame, width_, - kUYVY, 0, out_uyvy_buffer)); - EXPECT_EQ(0, ConvertToI420(kUYVY, out_uyvy_buffer, 0, 0, width_, height_, - 0, kRotateNone, &res_i420_frame)); + printf("\nConvert #%d I420 <-> UYVY\n", j); + scoped_array<uint8_t> out_uyvy_buffer(new uint8_t[width_ * height_ * 2]); + EXPECT_EQ(0, ConvertFromI420(orig_frame, kUYVY, 0, out_uyvy_buffer.get())); + EXPECT_EQ(0, ConvertToI420(kUYVY, out_uyvy_buffer.get(), 0, 0, width_, + height_, 0, kRotateNone, &res_i420_frame)); psnr = I420PSNR(&orig_frame, &res_i420_frame); EXPECT_EQ(48.0, psnr); - if (fwrite(res_i420_frame.Buffer(), 1, frame_length_, - output_file) != static_cast<unsigned int>(frame_length_)) { - return; + if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) { + return; } j++; - delete [] out_uyvy_buffer; - // printf("\nConvert #%d I420 <-> I420 \n", j); - uint8_t* out_i420_buffer = new uint8_t[width_ * height_ * 3 / 2 ]; - EXPECT_EQ(0, ConvertToI420(kI420, orig_frame.Buffer(), 0, 0, width_, height_, + printf("\nConvert #%d I420 <-> I420 \n", j); + scoped_array<uint8_t> out_i420_buffer(new uint8_t[width_ * height_ * 3 / 2]); + EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer.get(), 0, 0, width_, height_, 0, kRotateNone, &res_i420_frame)); - EXPECT_EQ(0, ConvertFromI420(res_i420_frame, width_, kI420, 0, - out_i420_buffer)); - if (fwrite(res_i420_frame.Buffer(), 1, frame_length_, + EXPECT_EQ(0, ConvertFromI420(res_i420_frame, kI420, 0, + out_i420_buffer.get())); + if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) { + return; + } + psnr = I420PSNR(orig_buffer.get(), out_i420_buffer.get(), width_, height_); + EXPECT_EQ(48.0, psnr); + j++; + + printf("\nConvert #%d I420 <-> YV12\n", j); + scoped_array<uint8_t> outYV120Buffer(new uint8_t[frame_length_]); + scoped_array<uint8_t> res_i420_buffer(new uint8_t[frame_length_]); + I420VideoFrame yv12_frame; + EXPECT_EQ(0, ConvertFromI420(orig_frame, kYV12, 0, outYV120Buffer.get())); + yv12_frame.CreateFrame(size_y, outYV120Buffer.get(), + size_uv, outYV120Buffer.get() + size_y, + size_uv, outYV120Buffer.get() + size_y + size_uv, + width_, height_, + width_, (width_ + 1) / 2, (width_ + 1) / 2); + EXPECT_EQ(0, ConvertFromYV12(yv12_frame, kI420, 0, res_i420_buffer.get())); + if (fwrite(res_i420_buffer.get(), 1, frame_length_, output_file) != static_cast<unsigned int>(frame_length_)) { return; } - psnr = I420PSNR(orig_frame.Buffer(), out_i420_buffer, width_, height_); + + psnr = I420PSNR(orig_buffer.get(), res_i420_buffer.get(), width_, height_); EXPECT_EQ(48.0, psnr); j++; - delete [] out_i420_buffer; - // printf("\nConvert #%d I420 <-> YV12\n", j); - uint8_t* outYV120Buffer = new uint8_t[frame_length_]; + printf("\nConvert #%d I420 <-> YUY2\n", j); + scoped_array<uint8_t> out_yuy2_buffer(new uint8_t[width_ * height_ * 2]); + EXPECT_EQ(0, ConvertFromI420(orig_frame, kYUY2, 0, out_yuy2_buffer.get())); - EXPECT_EQ(0, ConvertFromI420(orig_frame, width_, kYV12, 0, - outYV120Buffer)); - EXPECT_EQ(0, ConvertFromYV12(outYV120Buffer, width_, - kI420, 0, - width_, height_, - res_i420_frame.Buffer())); - if (fwrite(res_i420_frame.Buffer(), 1, frame_length_, - output_file) != static_cast<unsigned int>(frame_length_)) { - return; + EXPECT_EQ(0, ConvertToI420(kYUY2, out_yuy2_buffer.get(), 0, 0, width_, + height_, 0, kRotateNone, &res_i420_frame)); + + if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) { + return; } psnr = I420PSNR(&orig_frame, &res_i420_frame);
EXPECT_EQ(48.0, psnr); + printf("\nConvert #%d I420 <-> RGB565\n", j); + scoped_array<uint8_t> out_rgb565_buffer(new uint8_t[width_ * height_ * 2]); + EXPECT_EQ(0, ConvertFromI420(orig_frame, kRGB565, 0, + out_rgb565_buffer.get())); + + EXPECT_EQ(0, ConvertToI420(kRGB565, out_rgb565_buffer.get(), 0, 0, width_, + height_, 0, kRotateNone, &res_i420_frame)); + + if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) { + return; + } j++; - delete [] outYV120Buffer; - // printf("\nConvert #%d I420 <-> YUY2\n", j); - uint8_t* out_yuy2_buffer = new uint8_t[width_ * height_ * 2]; - EXPECT_EQ(0, ConvertFromI420(orig_frame, width_, - kYUY2, 0, out_yuy2_buffer)); - - EXPECT_EQ(0, ConvertToI420(kYUY2, out_yuy2_buffer, 0, 0, width_, height_, - 0, kRotateNone, &res_i420_frame)); - - if (fwrite(res_i420_frame.Buffer(), 1, frame_length_, - output_file) != static_cast<unsigned int>(frame_length_)) { - return; - } - psnr = I420PSNR(&orig_frame, &res_i420_frame); - EXPECT_EQ(48.0, psnr); - - // printf("\nConvert #%d I420 <-> RGB565\n", j); - uint8_t* out_rgb565_buffer = new uint8_t[width_ * height_ * 2]; - EXPECT_EQ(0, ConvertFromI420(orig_frame, width_, - kRGB565, 0, out_rgb565_buffer)); - - EXPECT_EQ(0, ConvertToI420(kRGB565, out_rgb565_buffer, 0, 0, width_, height_, - 0, kRotateNone, &res_i420_frame)); - - if (fwrite(res_i420_frame.Buffer(), 1, frame_length_, - output_file) != static_cast<unsigned int>(frame_length_)) { return; } psnr = I420PSNR(&orig_frame, &res_i420_frame); // TODO(leozwang) Investigate the right psnr should be set for I420ToRGB565, // Another example is I420ToRGB24, the psnr is 44 EXPECT_GT(ceil(psnr), 40); - // printf("\nConvert #%d I420 <-> ARGB8888\n", j); - uint8_t* out_argb8888_buffer = new uint8_t[width_ * height_ * 4]; - EXPECT_EQ(0, ConvertFromI420(orig_frame, width_, - kARGB, 0, out_argb8888_buffer)); + printf("\nConvert #%d I420 <-> ARGB8888\n", j); + scoped_array<uint8_t> out_argb8888_buffer(new uint8_t[width_ * height_ * 4]); + EXPECT_EQ(0, ConvertFromI420(orig_frame, kARGB, 0, + out_argb8888_buffer.get())); - EXPECT_EQ(0, ConvertToI420(kARGB, out_argb8888_buffer, 0, 0, width_, height_, - 0, kRotateNone, &res_i420_frame)); + EXPECT_EQ(0, ConvertToI420(kARGB, out_argb8888_buffer.get(), 0, 0, width_, + height_, 0, kRotateNone, &res_i420_frame)); - if (fwrite(res_i420_frame.Buffer(), 1, frame_length_, - output_file) != static_cast<unsigned int>(frame_length_)) { + if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) { return; } + psnr = I420PSNR(&orig_frame, &res_i420_frame); // TODO(leozwang) Investigate the right psnr should be set for I420ToARGB8888, EXPECT_GT(ceil(psnr), 42); ASSERT_EQ(0, fclose(output_file)); - - res_i420_frame.Free(); - orig_frame.Free(); - delete [] out_argb8888_buffer; - delete [] out_rgb565_buffer; - delete [] out_yuy2_buffer; } -// TODO(holmer): Disabled for now due to crashes on Linux 32 bit. The theory -// is that it crashes due to the fact that the buffers are not 16 bit aligned. -// See http://code.google.com/p/webrtc/issues/detail?id=335 for more info. -TEST_F(TestLibYuv, DISABLED_MirrorTest) { +TEST_F(TestLibYuv, MirrorTest) { // TODO (mikhal): Add an automated test to confirm output. - // TODO(mikhal): Update to new I420VideoFrame and align values. Until then, - // this test is disabled, only insuring build.
std::string str; int width = 16; + int half_width = (width + 1) / 2; int height = 8; - int length = webrtc::CalcBufferSize(kI420, width, height); + int half_height = (height + 1) / 2; - VideoFrame test_frame; - test_frame.VerifyAndAllocate(length); - test_frame.SetWidth(width); - test_frame.SetHeight(height); - memset(test_frame.Buffer(), 255, length); + I420VideoFrame test_frame; + test_frame.CreateEmptyFrame(width, height, width, + half_width, half_width); + memset(test_frame.buffer(kYPlane), 255, width * height); + memset(test_frame.buffer(kUPlane), 255, half_width * half_height); + memset(test_frame.buffer(kVPlane), 255, half_width * half_height); // Create input frame. - VideoFrame in_frame, test_in_frame; - in_frame.VerifyAndAllocate(length); - in_frame.SetWidth(width); - in_frame.SetHeight(height); - in_frame.SetLength(length); - int plane_offset[3]; // prep for kNumPlanes. - plane_offset[0] = 10; - plane_offset[1] = 100; - plane_offset[2] = 200; + I420VideoFrame in_frame, test_in_frame; + in_frame.CreateEmptyFrame(width, height, width, + half_width, half_width); + int plane_offset[kNumOfPlanes]; + plane_offset[kYPlane] = 10; + plane_offset[kUPlane] = 100; + plane_offset[kVPlane] = 200; CreateImage(&in_frame, plane_offset); - test_in_frame.CopyFrame(in_frame); EXPECT_EQ(0, PrintFrame(&in_frame, "InputFrame")); + test_in_frame.CopyFrame(in_frame); - VideoFrame out_frame, test_out_frame; - out_frame.VerifyAndAllocate(length); - out_frame.SetWidth(width); - out_frame.SetHeight(height); - out_frame.SetLength(length); + I420VideoFrame out_frame, test_out_frame; + out_frame.CreateEmptyFrame(width, height, width, + half_width, half_width); CreateImage(&out_frame, plane_offset); test_out_frame.CopyFrame(out_frame); @@ -317,22 +289,28 @@ TEST_F(TestLibYuv, DISABLED_MirrorTest) { EXPECT_EQ(0, PrintFrame(&out_frame, "OutputFrame")); EXPECT_EQ(0, MirrorI420LeftRight(&out_frame, &in_frame)); - EXPECT_EQ(0, memcmp(in_frame.Buffer(), test_in_frame.Buffer(), length)); + EXPECT_EQ(0, memcmp(in_frame.buffer(kYPlane), + test_in_frame.buffer(kYPlane), width * height)); + EXPECT_EQ(0, memcmp(in_frame.buffer(kUPlane), + test_in_frame.buffer(kUPlane), half_width * half_height)); + EXPECT_EQ(0, memcmp(in_frame.buffer(kVPlane), + test_in_frame.buffer(kVPlane), half_width * half_height)); // UpDown std::cout << "Test Mirror function: UpDown" << std::endl; EXPECT_EQ(0, MirrorI420UpDown(&in_frame, &out_frame)); EXPECT_EQ(0, PrintFrame(&test_out_frame, "OutputFrame")); EXPECT_EQ(0, MirrorI420UpDown(&out_frame, &test_frame)); - EXPECT_EQ(0, memcmp(in_frame.Buffer(), test_frame.Buffer(), length)); + EXPECT_EQ(0, memcmp(in_frame.buffer(kYPlane), + test_frame.buffer(kYPlane), width * height)); + EXPECT_EQ(0, memcmp(in_frame.buffer(kUPlane), + test_frame.buffer(kUPlane), half_width * half_height)); + EXPECT_EQ(0, memcmp(in_frame.buffer(kVPlane), + test_frame.buffer(kVPlane), half_width * half_height)); // TODO(mikhal): Write to a file, and ask to look at the file. std::cout << "Do the mirrored frames look correct?"
<< std::endl; - in_frame.Free(); - test_in_frame.Free(); - out_frame.Free(); - test_out_frame.Free(); } TEST_F(TestLibYuv, alignment) { diff --git a/webrtc/common_video/libyuv/scaler.cc b/webrtc/common_video/libyuv/scaler.cc index 1304e0dfa..c97b7067a 100644 --- a/webrtc/common_video/libyuv/scaler.cc +++ b/webrtc/common_video/libyuv/scaler.cc @@ -44,42 +44,34 @@ int Scaler::Set(int src_width, int src_height, return 0; } -int Scaler::Scale(const VideoFrame& src_frame, - VideoFrame* dst_frame) { +int Scaler::Scale(const I420VideoFrame& src_frame, + I420VideoFrame* dst_frame) { assert(dst_frame); - if (src_frame.Buffer() == NULL || src_frame.Length() == 0) + if (src_frame.IsZeroSize()) return -1; if (!set_) return -2; // Making sure that destination frame is of sufficient size. - int required_dst_size = CalcBufferSize(kI420, dst_width_, dst_height_); - dst_frame->VerifyAndAllocate(required_dst_size); - // Set destination length and dimensions. - dst_frame->SetLength(required_dst_size); - dst_frame->SetWidth(dst_width_); - dst_frame->SetHeight(dst_height_); + // Aligning stride values based on width. - int src_half_width = (src_width_ + 1) >> 1; - int src_half_height = (src_height_ + 1) >> 1; - int dst_half_width = (dst_width_ + 1) >> 1; - int dst_half_height = (dst_height_ + 1) >> 1; - // Converting to planes: - const uint8_t* src_yplane = src_frame.Buffer(); - const uint8_t* src_uplane = src_yplane + src_width_ * src_height_; - const uint8_t* src_vplane = src_uplane + src_half_width * src_half_height; + dst_frame->CreateEmptyFrame(dst_width_, dst_height_, + dst_width_, (dst_width_ + 1) / 2, + (dst_width_ + 1) / 2); - uint8_t* dst_yplane = dst_frame->Buffer(); - uint8_t* dst_uplane = dst_yplane + dst_width_ * dst_height_; - uint8_t* dst_vplane = dst_uplane + dst_half_width * dst_half_height; - - return libyuv::I420Scale(src_yplane, src_width_, - src_uplane, src_half_width, - src_vplane, src_half_width, + return libyuv::I420Scale(src_frame.buffer(kYPlane), + src_frame.stride(kYPlane), + src_frame.buffer(kUPlane), + src_frame.stride(kUPlane), + src_frame.buffer(kVPlane), + src_frame.stride(kVPlane), src_width_, src_height_, - dst_yplane, dst_width_, - dst_uplane, dst_half_width, - dst_vplane, dst_half_width, + dst_frame->buffer(kYPlane), + dst_frame->stride(kYPlane), + dst_frame->buffer(kUPlane), + dst_frame->stride(kUPlane), + dst_frame->buffer(kVPlane), + dst_frame->stride(kVPlane), dst_width_, dst_height_, libyuv::FilterMode(method_)); } diff --git a/webrtc/common_video/libyuv/scaler_unittest.cc b/webrtc/common_video/libyuv/scaler_unittest.cc index b8e7a305b..d65b380a1 100644 --- a/webrtc/common_video/libyuv/scaler_unittest.cc +++ b/webrtc/common_video/libyuv/scaler_unittest.cc @@ -39,19 +39,25 @@ class TestScaler : public ::testing::Test { Scaler test_scaler_; FILE* source_file_; - VideoFrame test_frame_; + I420VideoFrame test_frame_; const int width_; + const int half_width_; const int height_; + const int half_height_; + const int size_y_; + const int size_uv_; const int frame_length_; }; - -// TODO (mikhal): Use scoped_ptr when handling buffers. 
TestScaler::TestScaler() : source_file_(NULL), width_(352), + half_width_(width_ / 2), height_(288), - frame_length_(CalcBufferSize(kI420, 352, 288)) { + half_height_(height_ / 2), + size_y_(width_ * height_), + size_uv_(half_width_ * half_height_), + frame_length_(CalcBufferSize(kI420, width_, height_)) { } void TestScaler::SetUp() { @@ -60,8 +66,8 @@ void TestScaler::SetUp() { source_file_ = fopen(input_file_name.c_str(), "rb"); ASSERT_TRUE(source_file_ != NULL) << "Cannot read file: "<< input_file_name << "\n"; - test_frame_.VerifyAndAllocate(frame_length_); - test_frame_.SetLength(frame_length_); + test_frame_.CreateEmptyFrame(width_, height_, + width_, half_width_, half_width_); } void TestScaler::TearDown() { @@ -69,7 +75,6 @@ void TestScaler::TearDown() { ASSERT_EQ(0, fclose(source_file_)); } source_file_ = NULL; - test_frame_.Free(); } TEST_F(TestScaler, ScaleWithoutSettingValues) { @@ -85,22 +90,30 @@ TEST_F(TestScaler, ScaleBadInitialValues) { } TEST_F(TestScaler, ScaleSendingNullSourcePointer) { - VideoFrame null_src_frame; + I420VideoFrame null_src_frame; EXPECT_EQ(-1, test_scaler_.Scale(null_src_frame, &test_frame_)); } TEST_F(TestScaler, ScaleSendingBufferTooSmall) { // Sending a buffer which is too small (should reallocate and update size) - EXPECT_EQ(0, test_scaler_.Set(352, 288, 144, 288, kI420, kI420, kScalePoint)); - VideoFrame test_frame2; - EXPECT_GT(fread(test_frame_.Buffer(), 1, frame_length_, source_file_), 0U); + EXPECT_EQ(0, test_scaler_.Set(width_, height_, + half_width_, half_height_, + kI420, kI420, + kScalePoint)); + I420VideoFrame test_frame2; + scoped_array<uint8_t> orig_buffer(new uint8_t[frame_length_]); + EXPECT_GT(fread(orig_buffer.get(), 1, frame_length_, source_file_), 0U); + test_frame_.CreateFrame(size_y_, orig_buffer.get(), + size_uv_, orig_buffer.get() + size_y_, + size_uv_, orig_buffer.get() + size_y_ + size_uv_, + width_, height_, + width_, half_width_, half_width_); EXPECT_EQ(0, test_scaler_.Scale(test_frame_, &test_frame2)); - EXPECT_EQ(CalcBufferSize(kI420, 144, 288), - static_cast<int>(test_frame2.Size())); - EXPECT_EQ(144u, test_frame2.Width()); - EXPECT_EQ(288u, test_frame2.Height()); - EXPECT_EQ(CalcBufferSize(kI420, 144, 288), - static_cast<int>(test_frame2.Length())); + EXPECT_GT(width_ * height_, test_frame2.allocated_size(kYPlane)); + EXPECT_GT(size_uv_, test_frame2.allocated_size(kUPlane)); + EXPECT_GT(size_uv_, test_frame2.allocated_size(kVPlane)); + EXPECT_EQ(half_width_, test_frame2.width()); + EXPECT_EQ(half_height_, test_frame2.height()); } //TODO (mikhal): Converge the test into one function that accepts the method. @@ -113,7 +126,7 @@ TEST_F(TestScaler, PointScaleTest) { ScaleSequence(method, source_file_, out_name, width_, height_, - width_ / 2, height_ / 2); + half_width_, half_height_); // Upsample back up and check PSNR.
source_file2 = fopen(out_name.c_str(), "rb"); out_name = webrtc::test::OutputPath() + "LibYuvTest_PointScale_352_288_" @@ -422,31 +435,34 @@ void TestScaler::ScaleSequence(ScaleMethod method, rewind(source_file); - int out_required_size = CalcBufferSize(kI420, dst_width, dst_height); - int in_required_size = CalcBufferSize(kI420, src_width, src_height); - - VideoFrame input_frame, output_frame; - input_frame.VerifyAndAllocate(in_required_size); - input_frame.SetLength(in_required_size); - output_frame.VerifyAndAllocate(out_required_size); - output_frame.SetLength(out_required_size); - + I420VideoFrame input_frame; + I420VideoFrame output_frame; int64_t start_clock, total_clock; total_clock = 0; int frame_count = 0; + int src_required_size = CalcBufferSize(kI420, src_width, src_height); + scoped_array<uint8_t> frame_buffer(new uint8_t[src_required_size]); + int size_y = src_width * src_height; + int size_uv = ((src_width + 1) / 2) * ((src_height + 1) / 2); // Running through entire sequence. while (feof(source_file) == 0) { - if ((size_t)in_required_size != - fread(input_frame.Buffer(), 1, in_required_size, source_file)) - break; + if ((size_t)src_required_size != + fread(frame_buffer.get(), 1, src_required_size, source_file)) + break; + + input_frame.CreateFrame(size_y, frame_buffer.get(), + size_uv, frame_buffer.get() + size_y, + size_uv, frame_buffer.get() + size_y + size_uv, + src_width, src_height, + src_width, (src_width + 1) / 2, + (src_width + 1) / 2); start_clock = TickTime::MillisecondTimestamp(); EXPECT_EQ(0, test_scaler_.Scale(input_frame, &output_frame)); total_clock += TickTime::MillisecondTimestamp() - start_clock; - if (fwrite(output_frame.Buffer(), 1, output_frame.Size(), - output_file) != static_cast<unsigned int>(output_frame.Size())) { - return; + if (PrintI420VideoFrame(output_frame, output_file) < 0) { + return; } frame_count++; } diff --git a/webrtc/common_video/libyuv/webrtc_libyuv.cc b/webrtc/common_video/libyuv/webrtc_libyuv.cc index 7a7d1d123..816fa8f6a 100644 --- a/webrtc/common_video/libyuv/webrtc_libyuv.cc +++ b/webrtc/common_video/libyuv/webrtc_libyuv.cc @@ -11,6 +11,7 @@ #include "common_video/libyuv/include/webrtc_libyuv.h" #include <assert.h> +#include <stdio.h> #include "libyuv.h" @@ -91,6 +92,57 @@ int CalcBufferSize(VideoType type, int width, int height) { return buffer_size; } +int PrintI420VideoFrame(const I420VideoFrame& frame, FILE* file) { + if (file == NULL) + return -1; + if (frame.IsZeroSize()) + return -1; + for (int planeNum = 0; planeNum < kNumOfPlanes; ++planeNum) { + int width = (planeNum ? (frame.width() + 1) / 2 : frame.width()); + int height = (planeNum ? (frame.height() + 1) / 2 : frame.height()); + PlaneType plane_type = static_cast<PlaneType>(planeNum); + const uint8_t* plane_buffer = frame.buffer(plane_type); + for (int y = 0; y < height; y++) { + if (fwrite(plane_buffer, 1, width, file) != + static_cast<unsigned int>(width)) { + return -1; + } + plane_buffer += frame.stride(plane_type); + } + } + return 0; +} + +int ExtractBuffer(const I420VideoFrame& input_frame, + int size, uint8_t* buffer) { + assert(buffer); + if (input_frame.IsZeroSize()) + return -1; + int length = CalcBufferSize(kI420, input_frame.width(), input_frame.height()); + if (size < length) { + return -1; + } + + int pos = 0; + uint8_t* buffer_ptr = buffer; + + for (int plane = 0; plane < kNumOfPlanes; ++plane) { + int width = (plane ? (input_frame.width() + 1) / 2 : + input_frame.width()); + int height = (plane ?
(input_frame.height() + 1) / 2 : + input_frame.height()); + const uint8_t* plane_ptr = input_frame.buffer( + static_cast<PlaneType>(plane)); + for (int y = 0; y < height; y++) { + memcpy(&buffer_ptr[pos], plane_ptr, width); + pos += width; + plane_ptr += input_frame.stride(static_cast<PlaneType>(plane)); + } + } + return length; +} + + int ConvertNV12ToRGB565(const uint8_t* src_frame, uint8_t* dst_frame, int width, int height) { @@ -172,179 +224,148 @@ int ConvertToI420(VideoType src_video_type, int src_width, int src_height, int sample_size, VideoRotationMode rotation, - VideoFrame* dst_frame) { - // All sanity tests are conducted within LibYuv. - int dst_height = dst_frame->Height(); - int dst_width = dst_frame->Width(); - // TODO(mikhal): When available, use actual stride value. - int dst_stride = dst_frame->Width(); - int half_dst_width = (dst_width + 1) >> 1; - int half_dst_height = (dst_height + 1) >> 1; - uint8_t* dst_yplane = dst_frame->Buffer(); - uint8_t* dst_uplane = dst_yplane + dst_width * dst_height; - uint8_t* dst_vplane = dst_uplane + half_dst_width * half_dst_height; + I420VideoFrame* dst_frame) { return libyuv::ConvertToI420(src_frame, sample_size, - dst_yplane, dst_stride, - dst_uplane, (dst_stride + 1) / 2, - dst_vplane, (dst_stride + 1) / 2, + dst_frame->buffer(kYPlane), + dst_frame->stride(kYPlane), + dst_frame->buffer(kUPlane), + dst_frame->stride(kUPlane), + dst_frame->buffer(kVPlane), + dst_frame->stride(kVPlane), crop_x, crop_y, src_width, src_height, - dst_width, dst_height, + dst_frame->width(), dst_frame->height(), ConvertRotationMode(rotation), ConvertVideoType(src_video_type)); } -int ConvertFromI420(const VideoFrame& src_frame, int src_stride, +int ConvertFromI420(const I420VideoFrame& src_frame, VideoType dst_video_type, int dst_sample_size, uint8_t* dst_frame) { - int height = src_frame.Height(); - int width = src_frame.Width(); - int abs_height = (height < 0) ? -height : height; - int half_width = (width + 1) >> 1; - int half_height = (abs_height + 1) >> 1; - const uint8_t* src_yplane = src_frame.Buffer(); - const uint8_t* src_uplane = src_yplane + width * abs_height; - const uint8_t* src_vplane = src_uplane + half_width * half_height; - return libyuv::ConvertFromI420(src_yplane, src_stride, - src_uplane, (src_stride + 1) / 2, - src_vplane, (src_stride + 1) / 2, + return libyuv::ConvertFromI420(src_frame.buffer(kYPlane), + src_frame.stride(kYPlane), + src_frame.buffer(kUPlane), + src_frame.stride(kUPlane), + src_frame.buffer(kVPlane), + src_frame.stride(kVPlane), dst_frame, dst_sample_size, - width, height, + src_frame.width(), src_frame.height(), ConvertVideoType(dst_video_type)); } -int ConvertFromYV12(const uint8_t* src_frame, int src_stride, +// TODO(mikhal): Create a designated VideoFrame for non I420. +int ConvertFromYV12(const I420VideoFrame& src_frame, VideoType dst_video_type, int dst_sample_size, - int width, int height, uint8_t* dst_frame) { - int half_src_stride = (src_stride + 1) >> 1; - int abs_height = (height < 0) ?
-height : height; - int half_height = (abs_height + 1) >> 1; - const uint8_t* src_yplane = src_frame; - const uint8_t* src_uplane = src_yplane + width * abs_height; - const uint8_t* src_vplane = src_uplane + half_src_stride * half_height; // YV12 = Y, V, U - return libyuv::ConvertFromI420(src_yplane, src_stride, - src_vplane, half_src_stride, - src_uplane, half_src_stride, + return libyuv::ConvertFromI420(src_frame.buffer(kYPlane), + src_frame.stride(kYPlane), + src_frame.buffer(kVPlane), + src_frame.stride(kVPlane), + src_frame.buffer(kUPlane), + src_frame.stride(kUPlane), dst_frame, dst_sample_size, - width, height, + src_frame.width(), src_frame.height(), ConvertVideoType(dst_video_type)); } -int MirrorI420LeftRight(const VideoFrame* src_frame, - VideoFrame* dst_frame) { +int MirrorI420LeftRight(const I420VideoFrame* src_frame, + I420VideoFrame* dst_frame) { // Source and destination frames should have equal resolution. - if (src_frame->Width() != dst_frame->Width() || - src_frame->Height() != dst_frame->Height()) + if (src_frame->width() != dst_frame->width() || + src_frame->height() != dst_frame->height()) return -1; - int width = src_frame->Width(); - int height = src_frame->Height(); - int half_width = (width + 1) >> 1; - int half_height = (height + 1) >> 1; - const uint8_t* src_yplane = src_frame->Buffer(); - const uint8_t* src_uplane = src_yplane + width * height; - const uint8_t* src_vplane = src_uplane + half_width * half_height; - uint8_t* dst_yplane = dst_frame->Buffer(); - uint8_t* dst_uplane = dst_yplane + width * height; - uint8_t* dst_vplane = dst_uplane + half_width * half_height; - return libyuv::I420Mirror(src_yplane, width, - src_uplane, half_width, - src_vplane, half_width, - dst_yplane, width, - dst_uplane, half_width, - dst_vplane, half_width, - width, height); + return libyuv::I420Mirror(src_frame->buffer(kYPlane), + src_frame->stride(kYPlane), + src_frame->buffer(kUPlane), + src_frame->stride(kUPlane), + src_frame->buffer(kVPlane), + src_frame->stride(kVPlane), + dst_frame->buffer(kYPlane), + dst_frame->stride(kYPlane), + dst_frame->buffer(kUPlane), + dst_frame->stride(kUPlane), + dst_frame->buffer(kVPlane), + dst_frame->stride(kVPlane), + src_frame->width(), src_frame->height()); } -int MirrorI420UpDown(const VideoFrame* src_frame, - VideoFrame* dst_frame) { +int MirrorI420UpDown(const I420VideoFrame* src_frame, + I420VideoFrame* dst_frame) { // Source and destination frames should have equal resolution - if (src_frame->Width() != dst_frame->Width() || - src_frame->Height() != dst_frame->Height()) + if (src_frame->width() != dst_frame->width() || + src_frame->height() != dst_frame->height()) return -1; - int width = src_frame->Width(); - int height = src_frame->Height(); - int half_width = (width + 1) >> 1; - int half_height = (height + 1) >> 1; - const uint8_t* src_yplane = src_frame->Buffer(); - const uint8_t* src_uplane = src_yplane + width * height; - const uint8_t* src_vplane = src_uplane + half_width * half_height; - uint8_t* dst_yplane = dst_frame->Buffer(); - uint8_t* dst_uplane = dst_yplane + width * height; - uint8_t* dst_vplane = dst_uplane + half_width * half_height; // Inserting negative height flips the frame. 
- return libyuv::I420Copy(src_yplane, width, - src_uplane, half_width, - src_vplane, half_width, - dst_yplane, width, - dst_uplane, half_width, - dst_vplane, half_width, - width, -height); + return libyuv::I420Copy(src_frame->buffer(kYPlane), + src_frame->stride(kYPlane), + src_frame->buffer(kUPlane), + src_frame->stride(kUPlane), + src_frame->buffer(kVPlane), + src_frame->stride(kVPlane), + dst_frame->buffer(kYPlane), + dst_frame->stride(kYPlane), + dst_frame->buffer(kUPlane), + dst_frame->stride(kUPlane), + dst_frame->buffer(kVPlane), + dst_frame->stride(kVPlane), + src_frame->width(), -(src_frame->height())); } // Compute PSNR for an I420 frame (all planes) -double I420PSNR(const VideoFrame* ref_frame, - const VideoFrame* test_frame) { +double I420PSNR(const I420VideoFrame* ref_frame, + const I420VideoFrame* test_frame) { if (!ref_frame || !test_frame) return -1; - else if ((ref_frame->Width() != test_frame->Width()) || - (ref_frame->Height() != test_frame->Height())) + else if ((ref_frame->width() != test_frame->width()) || + (ref_frame->height() != test_frame->height())) return -1; - else if (ref_frame->Width() == 0u || ref_frame->Height() == 0u) + else if (ref_frame->width() < 0 || ref_frame->height() < 0) return -1; - int height = ref_frame->Height() ; - int width = ref_frame->Width(); - int half_width = (width + 1) >> 1; - int half_height = (height + 1) >> 1; - const uint8_t* src_y_a = ref_frame->Buffer(); - const uint8_t* src_u_a = src_y_a + width * height; - const uint8_t* src_v_a = src_u_a + half_width * half_height; - const uint8_t* src_y_b = test_frame->Buffer(); - const uint8_t* src_u_b = src_y_b + width * height; - const uint8_t* src_v_b = src_u_b + half_width * half_height; - // In the following: stride is determined by width. - double psnr = libyuv::I420Psnr(src_y_a, width, - src_u_a, half_width, - src_v_a, half_width, - src_y_b, width, - src_u_b, half_width, - src_v_b, half_width, - width, height); + + double psnr = libyuv::I420Psnr(ref_frame->buffer(kYPlane), + ref_frame->stride(kYPlane), + ref_frame->buffer(kUPlane), + ref_frame->stride(kUPlane), + ref_frame->buffer(kVPlane), + ref_frame->stride(kVPlane), + test_frame->buffer(kYPlane), + test_frame->stride(kYPlane), + test_frame->buffer(kUPlane), + test_frame->stride(kUPlane), + test_frame->buffer(kVPlane), + test_frame->stride(kVPlane), + test_frame->width(), test_frame->height()); // LibYuv sets the max psnr value to 128, we restrict it to 48. // In case of 0 mse in one frame, 128 can skew the results significantly. return (psnr > 48.0) ? 
48.0 : psnr; } + // Compute SSIM for an I420 frame (all planes) -double I420SSIM(const VideoFrame* ref_frame, - const VideoFrame* test_frame) { +double I420SSIM(const I420VideoFrame* ref_frame, + const I420VideoFrame* test_frame) { if (!ref_frame || !test_frame) return -1; - else if ((ref_frame->Width() != test_frame->Width()) || - (ref_frame->Height() != test_frame->Height())) + else if ((ref_frame->width() != test_frame->width()) || + (ref_frame->height() != test_frame->height())) return -1; - else if (ref_frame->Width() == 0u || ref_frame->Height() == 0u) + else if (ref_frame->width() < 0 || ref_frame->height() < 0) return -1; - int height = ref_frame->Height() ; - int width = ref_frame->Width(); - int half_width = (width + 1) >> 1; - int half_height = (height + 1) >> 1; - const uint8_t* src_y_a = ref_frame->Buffer(); - const uint8_t* src_u_a = src_y_a + width * height; - const uint8_t* src_v_a = src_u_a + half_width * half_height; - const uint8_t* src_y_b = test_frame->Buffer(); - const uint8_t* src_u_b = src_y_b + width * height; - const uint8_t* src_v_b = src_u_b + half_width * half_height; - int stride_y = width; - int stride_uv = half_width; - return libyuv::I420Ssim(src_y_a, stride_y, - src_u_a, stride_uv, - src_v_a, stride_uv, - src_y_b, stride_y, - src_u_b, stride_uv, - src_v_b, stride_uv, - width, height); + + return libyuv::I420Ssim(ref_frame->buffer(kYPlane), + ref_frame->stride(kYPlane), + ref_frame->buffer(kUPlane), + ref_frame->stride(kUPlane), + ref_frame->buffer(kVPlane), + ref_frame->stride(kVPlane), + test_frame->buffer(kYPlane), + test_frame->stride(kYPlane), + test_frame->buffer(kUPlane), + test_frame->stride(kUPlane), + test_frame->buffer(kVPlane), + test_frame->stride(kVPlane), + test_frame->width(), test_frame->height()); } // Compute PSNR for an I420 frame (all planes) diff --git a/webrtc/common_video/plane.cc b/webrtc/common_video/plane.cc index dc20c6270..2362c4d82 100644 --- a/webrtc/common_video/plane.cc +++ b/webrtc/common_video/plane.cc @@ -30,8 +30,9 @@ int Plane::CreateEmptyPlane(int allocated_size, int stride, int plane_size) { if (allocated_size < 1 || stride < 1 || plane_size < 1) return -1; stride_ = stride; + if (MaybeResize(allocated_size) < 0) + return -1; plane_size_ = plane_size; - MaybeResize(allocated_size); return 0; } diff --git a/webrtc/common_video/plane.h b/webrtc/common_video/plane.h index c6743398d..0b94f99d7 100644 --- a/webrtc/common_video/plane.h +++ b/webrtc/common_video/plane.h @@ -44,10 +44,10 @@ class Plane { int allocated_size() const {return allocated_size_;} // Set actual size. - void ResetSize() {plane_size_ = 0;}; + void ResetSize() {plane_size_ = 0;} // Return true is plane size is zero, false if not. - bool IsZeroSize() {return plane_size_ == 0;}; + bool IsZeroSize() const {return plane_size_ == 0;} // Get stride value. 
int stride() const {return stride_;} diff --git a/webrtc/modules/utility/interface/file_player.h b/webrtc/modules/utility/interface/file_player.h index ee7be64dd..5b7af49c0 100644 --- a/webrtc/modules/utility/interface/file_player.h +++ b/webrtc/modules/utility/interface/file_player.h @@ -12,6 +12,7 @@ #define WEBRTC_MODULES_UTILITY_INTERFACE_FILE_PLAYER_H_ #include "common_types.h" +#include "common_video/interface/i420_video_frame.h" #include "engine_configurations.h" #include "module_common_types.h" #include "typedefs.h" @@ -93,12 +94,12 @@ public: virtual WebRtc_Word32 video_codec_info(VideoCodec& /*videoCodec*/) const {return -1;} - virtual WebRtc_Word32 GetVideoFromFile(VideoFrame& /*videoFrame*/) + virtual WebRtc_Word32 GetVideoFromFile(I420VideoFrame& /*videoFrame*/) { return -1;} // Same as GetVideoFromFile(). videoFrame will have the resolution specified // by the width outWidth and height outHeight in pixels. - virtual WebRtc_Word32 GetVideoFromFile(VideoFrame& /*videoFrame*/, + virtual WebRtc_Word32 GetVideoFromFile(I420VideoFrame& /*videoFrame*/, const WebRtc_UWord32 /*outWidth*/, const WebRtc_UWord32 /*outHeight*/) {return -1;} diff --git a/webrtc/modules/utility/interface/file_recorder.h b/webrtc/modules/utility/interface/file_recorder.h index 01299465a..01d4600d5 100644 --- a/webrtc/modules/utility/interface/file_recorder.h +++ b/webrtc/modules/utility/interface/file_recorder.h @@ -11,6 +11,7 @@ #ifndef WEBRTC_MODULES_UTILITY_INTERFACE_FILE_RECORDER_H_ #define WEBRTC_MODULES_UTILITY_INTERFACE_FILE_RECORDER_H_ +#include "common_video/interface/i420_video_frame.h" #include "common_types.h" #include "engine_configurations.h" #include "modules/audio_coding/main/interface/audio_coding_module_typedefs.h" @@ -78,7 +79,8 @@ public: bool videoOnly = false) = 0; // Record the video frame in videoFrame to AVI file. - virtual WebRtc_Word32 RecordVideoToFile(const VideoFrame& videoFrame) = 0; + virtual WebRtc_Word32 RecordVideoToFile( + const I420VideoFrame& videoFrame) = 0; protected: virtual ~FileRecorder() {} diff --git a/webrtc/modules/utility/source/file_player_impl.cc b/webrtc/modules/utility/source/file_player_impl.cc index 574fd7e4e..f76da75ed 100644 --- a/webrtc/modules/utility/source/file_player_impl.cc +++ b/webrtc/modules/utility/source/file_player_impl.cc @@ -536,7 +536,7 @@ WebRtc_Word32 VideoFilePlayerImpl::StopPlayingFile() return FilePlayerImpl::StopPlayingFile(); } -WebRtc_Word32 VideoFilePlayerImpl::GetVideoFromFile(VideoFrame& videoFrame, +WebRtc_Word32 VideoFilePlayerImpl::GetVideoFromFile(I420VideoFrame& videoFrame, WebRtc_UWord32 outWidth, WebRtc_UWord32 outHeight) { @@ -547,7 +547,7 @@ WebRtc_Word32 VideoFilePlayerImpl::GetVideoFromFile(VideoFrame& videoFrame, { return retVal; } - if( videoFrame.Length() > 0) + if (!videoFrame.IsZeroSize()) { retVal = _frameScaler.ResizeFrameIfNeeded(&videoFrame, outWidth, outHeight); @@ -555,22 +555,32 @@ WebRtc_Word32 VideoFilePlayerImpl::GetVideoFromFile(VideoFrame& videoFrame, return retVal; } -WebRtc_Word32 VideoFilePlayerImpl::GetVideoFromFile(VideoFrame& videoFrame) +WebRtc_Word32 VideoFilePlayerImpl::GetVideoFromFile(I420VideoFrame& videoFrame) { CriticalSectionScoped lock( _critSec); // No new video data read from file. 
if(_encodedData.payloadSize == 0) { - videoFrame.SetLength(0); + videoFrame.ResetSize(); return -1; } WebRtc_Word32 retVal = 0; if(strncmp(video_codec_info_.plName, "I420", 5) == 0) { - videoFrame.CopyFrame(_encodedData.payloadSize,_encodedData.payloadData); - videoFrame.SetLength(_encodedData.payloadSize); - videoFrame.SetWidth(video_codec_info_.width); - videoFrame.SetHeight(video_codec_info_.height); + int size_y = video_codec_info_.width * video_codec_info_.height; + int half_width = (video_codec_info_.width + 1) / 2; + int half_height = (video_codec_info_.height + 1) / 2; + int size_uv = half_width * half_height; + + // TODO(mikhal): Do we need to align the stride here? + const uint8_t* buffer_y = _encodedData.payloadData; + const uint8_t* buffer_u = buffer_y + size_y; + const uint8_t* buffer_v = buffer_u + size_uv; + videoFrame.CreateFrame(size_y, buffer_y, + size_uv, buffer_u, + size_uv, buffer_v, + video_codec_info_.width, video_codec_info_.height, + video_codec_info_.width, half_width, half_width); }else { // Set the timestamp manually since there is no timestamp in the file. @@ -580,7 +590,7 @@ WebRtc_Word32 VideoFilePlayerImpl::GetVideoFromFile(VideoFrame& videoFrame) } WebRtc_Word64 renderTimeMs = TickTime::MillisecondTimestamp(); - videoFrame.SetRenderTime(renderTimeMs); + videoFrame.set_render_time_ms(renderTimeMs); // Indicate that the current frame in the encoded buffer is old/has // already been read. diff --git a/webrtc/modules/utility/source/file_player_impl.h b/webrtc/modules/utility/source/file_player_impl.h index 9ae383851..c188e23b1 100644 --- a/webrtc/modules/utility/source/file_player_impl.h +++ b/webrtc/modules/utility/source/file_player_impl.h @@ -93,8 +93,8 @@ public: bool videoOnly); virtual WebRtc_Word32 StopPlayingFile(); virtual WebRtc_Word32 video_codec_info(VideoCodec& videoCodec) const; - virtual WebRtc_Word32 GetVideoFromFile(VideoFrame& videoFrame); - virtual WebRtc_Word32 GetVideoFromFile(VideoFrame& videoFrame, + virtual WebRtc_Word32 GetVideoFromFile(I420VideoFrame& videoFrame); + virtual WebRtc_Word32 GetVideoFromFile(I420VideoFrame& videoFrame, const WebRtc_UWord32 outWidth, const WebRtc_UWord32 outHeight); diff --git a/webrtc/modules/utility/source/file_recorder_impl.cc b/webrtc/modules/utility/source/file_recorder_impl.cc index b7ec08488..bb9acba7a 100644 --- a/webrtc/modules/utility/source/file_recorder_impl.cc +++ b/webrtc/modules/utility/source/file_recorder_impl.cc @@ -8,6 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. */ +#include "common_video/libyuv/include/webrtc_libyuv.h" #include "engine_configurations.h" #include "file_recorder_impl.h" #include "media_file.h" @@ -481,11 +482,10 @@ WebRtc_Word32 AviRecorder::SetUpVideoEncoder() return 0; } -WebRtc_Word32 AviRecorder::RecordVideoToFile(const VideoFrame& videoFrame) +WebRtc_Word32 AviRecorder::RecordVideoToFile(const I420VideoFrame& videoFrame) { CriticalSectionScoped lock(_critSec); - - if(!IsRecording() || ( videoFrame.Length() == 0)) + if(!IsRecording() || videoFrame.IsZeroSize()) { return -1; } @@ -548,7 +548,7 @@ WebRtc_Word32 AviRecorder::ProcessAudio() // Get the most recent frame that is due for writing to file. Since // frames are unencoded it's safe to throw away frames if necessary // for synchronizing audio and video.
- VideoFrame* frameToProcess = _videoFramesQueue->FrameToRecord(); + I420VideoFrame* frameToProcess = _videoFramesQueue->FrameToRecord(); if(frameToProcess) { // Synchronize audio to the current frame to process by throwing away @@ -563,7 +563,7 @@ WebRtc_Word32 AviRecorder::ProcessAudio() { if(TickTime::TicksToMilliseconds( frameInfo->_playoutTS.Ticks()) < - frameToProcess->RenderTimeMs()) + frameToProcess->render_time_ms()) { delete frameInfo; _audioFramesToWrite.PopFront(); @@ -622,7 +622,7 @@ bool AviRecorder::Process() // Get the most recent frame to write to file (if any). Synchronize it with // the audio stream (if any). Synchronization of the video is based on its // render timestamp (i.e. render_time_ms()) - VideoFrame* frameToProcess = _videoFramesQueue->FrameToRecord(); + I420VideoFrame* frameToProcess = _videoFramesQueue->FrameToRecord(); if( frameToProcess == NULL) { return true; @@ -692,9 +692,9 @@ bool AviRecorder::Process() return error == 0; } -WebRtc_Word32 AviRecorder::EncodeAndWriteVideoToFile(VideoFrame& videoFrame) +WebRtc_Word32 AviRecorder::EncodeAndWriteVideoToFile(I420VideoFrame& videoFrame) { - if(!IsRecording() || (videoFrame.Length() == 0)) + if (!IsRecording() || videoFrame.IsZeroSize()) { return -1; } @@ -709,14 +709,18 @@ WebRtc_Word32 AviRecorder::EncodeAndWriteVideoToFile(VideoFrame& videoFrame) if( STR_CASE_CMP(_videoCodecInst.plName, "I420") == 0) { - _videoEncodedData.VerifyAndAllocate(videoFrame.Length()); + int length = CalcBufferSize(kI420, videoFrame.width(), + videoFrame.height()); + _videoEncodedData.VerifyAndAllocate(length); // I420 is raw data. No encoding needed (each sample is represented by // 1 byte so there is no difference depending on endianness). - memcpy(_videoEncodedData.payloadData, videoFrame.Buffer(), - videoFrame.Length()); + int ret_length = ExtractBuffer(videoFrame, length, + _videoEncodedData.payloadData); + if (ret_length < 0) + return -1; - _videoEncodedData.payloadSize = videoFrame.Length(); + _videoEncodedData.payloadSize = ret_length; _videoEncodedData.frameType = kVideoFrameKey; }else { if( _videoEncoder->Encode(videoFrame, _videoEncodedData) != 0) diff --git a/webrtc/modules/utility/source/file_recorder_impl.h b/webrtc/modules/utility/source/file_recorder_impl.h index 6e5697201..60d3b5c55 100644 --- a/webrtc/modules/utility/source/file_recorder_impl.h +++ b/webrtc/modules/utility/source/file_recorder_impl.h @@ -74,7 +74,7 @@ public: { return -1; } - virtual WebRtc_Word32 RecordVideoToFile(const VideoFrame& videoFrame) + virtual WebRtc_Word32 RecordVideoToFile(const I420VideoFrame& videoFrame) { return -1; } @@ -117,7 +117,7 @@ public: ACMAMRPackingFormat amrFormat = AMRFileStorage, bool videoOnly = false); virtual WebRtc_Word32 StopRecording(); - virtual WebRtc_Word32 RecordVideoToFile(const VideoFrame& videoFrame); + virtual WebRtc_Word32 RecordVideoToFile(const I420VideoFrame& videoFrame); protected: virtual WebRtc_Word32 WriteEncodedAudioData( @@ -132,7 +132,7 @@ private: bool StartThread(); bool StopThread(); - WebRtc_Word32 EncodeAndWriteVideoToFile(VideoFrame& videoFrame); + WebRtc_Word32 EncodeAndWriteVideoToFile(I420VideoFrame& videoFrame); WebRtc_Word32 ProcessAudio(); WebRtc_Word32 CalcI420FrameSize() const; diff --git a/webrtc/modules/utility/source/frame_scaler.cc b/webrtc/modules/utility/source/frame_scaler.cc index 048792b3c..06689471d 100644 --- a/webrtc/modules/utility/source/frame_scaler.cc +++ b/webrtc/modules/utility/source/frame_scaler.cc @@ -23,26 +23,26 @@ FrameScaler::FrameScaler()
FrameScaler::~FrameScaler() {} -int FrameScaler::ResizeFrameIfNeeded(VideoFrame* video_frame, - WebRtc_UWord32 out_width, - WebRtc_UWord32 out_height) { - if (video_frame->Length() == 0) { +int FrameScaler::ResizeFrameIfNeeded(I420VideoFrame* video_frame, + int out_width, + int out_height) { + if (video_frame->IsZeroSize()) { return -1; } - if ((video_frame->Width() != out_width) || - (video_frame->Height() != out_height)) { + if ((video_frame->width() != out_width) || + (video_frame->height() != out_height)) { // Set correct scale settings and scale |video_frame| into |scaled_frame_|. - scaler_->Set(video_frame->Width(), video_frame->Height(), out_width, + scaler_->Set(video_frame->width(), video_frame->height(), out_width, out_height, kI420, kI420, kScaleBox); int ret = scaler_->Scale(*video_frame, &scaled_frame_); if (ret < 0) { return ret; } - scaled_frame_.SetRenderTime(video_frame->RenderTimeMs()); - scaled_frame_.SetTimeStamp(video_frame->TimeStamp()); - video_frame->SwapFrame(scaled_frame_); + scaled_frame_.set_render_time_ms(video_frame->render_time_ms()); + scaled_frame_.set_timestamp(video_frame->timestamp()); + video_frame->SwapFrame(&scaled_frame_); } return 0; } diff --git a/webrtc/modules/utility/source/frame_scaler.h b/webrtc/modules/utility/source/frame_scaler.h index f86a93382..4b2deae24 100644 --- a/webrtc/modules/utility/source/frame_scaler.h +++ b/webrtc/modules/utility/source/frame_scaler.h @@ -15,6 +15,7 @@ #ifdef WEBRTC_MODULE_UTILITY_VIDEO +#include "common_video/interface/i420_video_frame.h" #include "engine_configurations.h" #include "modules/interface/module_common_types.h" #include "system_wrappers/interface/scoped_ptr.h" @@ -31,13 +32,13 @@ class FrameScaler { // Re-sizes |video_frame| so that it has the width |out_width| and height // |out_height|. - int ResizeFrameIfNeeded(VideoFrame* video_frame, - WebRtc_UWord32 out_width, - WebRtc_UWord32 out_height); + int ResizeFrameIfNeeded(I420VideoFrame* video_frame, + int out_width, + int out_height); private: scoped_ptr<Scaler> scaler_; - VideoFrame scaled_frame_; + I420VideoFrame scaled_frame_; }; } // namespace webrtc diff --git a/webrtc/modules/utility/source/video_coder.cc b/webrtc/modules/utility/source/video_coder.cc index 727672374..3f9bffa6a 100644 --- a/webrtc/modules/utility/source/video_coder.cc +++ b/webrtc/modules/utility/source/video_coder.cc @@ -74,10 +74,10 @@ WebRtc_Word32 VideoCoder::SetDecodeCodec(VideoCodec& videoCodecInst, return 0; } -WebRtc_Word32 VideoCoder::Decode(VideoFrame& decodedVideo, +WebRtc_Word32 VideoCoder::Decode(I420VideoFrame& decodedVideo, const EncodedVideoData& encodedData) { - decodedVideo.SetLength(0); + decodedVideo.ResetSize(); if(encodedData.payloadSize <= 0) { return -1; @@ -92,7 +92,7 @@ WebRtc_Word32 VideoCoder::Decode(VideoFrame& decodedVideo, } -WebRtc_Word32 VideoCoder::Encode(const VideoFrame& videoFrame, +WebRtc_Word32 VideoCoder::Encode(const I420VideoFrame& videoFrame, EncodedVideoData& videoEncodedData) { // The AddVideoFrame(..) call will (indirectly) call SendData(). 
Store a @@ -121,7 +121,7 @@ WebRtc_Word8 VideoCoder::DefaultPayloadType(const char* plName) return -1; } -WebRtc_Word32 VideoCoder::FrameToRender(VideoFrame& videoFrame) +WebRtc_Word32 VideoCoder::FrameToRender(I420VideoFrame& videoFrame) { return _decodedVideo->CopyFrame(videoFrame); } diff --git a/webrtc/modules/utility/source/video_coder.h b/webrtc/modules/utility/source/video_coder.h index 7e7762a58..b1d8c7d42 100644 --- a/webrtc/modules/utility/source/video_coder.h +++ b/webrtc/modules/utility/source/video_coder.h @@ -35,10 +35,10 @@ public: WebRtc_Word32 SetDecodeCodec(VideoCodec& videoCodecInst, WebRtc_Word32 numberOfCores); - WebRtc_Word32 Decode(VideoFrame& decodedVideo, + WebRtc_Word32 Decode(I420VideoFrame& decodedVideo, const EncodedVideoData& encodedData); - WebRtc_Word32 Encode(const VideoFrame& videoFrame, + WebRtc_Word32 Encode(const I420VideoFrame& videoFrame, EncodedVideoData& videoEncodedData); WebRtc_Word8 DefaultPayloadType(const char* plName); @@ -46,7 +46,7 @@ private: // VCMReceiveCallback function. // Note: called by VideoCodingModule when decoding finished. - WebRtc_Word32 FrameToRender(VideoFrame& videoFrame); + WebRtc_Word32 FrameToRender(I420VideoFrame& videoFrame); // VCMPacketizationCallback function. // Note: called by VideoCodingModule when encoding finished. @@ -61,7 +61,7 @@ private: const RTPVideoHeader* rtpTypeHdr); VideoCodingModule* _vcm; - VideoFrame* _decodedVideo; + I420VideoFrame* _decodedVideo; EncodedVideoData* _videoEncodedData; }; } // namespace webrtc diff --git a/webrtc/modules/utility/source/video_frames_queue.cc b/webrtc/modules/utility/source/video_frames_queue.cc index ab590c451..22ad5e760 100644 --- a/webrtc/modules/utility/source/video_frames_queue.cc +++ b/webrtc/modules/utility/source/video_frames_queue.cc @@ -32,9 +32,9 @@ VideoFramesQueue::~VideoFramesQueue() ListItem* item = _incomingFrames.First(); if (item) { - VideoFrame* ptrFrame = static_cast<VideoFrame*>(item->GetItem()); + I420VideoFrame* ptrFrame = + static_cast<I420VideoFrame*>(item->GetItem()); assert(ptrFrame != NULL); - ptrFrame->Free(); delete ptrFrame; } _incomingFrames.Erase(item); @@ -42,27 +42,20 @@ while (!_emptyFrames.Empty()) { ListItem* item = _emptyFrames.First(); if (item) { - VideoFrame* ptrFrame = static_cast<VideoFrame*>(item->GetItem()); + I420VideoFrame* ptrFrame = + static_cast<I420VideoFrame*>(item->GetItem()); assert(ptrFrame != NULL); - ptrFrame->Free(); delete ptrFrame; } _emptyFrames.Erase(item); } } -WebRtc_Word32 VideoFramesQueue::AddFrame(const VideoFrame& newFrame) +WebRtc_Word32 VideoFramesQueue::AddFrame(const I420VideoFrame& newFrame) { - VideoFrame* ptrFrameToAdd = NULL; + I420VideoFrame* ptrFrameToAdd = NULL; // Try to re-use a VideoFrame. Only allocate new memory if it is necessary. if (!_emptyFrames.Empty()) { ListItem* item = _emptyFrames.First(); if (item) { - ptrFrameToAdd = static_cast<VideoFrame*>(item->GetItem()); + ptrFrameToAdd = static_cast<I420VideoFrame*>(item->GetItem()); _emptyFrames.Erase(item); } } @@ -81,7 +74,7 @@ WebRtc_Word32 VideoFramesQueue::AddFrame(const VideoFrame& newFrame) "%s: allocating buffer %d", __FUNCTION__, _emptyFrames.GetSize() + _incomingFrames.GetSize()); - ptrFrameToAdd = new VideoFrame(); + ptrFrameToAdd = new I420VideoFrame(); if (!ptrFrameToAdd) { WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1, @@ -98,15 +91,15 @@ WebRtc_Word32 VideoFramesQueue::AddFrame(const VideoFrame& newFrame) // lower than current time in ms (TickTime::MillisecondTimestamp()). // Note _incomingFrames is sorted so that the oldest frame is first. // Recycle all frames that are older than the most recent frame. 
-VideoFrame* VideoFramesQueue::FrameToRecord() +I420VideoFrame* VideoFramesQueue::FrameToRecord() { - VideoFrame* ptrRenderFrame = NULL; + I420VideoFrame* ptrRenderFrame = NULL; ListItem* item = _incomingFrames.First(); while(item) { - VideoFrame* ptrOldestFrameInList = - static_cast<VideoFrame*>(item->GetItem()); - if (ptrOldestFrameInList->RenderTimeMs() <= + I420VideoFrame* ptrOldestFrameInList = + static_cast<I420VideoFrame*>(item->GetItem()); + if (ptrOldestFrameInList->render_time_ms() <= TickTime::MillisecondTimestamp() + _renderDelayMs) { if (ptrRenderFrame) @@ -129,13 +122,13 @@ VideoFrame* VideoFramesQueue::FrameToRecord() return ptrRenderFrame; } -WebRtc_Word32 VideoFramesQueue::ReturnFrame(VideoFrame* ptrOldFrame) +WebRtc_Word32 VideoFramesQueue::ReturnFrame(I420VideoFrame* ptrOldFrame) { - ptrOldFrame->SetTimeStamp(0); - ptrOldFrame->SetWidth(0); - ptrOldFrame->SetHeight(0); - ptrOldFrame->SetRenderTime(0); - ptrOldFrame->SetLength(0); + ptrOldFrame->set_timestamp(0); + ptrOldFrame->set_width(0); + ptrOldFrame->set_height(0); + ptrOldFrame->set_render_time_ms(0); + ptrOldFrame->ResetSize(); _emptyFrames.PushBack(ptrOldFrame); return 0; } diff --git a/webrtc/modules/utility/source/video_frames_queue.h b/webrtc/modules/utility/source/video_frames_queue.h index 6c9be1c87..e1105382b 100644 --- a/webrtc/modules/utility/source/video_frames_queue.h +++ b/webrtc/modules/utility/source/video_frames_queue.h @@ -13,12 +13,12 @@ #ifdef WEBRTC_MODULE_UTILITY_VIDEO +#include "common_video/interface/i420_video_frame.h" #include "engine_configurations.h" #include "list_wrapper.h" #include "typedefs.h" namespace webrtc { -class VideoFrame; class VideoFramesQueue { @@ -27,12 +27,12 @@ public: ~VideoFramesQueue(); // Put newFrame (last) in the queue. - WebRtc_Word32 AddFrame(const VideoFrame& newFrame); + WebRtc_Word32 AddFrame(const I420VideoFrame& newFrame); // Return the most current frame. I.e. the frame with the highest // VideoFrame::RenderTimeMs() that is lower than // TickTime::MillisecondTimestamp(). - VideoFrame* FrameToRecord(); + I420VideoFrame* FrameToRecord(); // Set the render delay estimate to renderDelay ms. WebRtc_Word32 SetRenderDelay(WebRtc_UWord32 renderDelay); @@ -40,7 +40,7 @@ public: protected: // Make ptrOldFrame available for re-use. I.e. put it in the empty frames // queue. - WebRtc_Word32 ReturnFrame(VideoFrame* ptrOldFrame); + WebRtc_Word32 ReturnFrame(I420VideoFrame* ptrOldFrame); private: // Don't allow the buffer to expand beyond KMaxNumberOfFrames VideoFrames. 
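Note on the plane geometry used throughout this change: I420 stores a full-resolution Y plane followed by U and V planes that are half size in each dimension, and the (x + 1) / 2 rounding keeps odd widths and heights correct. The sketch below is standalone illustrative C++, not part of the patch; it shows how the size_y/size_uv values and buffer_u/buffer_v offsets passed to CreateFrame() in the hunks above and below are derived, assuming a contiguous Y-U-V buffer with no inter-plane padding:

    #include <stdint.h>

    struct I420Layout {
      int size_y;   // width * height
      int size_uv;  // per chroma plane: half_width * half_height
      const uint8_t* y;
      const uint8_t* u;
      const uint8_t* v;
    };

    I420Layout MakeI420Layout(const uint8_t* buffer, int width, int height) {
      I420Layout layout;
      const int half_width = (width + 1) / 2;    // round up for odd widths
      const int half_height = (height + 1) / 2;  // round up for odd heights
      layout.size_y = width * height;
      layout.size_uv = half_width * half_height;
      layout.y = buffer;                    // Y plane starts the buffer
      layout.u = layout.y + layout.size_y;  // U follows Y
      layout.v = layout.u + layout.size_uv; // V follows U
      return layout;
    }

For an unpadded buffer like this, the trailing stride arguments of I420VideoFrame::CreateFrame(..., width, height, stride_y, stride_u, stride_v) are simply (width, half_width, half_width), which is the pattern used in GetVideoFromFile() above and in I420Decoder::Decode() below.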
diff --git a/webrtc/modules/utility/test/testAPI.cc b/webrtc/modules/utility/test/testAPI.cc index 166d48327..3408a86a6 100644 --- a/webrtc/modules/utility/test/testAPI.cc +++ b/webrtc/modules/utility/test/testAPI.cc @@ -122,10 +122,11 @@ int main(int /*argc*/, char** /*argv*/) assert(fileRecorder.IsRecording()); - WebRtc_UWord32 videoReadSize = static_cast<WebRtc_UWord32>( (videoCodec.width * videoCodec.height * 3.0) / 2.0); - - webrtc::VideoFrame videoFrame; - videoFrame.VerifyAndAllocate(videoReadSize); + webrtc::I420VideoFrame videoFrame; + videoFrame.CreateEmptyFrame(videoCodec.width, videoCodec.height, + videoCodec.width, + (videoCodec.width + 1) / 2, + (videoCodec.width + 1) / 2); int frameCount = 0; bool audioNotDone = true; @@ -142,7 +143,7 @@ int main(int /*argc*/, char** /*argv*/) break; } frameCount++; - videoNotDone = ( videoFrame.Length() > 0); + videoNotDone = !videoFrame.IsZeroSize(); - videoFrame.SetRenderTime(TickTime::MillisecondTimestamp()); + videoFrame.set_render_time_ms(TickTime::MillisecondTimestamp()); if( videoNotDone) { @@ -219,12 +220,14 @@ int main(int /*argc*/, char** /*argv*/) audioFrame.sample_rate_hz_ = 8000; // prepare the video frame - videoFrame.VerifyAndAllocate(KVideoWriteSize); - memset(videoFrame.Buffer(), 127, videoCodec.width * videoCodec.height); - memset(videoFrame.Buffer() +(videoCodec.width * videoCodec.height), 0, videoCodec.width * videoCodec.height/2); - videoFrame.SetLength(KVideoWriteSize); - videoFrame.SetHeight(videoCodec.height); - videoFrame.SetWidth(videoCodec.width); + int half_width = (videoCodec.width + 1) / 2; + int half_height = (videoCodec.height + 1) / 2; + videoFrame.CreateEmptyFrame(videoCodec.width, videoCodec.height, + videoCodec.width, half_width, half_width); + memset(videoFrame.buffer(kYPlane), 127, + videoCodec.width * videoCodec.height); + memset(videoFrame.buffer(kUPlane), 0, half_width * half_height); + memset(videoFrame.buffer(kVPlane), 0, half_width * half_height); // write avi file, with 20 video frames const int KWriteNumFrames = 20; @@ -310,10 +313,9 @@ int main(int /*argc*/, char** /*argv*/) assert(fileRecorder.IsRecording()); - WebRtc_UWord32 videoReadSize = static_cast<WebRtc_UWord32>( (videoCodec.width * videoCodec.height * 3.0) / 2.0); - - webrtc::VideoFrame videoFrame; - videoFrame.VerifyAndAllocate(videoReadSize); + webrtc::I420VideoFrame videoFrame; + videoFrame.CreateEmptyFrame(videoCodec.width, videoCodec.height, + videoCodec.width, half_width, half_width); int videoFrameCount = 0; int audioFrameCount = 0; @@ -325,12 +327,12 @@ { if(filePlayer.TimeUntilNextVideoFrame() <= 0) { - if(filePlayer.GetVideoFromFile( videoFrame) != 0) + if(filePlayer.GetVideoFromFile(videoFrame) != 0) { break; } videoFrameCount++; - videoNotDone = ( videoFrame.Length() > 0); + videoNotDone = !videoFrame.IsZeroSize(); if( videoNotDone) { assert(fileRecorder.RecordVideoToFile(videoFrame) == 0); diff --git a/webrtc/modules/video_capture/main/interface/video_capture_defines.h b/webrtc/modules/video_capture/main/interface/video_capture_defines.h index 72188df05..38017e784 100644 --- a/webrtc/modules/video_capture/main/interface/video_capture_defines.h +++ b/webrtc/modules/video_capture/main/interface/video_capture_defines.h @@ -14,6 +14,7 @@ // Includes #include "typedefs.h" #include "modules/interface/module_common_types.h" +#include "common_video/interface/i420_video_frame.h" namespace webrtc { @@ -132,8 +133,11 @@ class VideoCaptureDataCallback { public: virtual void OnIncomingCapturedFrame(const WebRtc_Word32 id, - VideoFrame& videoFrame, + I420VideoFrame& videoFrame, VideoCodecType 
codecType) = 0; + virtual void OnIncomingCapturedEncodedFrame(const WebRtc_Word32 id, + VideoFrame& videoFrame, + VideoCodecType codecType) = 0; virtual void OnCaptureDelayChanged(const WebRtc_Word32 id, const WebRtc_Word32 delay) = 0; protected: diff --git a/webrtc/modules/video_capture/main/source/video_capture_impl.cc b/webrtc/modules/video_capture/main/source/video_capture_impl.cc index c326f5c51..95654635c 100644 --- a/webrtc/modules/video_capture/main/source/video_capture_impl.cc +++ b/webrtc/modules/video_capture/main/source/video_capture_impl.cc @@ -181,8 +181,8 @@ WebRtc_Word32 VideoCaptureImpl::CaptureDelay() return _setCaptureDelay; } -WebRtc_Word32 VideoCaptureImpl::DeliverCapturedFrame(VideoFrame& captureFrame, - WebRtc_Word64 capture_time, VideoCodecType codec_type) { +WebRtc_Word32 VideoCaptureImpl::DeliverCapturedFrame(I420VideoFrame& + captureFrame, WebRtc_Word64 capture_time, VideoCodecType codec_type) { UpdateFrameCount(); // frame count used for local frame rate callback. const bool callOnCaptureDelayChanged = _setCaptureDelay != _captureDelay; @@ -193,17 +193,17 @@ WebRtc_Word32 VideoCaptureImpl::DeliverCapturedFrame(VideoFrame& captureFrame, // Set the capture time if (capture_time != 0) { - captureFrame.SetRenderTime(capture_time); + captureFrame.set_render_time_ms(capture_time); } else { - captureFrame.SetRenderTime(TickTime::MillisecondTimestamp()); + captureFrame.set_render_time_ms(TickTime::MillisecondTimestamp()); } - if (captureFrame.RenderTimeMs() == last_capture_time_) { + if (captureFrame.render_time_ms() == last_capture_time_) { // We don't allow the same capture time for two frames, drop this one. return -1; } - last_capture_time_ = captureFrame.RenderTimeMs(); + last_capture_time_ = captureFrame.render_time_ms(); if (_dataCallBack) { if (callOnCaptureDelayChanged) { @@ -228,7 +228,7 @@ WebRtc_Word32 VideoCaptureImpl::DeliverEncodedCapturedFrame( // Set the capture time if (capture_time != 0) { - captureFrame.SetRenderTime(capture_time); + captureFrame.SetRenderTime(capture_time); } else { captureFrame.SetRenderTime(TickTime::MillisecondTimestamp()); @@ -244,7 +244,8 @@ WebRtc_Word32 VideoCaptureImpl::DeliverEncodedCapturedFrame( if (callOnCaptureDelayChanged) { _dataCallBack->OnCaptureDelayChanged(_id, _captureDelay); } - _dataCallBack->OnIncomingCapturedFrame(_id, captureFrame, codec_type); + _dataCallBack->OnIncomingCapturedEncodedFrame(_id, captureFrame, + codec_type); } return 0; @@ -282,23 +283,18 @@ WebRtc_Word32 VideoCaptureImpl::IncomingFrame( return -1; } - // Allocate I420 buffer. - int requiredLength = CalcBufferSize(kI420, width, abs(height)); - _captureFrame.VerifyAndAllocate(requiredLength); - if (!_captureFrame.Buffer()) - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, - "Failed to allocate frame buffer."); - return -1; - } - - memset(_captureFrame.Buffer(), 0, _captureFrame.Size()); - _captureFrame.SetWidth(width); // Setting absolute height (in case it was negative). // In Windows, the image starts bottom left, instead of top left. // Setting a negative source height, inverts the image (within LibYuv). - _captureFrame.SetHeight(abs(height)); - // TODO(mikhal) : Set stride when available. 
+ int ret = _captureFrame.CreateEmptyFrame(width, abs(height), + width, (width + 1) / 2, + (width + 1) / 2); + if (ret < 0) + { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, + "Failed to allocate I420 frame."); + return -1; + } const int conversionResult = ConvertToI420(commonVideoType, videoFrame, 0, 0, // No cropping @@ -313,7 +309,6 @@ WebRtc_Word32 VideoCaptureImpl::IncomingFrame( frameInfo.rawType); return -1; } - _captureFrame.SetLength(requiredLength); DeliverCapturedFrame(_captureFrame, captureTime, frameInfo.codecType); } else // Encoded format @@ -328,7 +323,6 @@ WebRtc_Word32 VideoCaptureImpl::IncomingFrame( frameInfo.codecType); } - const WebRtc_UWord32 processTime = (WebRtc_UWord32)(TickTime::Now() - startProcessTime).Milliseconds(); if (processTime > 10) // If the process time is too long MJPG will not work well. @@ -345,52 +339,23 @@ WebRtc_Word32 VideoCaptureImpl::IncomingFrameI420( const VideoFrameI420& video_frame, WebRtc_Word64 captureTime) { CriticalSectionScoped cs(&_callBackCs); - - // Allocate I420 buffer int frame_size = CalcBufferSize(kI420, video_frame.width, video_frame.height); _captureFrame.VerifyAndAllocate(frame_size); - if (!_captureFrame.Buffer()) { + // TODO(mikhal): Do we take the stride as is, or do we align it? + int size_y = video_frame.height * video_frame.y_pitch; + int size_u = video_frame.u_pitch * ((video_frame.height + 1) / 2); + int size_v = video_frame.v_pitch * ((video_frame.height + 1) / 2); + // TODO(mikhal): Can we use Swap here? This will do a memcpy. + int ret = _captureFrame.CreateFrame(size_y, video_frame.y_plane, + size_u, video_frame.u_plane, + size_v, video_frame.v_plane, + video_frame.width, video_frame.height, + video_frame.y_pitch, video_frame.u_pitch, + video_frame.v_pitch); + if (ret < 0) { WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, - "Failed to allocate frame buffer."); + "Failed to create I420VideoFrame"); return -1; } - // Copy planes to the _captureFrame - int y_width = video_frame.width; - int uv_width = video_frame.width / 2; - int y_rows = video_frame.height; - int uv_rows = video_frame.height / 2; // I420 - unsigned char* current_pointer = _captureFrame.Buffer(); - unsigned char* y_plane = video_frame.y_plane; - unsigned char* u_plane = video_frame.u_plane; - unsigned char* v_plane = video_frame.v_plane; - // Copy Y - for (int i = 0; i < y_rows; ++i) { - memcpy(current_pointer, y_plane, y_width); - // Remove the alignment which ViE doesn't support. - current_pointer += y_width; - y_plane += video_frame.y_pitch; - } - // Copy U - for (int i = 0; i < uv_rows; ++i) { - memcpy(current_pointer, u_plane, uv_width); - // Remove the alignment which ViE doesn't support. - current_pointer += uv_width; - u_plane += video_frame.u_pitch; - } - // Copy V - for (int i = 0; i < uv_rows; ++i) { - memcpy(current_pointer, v_plane, uv_width); - // Remove the alignment which ViE doesn't support. 
- current_pointer += uv_width; - v_plane += video_frame.v_pitch; - } - _captureFrame.SetLength(frame_size); - _captureFrame.SetWidth(video_frame.width); - _captureFrame.SetHeight(video_frame.height); - DeliverCapturedFrame(_captureFrame, captureTime, kVideoCodecUnknown); return 0; diff --git a/webrtc/modules/video_capture/main/source/video_capture_impl.h b/webrtc/modules/video_capture/main/source/video_capture_impl.h index 5d9021fca..c752abf21 100644 --- a/webrtc/modules/video_capture/main/source/video_capture_impl.h +++ b/webrtc/modules/video_capture/main/source/video_capture_impl.h @@ -18,6 +18,7 @@ #include "video_capture.h" #include "video_capture_config.h" #include "tick_util.h" +#include "common_video/interface/i420_video_frame.h" #include "common_video/libyuv/include/webrtc_libyuv.h" namespace webrtc @@ -97,9 +98,10 @@ public: protected: VideoCaptureImpl(const WebRtc_Word32 id); virtual ~VideoCaptureImpl(); - WebRtc_Word32 DeliverCapturedFrame( - VideoFrame& captureFrame, - WebRtc_Word64 capture_time, VideoCodecType codec_type); + // TODO(mikhal): Remove codec_type. + WebRtc_Word32 DeliverCapturedFrame(I420VideoFrame& captureFrame, + WebRtc_Word64 capture_time, + VideoCodecType codec_type); WebRtc_Word32 DeliverEncodedCapturedFrame( VideoFrame& captureFrame, WebRtc_Word64 capture_time, VideoCodecType codec_type); @@ -129,7 +131,7 @@ private: TickTime _incomingFrameTimes[kFrameRateCountHistorySize];// timestamp for local captured frames VideoRotationMode _rotateFrame; //Set if the frame should be rotated by the capture module. - VideoFrame _captureFrame; + I420VideoFrame _captureFrame; VideoFrame _capture_encoded_frame; // Used to make sure incoming timestamp is increasing for every frame. diff --git a/webrtc/modules/video_capture/main/test/video_capture_unittest.cc b/webrtc/modules/video_capture/main/test/video_capture_unittest.cc index 9da7bb28a..66562be96 100644 --- a/webrtc/modules/video_capture/main/test/video_capture_unittest.cc +++ b/webrtc/modules/video_capture/main/test/video_capture_unittest.cc @@ -14,6 +14,8 @@ #include "modules/utility/interface/process_thread.h" #include "modules/video_capture/main/interface/video_capture.h" #include "modules/video_capture/main/interface/video_capture_factory.h" +#include "common_video/interface/i420_video_frame.h" +#include "common_video/libyuv/include/webrtc_libyuv.h" #include "system_wrappers/interface/critical_section_wrapper.h" #include "system_wrappers/interface/scoped_ptr.h" #include "system_wrappers/interface/scoped_refptr.h" @@ -57,63 +59,80 @@ static const int kTestWidth = 352; static const int kTestFramerate = 30; // Compares the content of two video frames. 
-static bool CompareFrames(const webrtc::VideoFrame& frame1, - const webrtc::VideoFrame& frame2) { +static bool CompareFrames(const webrtc::I420VideoFrame& frame1, + const webrtc::I420VideoFrame& frame2) { bool result = - (frame1.Length() == frame2.Length()) && - (frame1.Width() == frame2.Width()) && - (frame1.Height() == frame2.Height()); + (frame1.stride(webrtc::kYPlane) == frame2.stride(webrtc::kYPlane)) && + (frame1.stride(webrtc::kUPlane) == frame2.stride(webrtc::kUPlane)) && + (frame1.stride(webrtc::kVPlane) == frame2.stride(webrtc::kVPlane)) && + (frame1.width() == frame2.width()) && + (frame1.height() == frame2.height()); - for (unsigned int i = 0; i < frame1.Length() && result; ++i) - result = (*(frame1.Buffer()+i) == *(frame2.Buffer()+i)); - return result; + if (!result) + return false; + for (int plane = 0; plane < webrtc::kNumOfPlanes; plane++) { + webrtc::PlaneType plane_type = static_cast<webrtc::PlaneType>(plane); + int allocated_size1 = frame1.allocated_size(plane_type); + int allocated_size2 = frame2.allocated_size(plane_type); + if (allocated_size1 != allocated_size2) + return false; + const uint8_t* plane_buffer1 = frame1.buffer(plane_type); + const uint8_t* plane_buffer2 = frame2.buffer(plane_type); + if (memcmp(plane_buffer1, plane_buffer2, allocated_size1)) + return false; + } + return true; } -// Compares the content of a I420 frame in planar form and video frame. +// Compares the content of a I420 frame in planar form and the new video frame. static bool CompareFrames(const webrtc::VideoFrameI420& frame1, - const webrtc::VideoFrame& frame2) { - if (frame1.width != frame2.Width() || - frame1.height != frame2.Height()) { + const webrtc::I420VideoFrame& frame2) { + if (frame1.width != frame2.width() || + frame1.height != frame2.height()) { return false; } // Compare Y - unsigned char* y_plane = frame1.y_plane; - for (unsigned int i = 0; i < frame2.Height(); ++i) { - for (unsigned int j = 0; j < frame2.Width(); ++j) { - if (*y_plane != *(frame2.Buffer()+i*frame2.Width() +j)) + const unsigned char* y_plane = frame1.y_plane; + const unsigned char* y_plane2 = frame2.buffer(webrtc::kYPlane); + for (int i = 0; i < frame2.height(); ++i) { + for (int j = 0; j < frame2.width(); ++j) { + if (*y_plane != *y_plane2) return false; ++y_plane; + ++y_plane2; } y_plane += frame1.y_pitch - frame1.width; + y_plane2 += frame2.stride(webrtc::kYPlane) - frame2.width(); } // Compare U - unsigned char* u_plane = frame1.u_plane; - for (unsigned int i = 0; i < frame2.Height() /2; ++i) { - for (unsigned int j = 0; j < frame2.Width() /2; ++j) { - if (*u_plane !=*( - frame2.Buffer()+frame2.Width() * frame2.Height() + - i*frame2.Width() / 2 + j)) { + const unsigned char* u_plane = frame1.u_plane; + const unsigned char* u_plane2 = frame2.buffer(webrtc::kUPlane); + for (int i = 0; i < (frame2.height() + 1) / 2; ++i) { + for (int j = 0; j < (frame2.width() + 1) / 2; ++j) { + if (*u_plane != *u_plane2) return false; - } ++u_plane; + ++u_plane2; } - u_plane += frame1.u_pitch - frame1.width / 2; + u_plane += frame1.u_pitch - (frame1.width + 1) / 2; + u_plane2 += frame2.stride(webrtc::kUPlane) - (frame2.width() + 1) / 2; } // Compare V unsigned char* v_plane = frame1.v_plane; - for (unsigned int i = 0; i < frame2.Height() /2; ++i) { - for (unsigned int j = 0; j < frame2.Width() /2; ++j) { - if (*v_plane != *( - frame2.Buffer()+frame2.Width() * frame2.Height()* 5 / 4 + - i*frame2.Width() / 2 + j)) { + const unsigned char* v_plane2 = frame2.buffer(webrtc::kVPlane); + for (int i = 0; i < (frame2.height() + 1) / 2; ++i) { + for (int j 
= 0; j < (frame2.width() + 1) / 2; ++j) { + if (*v_plane != *v_plane2) { return false; } ++v_plane; + ++v_plane2; } - v_plane += frame1.v_pitch - frame1.width / 2; + v_plane += frame1.v_pitch - (frame1.width + 1) / 2; + v_plane2 += frame2.stride(webrtc::kVPlane) - (frame2.width() + 1) / 2; } return true; } @@ -135,32 +154,38 @@ class TestVideoCaptureCallback : public VideoCaptureDataCallback { } virtual void OnIncomingCapturedFrame(const WebRtc_Word32 id, - webrtc::VideoFrame& videoFrame, + webrtc::I420VideoFrame& videoFrame, webrtc::VideoCodecType codecType) { CriticalSectionScoped cs(capture_cs_.get()); - int height = static_cast<int>(videoFrame.Height()); - int width = static_cast<int>(videoFrame.Width()); + int height = videoFrame.height(); + int width = videoFrame.width(); EXPECT_EQ(height, capability_.height); EXPECT_EQ(width, capability_.width); // RenderTimestamp should be the time now. EXPECT_TRUE( - videoFrame.RenderTimeMs() >= TickTime::MillisecondTimestamp()-30 && - videoFrame.RenderTimeMs() <= TickTime::MillisecondTimestamp()); + videoFrame.render_time_ms() >= TickTime::MillisecondTimestamp()-30 && + videoFrame.render_time_ms() <= TickTime::MillisecondTimestamp()); - if ((videoFrame.RenderTimeMs() > last_render_time_ms_ + (1000 * 1.1) / capability_.maxFPS && last_render_time_ms_ > 0) || - (videoFrame.RenderTimeMs() < last_render_time_ms_ + (1000 * 0.9) / capability_.maxFPS && last_render_time_ms_ > 0)) { + if ((videoFrame.render_time_ms() > last_render_time_ms_ + (1000 * 1.1) / capability_.maxFPS && last_render_time_ms_ > 0) || + (videoFrame.render_time_ms() < last_render_time_ms_ + (1000 * 0.9) / capability_.maxFPS && last_render_time_ms_ > 0)) { timing_warnings_++; } incoming_frames_++; - last_render_time_ms_ = videoFrame.RenderTimeMs(); + last_render_time_ms_ = videoFrame.render_time_ms(); last_frame_.CopyFrame(videoFrame); } + virtual void OnIncomingCapturedEncodedFrame(const WebRtc_Word32 id, + webrtc::VideoFrame& videoFrame, + webrtc::VideoCodecType codecType) + { + assert(!"NOTIMPLEMENTED"); + } virtual void OnCaptureDelayChanged(const WebRtc_Word32 id, const WebRtc_Word32 delay) { @@ -193,7 +218,7 @@ class TestVideoCaptureCallback : public VideoCaptureDataCallback { return capability_; } - bool CompareLastFrame(const webrtc::VideoFrame& frame) { + bool CompareLastFrame(const webrtc::I420VideoFrame& frame) { CriticalSectionScoped cs(capture_cs_.get()); return CompareFrames(last_frame_, frame); } @@ -210,7 +235,7 @@ class TestVideoCaptureCallback : public VideoCaptureDataCallback { WebRtc_Word64 last_render_time_ms_; int incoming_frames_; int timing_warnings_; - webrtc::VideoFrame last_frame_; + webrtc::I420VideoFrame last_frame_; }; class TestVideoCaptureFeedBack : public VideoCaptureFeedBack { @@ -421,12 +446,14 @@ class VideoCaptureExternalTest : public testing::Test { capability.maxFPS = kTestFramerate; capture_callback_.SetExpectedCapability(capability); - test_frame_.VerifyAndAllocate(kTestWidth * kTestHeight * 3 / 2); - test_frame_.SetLength(kTestWidth * kTestHeight * 3 / 2); - test_frame_.SetHeight(kTestHeight); - test_frame_.SetWidth(kTestWidth); + test_frame_.CreateEmptyFrame(kTestWidth, kTestHeight, kTestWidth, + ((kTestWidth + 1) / 2), (kTestWidth + 1) / 2); SleepMs(1); // Wait 1ms so that two tests can't have the same timestamp. 
- memset(test_frame_.Buffer(), 127, test_frame_.Length()); + memset(test_frame_.buffer(webrtc::kYPlane), 127, kTestWidth * kTestHeight); + memset(test_frame_.buffer(webrtc::kUPlane), 127, + ((kTestWidth + 1) / 2) * ((kTestHeight + 1) / 2)); + memset(test_frame_.buffer(webrtc::kVPlane), 127, + ((kTestWidth + 1) / 2) * ((kTestHeight + 1) / 2)); EXPECT_EQ(0, capture_module_->RegisterCaptureDataCallback( capture_callback_)); @@ -443,16 +470,20 @@ class VideoCaptureExternalTest : public testing::Test { webrtc::VideoCaptureExternal* capture_input_interface_; webrtc::scoped_refptr<VideoCaptureModule> capture_module_; webrtc::ProcessThread* process_module_; - webrtc::VideoFrame test_frame_; + webrtc::I420VideoFrame test_frame_; TestVideoCaptureCallback capture_callback_; TestVideoCaptureFeedBack capture_feedback_; }; // Test input of external video frames. -TEST_F(VideoCaptureExternalTest , TestExternalCapture) { - EXPECT_EQ(0, capture_input_interface_->IncomingFrame( - test_frame_.Buffer(), test_frame_.Length(), - capture_callback_.capability(), 0)); +TEST_F(VideoCaptureExternalTest, TestExternalCapture) { + unsigned int length = webrtc::CalcBufferSize(webrtc::kI420, + test_frame_.width(), + test_frame_.height()); + webrtc::scoped_array<uint8_t> test_buffer(new uint8_t[length]); + webrtc::ExtractBuffer(test_frame_, length, test_buffer.get()); + EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(), + length, capture_callback_.capability(), 0)); EXPECT_TRUE(capture_callback_.CompareLastFrame(test_frame_)); } @@ -463,7 +494,7 @@ TEST_F(VideoCaptureExternalTest, DISABLED_TestExternalCaptureI420) { webrtc::VideoFrameI420 frame_i420; frame_i420.width = kTestWidth; frame_i420.height = kTestHeight; - frame_i420.y_plane = test_frame_.Buffer(); + frame_i420.y_plane = test_frame_.buffer(webrtc::kYPlane); frame_i420.u_plane = frame_i420.y_plane + (kTestWidth * kTestHeight); frame_i420.v_plane = frame_i420.u_plane + ((kTestWidth * kTestHeight) >> 2); frame_i420.y_pitch = kTestWidth; @@ -473,26 +504,34 @@ TEST_F(VideoCaptureExternalTest, DISABLED_TestExternalCaptureI420) { EXPECT_TRUE(capture_callback_.CompareLastFrame(frame_i420)); // Test with a frame with pitch not equal to width - memset(test_frame_.Buffer(), 0xAA, test_frame_.Length()); - webrtc::VideoFrame aligned_test_frame; + memset(test_frame_.buffer(webrtc::kYPlane), 0xAA, + test_frame_.allocated_size(webrtc::kYPlane)); + memset(test_frame_.buffer(webrtc::kUPlane), 0xAA, + test_frame_.allocated_size(webrtc::kUPlane)); + memset(test_frame_.buffer(webrtc::kVPlane), 0xAA, + test_frame_.allocated_size(webrtc::kVPlane)); + webrtc::I420VideoFrame aligned_test_frame; int y_pitch = kTestWidth + 2; int u_pitch = kTestWidth / 2 + 1; int v_pitch = u_pitch; - aligned_test_frame.VerifyAndAllocate(kTestHeight * y_pitch + - (kTestHeight / 2) * u_pitch + - (kTestHeight / 2) * v_pitch); - aligned_test_frame.SetLength(aligned_test_frame.Size()); - memset(aligned_test_frame.Buffer(), 0, aligned_test_frame.Length()); + aligned_test_frame.CreateEmptyFrame(kTestWidth, kTestHeight, + y_pitch, u_pitch, v_pitch); + memset(aligned_test_frame.buffer(webrtc::kYPlane), 0, + kTestWidth * kTestHeight); + memset(aligned_test_frame.buffer(webrtc::kUPlane), 0, + (kTestWidth + 1) / 2 * (kTestHeight + 1) / 2); + memset(aligned_test_frame.buffer(webrtc::kVPlane), 0, + (kTestWidth + 1) / 2 * (kTestHeight + 1) / 2); // Copy the test_frame_ to aligned_test_frame. 
int y_width = kTestWidth; int uv_width = kTestWidth / 2; int y_rows = kTestHeight; int uv_rows = kTestHeight / 2; - unsigned char* current_pointer = aligned_test_frame.Buffer(); - unsigned char* y_plane = test_frame_.Buffer(); - unsigned char* u_plane = y_plane + kTestWidth * kTestHeight; - unsigned char* v_plane = u_plane + ((kTestWidth * kTestHeight) >> 2); + unsigned char* y_plane = test_frame_.buffer(webrtc::kYPlane); + unsigned char* u_plane = test_frame_.buffer(webrtc::kUPlane); + unsigned char* v_plane = test_frame_.buffer(webrtc::kVPlane); // Copy Y + unsigned char* current_pointer = aligned_test_frame.buffer(webrtc::kYPlane); for (int i = 0; i < y_rows; ++i) { memcpy(current_pointer, y_plane, y_width); // Remove the alignment which ViE doesn't support. @@ -500,6 +539,7 @@ TEST_F(VideoCaptureExternalTest, DISABLED_TestExternalCaptureI420) { y_plane += y_width; } // Copy U + current_pointer = aligned_test_frame.buffer(webrtc::kUPlane); for (int i = 0; i < uv_rows; ++i) { memcpy(current_pointer, u_plane, uv_width); // Remove the alignment which ViE doesn't support. @@ -507,6 +547,7 @@ TEST_F(VideoCaptureExternalTest, DISABLED_TestExternalCaptureI420) { u_plane += uv_width; } // Copy V + current_pointer = aligned_test_frame.buffer(webrtc::kVPlane); for (int i = 0; i < uv_rows; ++i) { memcpy(current_pointer, v_plane, uv_width); // Remove the alignment which ViE doesn't support. @@ -515,9 +556,9 @@ TEST_F(VideoCaptureExternalTest, DISABLED_TestExternalCaptureI420) { } frame_i420.width = kTestWidth; frame_i420.height = kTestHeight; - frame_i420.y_plane = aligned_test_frame.Buffer(); - frame_i420.u_plane = frame_i420.y_plane + (y_pitch * y_rows); - frame_i420.v_plane = frame_i420.u_plane + (u_pitch * uv_rows); + frame_i420.y_plane = aligned_test_frame.buffer(webrtc::kYPlane); + frame_i420.u_plane = aligned_test_frame.buffer(webrtc::kUPlane); + frame_i420.v_plane = aligned_test_frame.buffer(webrtc::kVPlane); frame_i420.y_pitch = y_pitch; frame_i420.u_pitch = u_pitch; frame_i420.v_pitch = v_pitch; @@ -532,9 +573,13 @@ TEST_F(VideoCaptureExternalTest , FrameRate) { TickTime startTime = TickTime::Now(); while ((TickTime::Now() - startTime).Milliseconds() < testTime * 1000) { - EXPECT_EQ(0, capture_input_interface_->IncomingFrame( - test_frame_.Buffer(), test_frame_.Length(), - capture_callback_.capability(), 0)); + unsigned int length = webrtc::CalcBufferSize(webrtc::kI420, + test_frame_.width(), + test_frame_.height()); + webrtc::scoped_array<uint8_t> test_buffer(new uint8_t[length]); + webrtc::ExtractBuffer(test_frame_, length, test_buffer.get()); + EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(), + length, capture_callback_.capability(), 0)); SleepMs(100); } EXPECT_TRUE(capture_feedback_.frame_rate() >= 8 && @@ -544,9 +589,13 @@ TEST_F(VideoCaptureExternalTest , FrameRate) { startTime = TickTime::Now(); while ((TickTime::Now() - startTime).Milliseconds() < testTime * 1000) { - EXPECT_EQ(0, capture_input_interface_->IncomingFrame( - test_frame_.Buffer(), test_frame_.Length(), - capture_callback_.capability(), 0)); + unsigned int length = webrtc::CalcBufferSize(webrtc::kI420, + test_frame_.width(), + test_frame_.height()); + webrtc::scoped_array<uint8_t> test_buffer(new uint8_t[length]); + webrtc::ExtractBuffer(test_frame_, length, test_buffer.get()); + EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(), + length, capture_callback_.capability(), 0)); SleepMs(1000 / 30); } EXPECT_EQ(webrtc::Cleared, capture_feedback_.alarm()); diff --git 
a/webrtc/modules/video_coding/codecs/i420/main/interface/i420.h b/webrtc/modules/video_coding/codecs/i420/main/interface/i420.h index 6699c2238..d3bb45ed6 100644 --- a/webrtc/modules/video_coding/codecs/i420/main/interface/i420.h +++ b/webrtc/modules/video_coding/codecs/i420/main/interface/i420.h @@ -47,7 +47,7 @@ public: // // Return value : WEBRTC_VIDEO_CODEC_OK if OK. // <0 - Error - virtual int Encode(const VideoFrame& inputImage, + virtual int Encode(const I420VideoFrame& inputImage, const CodecSpecificInfo* /*codecSpecificInfo*/, const std::vector<VideoFrameType>* /*frame_types*/); @@ -138,7 +138,7 @@ public: private: - VideoFrame _decodedImage; + I420VideoFrame _decodedImage; int _width; int _height; bool _inited; diff --git a/webrtc/modules/video_coding/codecs/i420/main/source/i420.cc b/webrtc/modules/video_coding/codecs/i420/main/source/i420.cc index 75d85c141..f5831823e 100644 --- a/webrtc/modules/video_coding/codecs/i420/main/source/i420.cc +++ b/webrtc/modules/video_coding/codecs/i420/main/source/i420.cc @@ -14,7 +14,6 @@ #include "common_video/libyuv/include/webrtc_libyuv.h" - namespace webrtc { @@ -76,9 +75,9 @@ int I420Encoder::InitEncode(const VideoCodec* codecSettings, -int I420Encoder::Encode(const VideoFrame& inputImage, - const CodecSpecificInfo* /*codecSpecificInfo*/, - const std::vector<VideoFrameType>* /*frame_types*/) { +int I420Encoder::Encode(const I420VideoFrame& inputImage, + const CodecSpecificInfo* /*codecSpecificInfo*/, + const std::vector<VideoFrameType>* /*frame_types*/) { if (!_inited) { return WEBRTC_VIDEO_CODEC_UNINITIALIZED; } @@ -87,29 +86,32 @@ int I420Encoder::Encode(const VideoFrame& inputImage, } _encodedImage._frameType = kKeyFrame; // No coding. - _encodedImage._timeStamp = inputImage.TimeStamp(); - _encodedImage._encodedHeight = inputImage.Height(); - _encodedImage._encodedWidth = inputImage.Width(); - if (inputImage.Length() > _encodedImage._size) { + _encodedImage._timeStamp = inputImage.timestamp(); + _encodedImage._encodedHeight = inputImage.height(); + _encodedImage._encodedWidth = inputImage.width(); + int req_length = CalcBufferSize(kI420, inputImage.width(), + inputImage.height()); + if (_encodedImage._size < static_cast<uint32_t>(req_length)) { // Allocating encoded memory. if (_encodedImage._buffer != NULL) { delete [] _encodedImage._buffer; _encodedImage._buffer = NULL; _encodedImage._size = 0; } - const uint32_t newSize = CalcBufferSize(kI420, - _encodedImage._encodedWidth, - _encodedImage._encodedHeight); - uint8_t* newBuffer = new uint8_t[newSize]; + uint8_t* newBuffer = new uint8_t[req_length]; if (newBuffer == NULL) { return WEBRTC_VIDEO_CODEC_MEMORY; } - _encodedImage._size = newSize; + _encodedImage._size = req_length; _encodedImage._buffer = newBuffer; } - memcpy(_encodedImage._buffer, inputImage.Buffer(), inputImage.Length()); - _encodedImage._length = inputImage.Length(); + + int ret_length = ExtractBuffer(inputImage, req_length, _encodedImage._buffer); + if (ret_length < 0) + return WEBRTC_VIDEO_CODEC_MEMORY; + _encodedImage._length = ret_length; + _encodedCompleteCallback->Encoded(_encodedImage); return WEBRTC_VIDEO_CODEC_OK; } @@ -174,12 +176,24 @@ I420Decoder::Decode(const EncodedImage& inputImage, } // Set decoded image parameters. 
- if (_decodedImage.CopyFrame(inputImage._length, inputImage._buffer) < 0) { + int half_width = (_width + 1) / 2; + int half_height = (_height + 1) / 2; + int size_y = _width * _height; + int size_uv = half_width * half_height; + + const uint8_t* buffer_y = inputImage._buffer; + const uint8_t* buffer_u = buffer_y + size_y; + const uint8_t* buffer_v = buffer_u + size_uv; + // TODO(mikhal): Do we need an aligned stride? + int ret = _decodedImage.CreateFrame(size_y, buffer_y, + size_uv, buffer_u, + size_uv, buffer_v, + _width, _height, + _width, half_width, half_width); + if (ret < 0) { return WEBRTC_VIDEO_CODEC_MEMORY; } - _decodedImage.SetHeight(_height); - _decodedImage.SetWidth(_width); - _decodedImage.SetTimeStamp(inputImage._timeStamp); + _decodedImage.set_timestamp(inputImage._timeStamp); _decodeCompleteCallback->Decoded(_decodedImage); return WEBRTC_VIDEO_CODEC_OK; @@ -193,7 +207,6 @@ I420Decoder::RegisterDecodeCompleteCallback(DecodedImageCallback* callback) { int I420Decoder::Release() { - _decodedImage.Free(); _inited = false; return WEBRTC_VIDEO_CODEC_OK; } diff --git a/webrtc/modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h b/webrtc/modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h index f1123c922..890be1685 100644 --- a/webrtc/modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h +++ b/webrtc/modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h @@ -37,7 +37,7 @@ class MockVideoEncoder : public VideoEncoder { WebRtc_Word32 numberOfCores, WebRtc_UWord32 maxPayloadSize)); MOCK_METHOD3(Encode, - WebRtc_Word32(const VideoFrame& inputImage, + WebRtc_Word32(const I420VideoFrame& inputImage, const CodecSpecificInfo* codecSpecificInfo, const std::vector<VideoFrameType>* frame_types)); MOCK_METHOD1(RegisterEncodeCompleteCallback, @@ -57,7 +57,7 @@ class MockVideoEncoder : public VideoEncoder { class MockDecodedImageCallback : public DecodedImageCallback { public: MOCK_METHOD1(Decoded, - WebRtc_Word32(VideoFrame& decodedImage)); + WebRtc_Word32(I420VideoFrame& decodedImage)); MOCK_METHOD1(ReceivedDecodedReferenceFrame, WebRtc_Word32(const WebRtc_UWord64 pictureId)); MOCK_METHOD1(ReceivedDecodedFrame, diff --git a/webrtc/modules/video_coding/codecs/interface/video_codec_interface.h b/webrtc/modules/video_coding/codecs/interface/video_codec_interface.h index 7ae525336..cc6359a83 100644 --- a/webrtc/modules/video_coding/codecs/interface/video_codec_interface.h +++ b/webrtc/modules/video_coding/codecs/interface/video_codec_interface.h @@ -14,9 +14,11 @@ #include <vector> #include "common_types.h" +#include "common_video/interface/i420_video_frame.h" #include "modules/interface/module_common_types.h" #include "modules/video_coding/codecs/interface/video_error_codes.h" #include "common_video/interface/video_image.h" + #include "typedefs.h" namespace webrtc { @@ -102,7 +104,7 @@ public: // Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 // otherwise. virtual WebRtc_Word32 Encode( - const VideoFrame& inputImage, + const I420VideoFrame& inputImage, const CodecSpecificInfo* codecSpecificInfo, const std::vector<VideoFrameType>* frame_types) = 0; @@ -167,7 +169,7 @@ public: // - decodedImage : The decoded image. // // Return value : 0 if OK, < 0 otherwise. 
- virtual WebRtc_Word32 Decoded(VideoFrame& decodedImage) = 0; + virtual WebRtc_Word32 Decoded(I420VideoFrame& decodedImage) = 0; virtual WebRtc_Word32 ReceivedDecodedReferenceFrame(const WebRtc_UWord64 pictureId) {return -1;} diff --git a/webrtc/modules/video_coding/codecs/test/videoprocessor.cc b/webrtc/modules/video_coding/codecs/test/videoprocessor.cc index 53bd11232..4494e53cf 100644 --- a/webrtc/modules/video_coding/codecs/test/videoprocessor.cc +++ b/webrtc/modules/video_coding/codecs/test/videoprocessor.cc @@ -58,18 +58,11 @@ bool VideoProcessorImpl::Init() { // Calculate a factor used for bit rate calculations: bit_rate_factor_ = config_.codec_settings->maxFramerate * 0.001 * 8; // bits - int frame_length_in_bytes = frame_reader_->FrameLength(); - // Initialize data structures used by the encoder/decoder APIs + int frame_length_in_bytes = frame_reader_->FrameLength(); source_buffer_ = new WebRtc_UWord8[frame_length_in_bytes]; last_successful_frame_buffer_ = new WebRtc_UWord8[frame_length_in_bytes]; - - // Set fixed properties common for all frames: - source_frame_.SetWidth(config_.codec_settings->width); - source_frame_.SetHeight(config_.codec_settings->height); - source_frame_.VerifyAndAllocate(frame_length_in_bytes); - source_frame_.SetLength(frame_length_in_bytes); - + // Set fixed properties common for all frames. // To keep track of spatial resize actions by encoder. last_encoder_frame_width_ = config_.codec_settings->width; last_encoder_frame_height_ = config_.codec_settings->height; @@ -169,15 +162,24 @@ bool VideoProcessorImpl::ProcessFrame(int frame_number) { } if (frame_reader_->ReadFrame(source_buffer_)) { // Copy the source frame to the newly read frame data. - // Length is common for all frames. - source_frame_.CopyFrame(source_frame_.Length(), source_buffer_); + int size_y = config_.codec_settings->width * config_.codec_settings->height; + int half_width = (config_.codec_settings->width + 1) / 2; + int half_height = (config_.codec_settings->height + 1) / 2; + int size_uv = half_width * half_height; + source_frame_.CreateFrame(size_y, source_buffer_, + size_uv, source_buffer_ + size_y, + size_uv, source_buffer_ + size_y + size_uv, + config_.codec_settings->width, + config_.codec_settings->height, + config_.codec_settings->width, + half_width, half_width); // Ensure we have a new statistics data object we can fill: FrameStatistic& stat = stats_->NewFrame(frame_number); encode_start_ = TickTime::Now(); // Use the frame number as "timestamp" to identify frames - source_frame_.SetTimeStamp(frame_number); + source_frame_.set_timestamp(frame_number); // Decide if we're going to force a keyframe: std::vector<VideoFrameType> frame_types(1, kDeltaFrame); @@ -273,9 +275,9 @@ void VideoProcessorImpl::FrameEncoded(EncodedImage* encoded_image) { last_frame_missing_ = encoded_image->_length == 0; } -void VideoProcessorImpl::FrameDecoded(const VideoFrame& image) { +void VideoProcessorImpl::FrameDecoded(const I420VideoFrame& image) { TickTime decode_stop = TickTime::Now(); - int frame_number = image.TimeStamp(); + int frame_number = image.timestamp(); // Report stats FrameStatistic& stat = stats_->stats_[frame_number]; stat.decode_time_in_us = GetElapsedTimeMicroseconds(decode_start_, @@ -283,18 +285,18 @@ stat.decoding_successful = true; // Check for resize action (either down or up): - if (static_cast<int>(image.Width()) != last_encoder_frame_width_ || - static_cast<int>(image.Height()) != last_encoder_frame_height_ ) { + if 
(static_cast<int>(image.width()) != last_encoder_frame_width_ || + static_cast<int>(image.height()) != last_encoder_frame_height_ ) { ++num_spatial_resizes_; - last_encoder_frame_width_ = image.Width(); - last_encoder_frame_height_ = image.Height(); + last_encoder_frame_width_ = image.width(); + last_encoder_frame_height_ = image.height(); } // Check if codec size is different from native/original size, and if so, // upsample back to original size: needed for PSNR and SSIM computations. - if (image.Width() != config_.codec_settings->width || - image.Height() != config_.codec_settings->height) { - VideoFrame up_image; - int ret_val = scaler_.Set(image.Width(), image.Height(), + if (image.width() != config_.codec_settings->width || + image.height() != config_.codec_settings->height) { + I420VideoFrame up_image; + int ret_val = scaler_.Set(image.width(), image.height(), config_.codec_settings->width, config_.codec_settings->height, kI420, kI420, kScaleBilinear); @@ -309,20 +311,27 @@ fprintf(stderr, "Failed to scale frame: %d, return code: %d\n", frame_number, ret_val); } + // TODO(mikhal): Extracting the buffer for now - need to update test. + int length = CalcBufferSize(kI420, up_image.width(), up_image.height()); + scoped_array<uint8_t> image_buffer(new uint8_t[length]); + length = ExtractBuffer(up_image, length, image_buffer.get()); // Update our copy of the last successful frame: - memcpy(last_successful_frame_buffer_, up_image.Buffer(), up_image.Length()); - - bool write_success = frame_writer_->WriteFrame(up_image.Buffer()); + memcpy(last_successful_frame_buffer_, image_buffer.get(), length); + bool write_success = frame_writer_->WriteFrame(image_buffer.get()); assert(write_success); if (!write_success) { fprintf(stderr, "Failed to write frame %d to disk!", frame_number); } - up_image.Free(); } else { // No resize. // Update our copy of the last successful frame: + // TODO(mikhal): Add as a member function, so won't be allocated per frame. 
+ int length = CalcBufferSize(kI420, image.width(), image.height()); + scoped_array<uint8_t> image_buffer(new uint8_t[length]); + length = ExtractBuffer(image, length, image_buffer.get()); + assert(length > 0); + memcpy(last_successful_frame_buffer_, image_buffer.get(), length); - bool write_success = frame_writer_->WriteFrame(image.Buffer()); + bool write_success = frame_writer_->WriteFrame(image_buffer.get()); assert(write_success); if (!write_success) { fprintf(stderr, "Failed to write frame %d to disk!", frame_number); @@ -379,7 +388,7 @@ VideoProcessorImpl::VideoProcessorEncodeCompleteCallback::Encoded( } WebRtc_Word32 VideoProcessorImpl::VideoProcessorDecodeCompleteCallback::Decoded( - VideoFrame& image) { + I420VideoFrame& image) { video_processor_->FrameDecoded(image); // forward to parent class return 0; } diff --git a/webrtc/modules/video_coding/codecs/test/videoprocessor.h b/webrtc/modules/video_coding/codecs/test/videoprocessor.h index a8f9228d4..ed3e17306 100644 --- a/webrtc/modules/video_coding/codecs/test/videoprocessor.h +++ b/webrtc/modules/video_coding/codecs/test/videoprocessor.h @@ -15,7 +15,7 @@ #include "common_video/libyuv/include/webrtc_libyuv.h" #include "common_video/libyuv/include/scaler.h" -#include "modules/interface/module_common_types.h" +#include "common_video/interface/i420_video_frame.h" #include "modules/video_coding/codecs/interface/video_codec_interface.h" #include "modules/video_coding/codecs/test/packet_manipulator.h" #include "modules/video_coding/codecs/test/stats.h" @@ -175,7 +175,7 @@ class VideoProcessorImpl : public VideoProcessor { // Invoked by the callback when a frame has completed encoding. void FrameEncoded(webrtc::EncodedImage* encodedImage); // Invoked by the callback when a frame has completed decoding. - void FrameDecoded(const webrtc::VideoFrame& image); + void FrameDecoded(const webrtc::I420VideoFrame& image); // Used for getting a 32-bit integer representing time // (checks the size is within signed 32-bit bounds before casting it) int GetElapsedTimeMicroseconds(const webrtc::TickTime& start, @@ -204,7 +204,7 @@ class VideoProcessorImpl : public VideoProcessor { // Keep track of the last successful frame, since we need to write that // when decoding fails: WebRtc_UWord8* last_successful_frame_buffer_; - webrtc::VideoFrame source_frame_; + webrtc::I420VideoFrame source_frame_; // To keep track of if we have excluded the first key frame from packet loss: bool first_key_frame_has_been_excluded_; // To tell the decoder previous frame have been dropped due to packet loss: @@ -247,7 +247,7 @@ class VideoProcessorImpl : public VideoProcessor { explicit VideoProcessorDecodeCompleteCallback(VideoProcessorImpl* vp) : video_processor_(vp) { } - WebRtc_Word32 Decoded(webrtc::VideoFrame& image); + WebRtc_Word32 Decoded(webrtc::I420VideoFrame& image); private: VideoProcessorImpl* video_processor_; diff --git a/webrtc/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc b/webrtc/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc index 0d7a06c1c..56c783962 100644 --- a/webrtc/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc +++ b/webrtc/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc @@ -156,7 +156,8 @@ class VideoProcessorIntegrationTest: public testing::Test { webrtc::test::ResourcePath("foreman_cif", "yuv"); config_.output_filename = webrtc::test::OutputPath() + "foreman_cif_short_video_codecs_test_framework_integrationtests.yuv"; - config_.frame_length_in_bytes = 3 * kCIFWidth * 
kCIFHeight / 2; + config_.frame_length_in_bytes = CalcBufferSize(kI420, + kCIFWidth, kCIFHeight); config_.verbose = false; // Only allow encoder/decoder to use single core, for predictability. config_.use_single_core = true; diff --git a/webrtc/modules/video_coding/codecs/test/videoprocessor_unittest.cc b/webrtc/modules/video_coding/codecs/test/videoprocessor_unittest.cc index d51ef6bd9..d0cd2c9dc 100644 --- a/webrtc/modules/video_coding/codecs/test/videoprocessor_unittest.cc +++ b/webrtc/modules/video_coding/codecs/test/videoprocessor_unittest.cc @@ -64,7 +64,7 @@ class VideoProcessorTest: public testing::Test { EXPECT_CALL(frame_reader_mock_, NumberOfFrames()) .WillOnce(Return(1)); EXPECT_CALL(frame_reader_mock_, FrameLength()) - .WillOnce(Return(150000)); + .WillOnce(Return(152064)); } }; diff --git a/webrtc/modules/video_coding/codecs/test_framework/benchmark.cc b/webrtc/modules/video_coding/codecs/test_framework/benchmark.cc index 8b5fe0abe..1d714e6e0 100644 --- a/webrtc/modules/video_coding/codecs/test_framework/benchmark.cc +++ b/webrtc/modules/video_coding/codecs/test_framework/benchmark.cc @@ -230,9 +230,6 @@ Benchmark::PerformNormalTest() CodecSettings(_target->GetWidth(), _target->GetHeight(), _target->GetFrameRate(), _bitRate); Setup(); EventWrapper* waitEvent = EventWrapper::Create(); - - _inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame); - _decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame); _encoder->InitEncode(&_inst, 4, 1440); CodecSpecific_InitBitrate(); _decoder->InitDecode(&_inst,1); @@ -282,9 +279,7 @@ Benchmark::PerformNormalTest() waitEvent->Wait(5); } - _inputVideoBuffer.Free(); _encodedVideoBuffer.Free(); - _decodedVideoBuffer.Free(); _encoder->Release(); _decoder->Release(); diff --git a/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.cc b/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.cc index 6a53bc807..67cd02c45 100644 --- a/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.cc +++ b/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.cc @@ -16,6 +16,7 @@ #include #include +#include "common_video/libyuv/include/webrtc_libyuv.h" #include "gtest/gtest.h" #include "tick_util.h" #include "testsupport/fileutils.h" @@ -262,16 +263,13 @@ WebRtc_UWord32 VideoDecodeCompleteCallback::DecodedBytes() } WebRtc_Word32 -VideoDecodeCompleteCallback::Decoded(VideoFrame& image) +VideoDecodeCompleteCallback::Decoded(I420VideoFrame& image) { _test.Decoded(image); - _decodedBytes += image.Length(); + _decodedBytes += CalcBufferSize(kI420, image.width(), image.height()); if (_decodedFile != NULL) { - if (fwrite(image.Buffer(), 1, image.Length(), - _decodedFile) != image.Length()) { - return -1; - } + return PrintI420VideoFrame(image, _decodedFile); } return 0; } @@ -300,14 +298,14 @@ NormalAsyncTest::Encoded(const EncodedImage& encodedImage) } void -NormalAsyncTest::Decoded(const VideoFrame& decodedImage) +NormalAsyncTest::Decoded(const I420VideoFrame& decodedImage) { _decodeCompleteTime = tGetTime(); _decFrameCnt++; _totalDecodePipeTime += _decodeCompleteTime - - _decodeTimes[decodedImage.TimeStamp()]; - _decodedWidth = decodedImage.Width(); - _decodedHeight = decodedImage.Height(); + _decodeTimes[decodedImage.timestamp()]; + _decodedWidth = decodedImage.width(); + _decodedHeight = decodedImage.height(); } void @@ -316,8 +314,6 @@ NormalAsyncTest::Perform() _inname = webrtc::test::ProjectRootPath() + "resources/foreman_cif.yuv"; CodecSettings(352, 288, 30, _bitRate); Setup(); - 
_inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame); - _decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame); if(_encoder->InitEncode(&_inst, 1, 1440) < 0) { exit(EXIT_FAILURE); } @@ -410,17 +406,19 @@ NormalAsyncTest::Encode() { _lengthEncFrame = 0; EXPECT_GT(fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile), 0u); - _inputVideoBuffer.CopyFrame(_lengthSourceFrame, _sourceBuffer); - _inputVideoBuffer.SetTimeStamp((unsigned int) + _inputVideoBuffer.CreateFrame(_sizeY, _sourceBuffer, + _sizeUv, _sourceBuffer + _sizeY, + _sizeUv, _sourceBuffer + _sizeY + _sizeUv, + _width, _height, + _width, _halfWidth, _halfWidth); + _inputVideoBuffer.set_timestamp((unsigned int) (_encFrameCnt * 9e4 / _inst.maxFramerate)); - _inputVideoBuffer.SetWidth(_inst.width); - _inputVideoBuffer.SetHeight(_inst.height); if (feof(_sourceFile) != 0) { return true; } _encodeCompleteTime = 0; - _encodeTimes[_inputVideoBuffer.TimeStamp()] = tGetTime(); + _encodeTimes[_inputVideoBuffer.timestamp()] = tGetTime(); std::vector<VideoFrameType> frame_types(1, kDeltaFrame); // check SLI queue @@ -474,12 +472,12 @@ NormalAsyncTest::Encode() if (_encodeCompleteTime > 0) { _totalEncodeTime += _encodeCompleteTime - - _encodeTimes[_inputVideoBuffer.TimeStamp()]; + _encodeTimes[_inputVideoBuffer.timestamp()]; } else { _totalEncodeTime += tGetTime() - - _encodeTimes[_inputVideoBuffer.TimeStamp()]; + _encodeTimes[_inputVideoBuffer.timestamp()]; } assert(ret >= 0); return false; diff --git a/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.h b/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.h index e03f7bfe6..76ec5283f 100644 --- a/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.h +++ b/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.h @@ -80,7 +80,7 @@ public: virtual ~NormalAsyncTest() {}; virtual void Perform(); virtual void Encoded(const webrtc::EncodedImage& encodedImage); - virtual void Decoded(const webrtc::VideoFrame& decodedImage); + virtual void Decoded(const webrtc::I420VideoFrame& decodedImage); virtual webrtc::CodecSpecificInfo* CopyCodecSpecificInfo( const webrtc::CodecSpecificInfo* codecSpecificInfo) const; @@ -172,7 +172,7 @@ public: _decodedBytes(0) {} - virtual WebRtc_Word32 Decoded(webrtc::VideoFrame& decodedImage); + virtual WebRtc_Word32 Decoded(webrtc::I420VideoFrame& decodedImage); virtual WebRtc_Word32 ReceivedDecodedReferenceFrame(const WebRtc_UWord64 pictureId); virtual WebRtc_Word32 ReceivedDecodedFrame(const WebRtc_UWord64 pictureId); diff --git a/webrtc/modules/video_coding/codecs/test_framework/normal_test.cc b/webrtc/modules/video_coding/codecs/test_framework/normal_test.cc index 97f3f88dd..b341f356f 100644 --- a/webrtc/modules/video_coding/codecs/test_framework/normal_test.cc +++ b/webrtc/modules/video_coding/codecs/test_framework/normal_test.cc @@ -14,6 +14,7 @@ #include #include +#include "common_video/libyuv/include/webrtc_libyuv.h" #include "gtest/gtest.h" #include "testsupport/fileutils.h" @@ -22,7 +23,13 @@ NormalTest::NormalTest() CodecTest("Normal Test 1", "A test of normal execution of the codec"), _testNo(1), _lengthEncFrame(0), -_appendNext(false) +_appendNext(false), +_width(0), +_halfWidth(0), +_height(0), +_halfHeight(0), +_sizeY(0), +_sizeUv(0) { } @@ -33,7 +40,13 @@ CodecTest(name, description), _requestKeyFrame(false), _testNo(testNo), _lengthEncFrame(0), -_appendNext(false) +_appendNext(false), +_width(0), +_halfWidth(0), +_height(0), +_halfHeight(0), +_sizeY(0), +_sizeUv(0) { } @@ -108,12 +121,22 @@ 
NormalTest::Teardown() void NormalTest::Perform() { + _width = 352; + _halfWidth = (_width + 1) / 2; + _height = 288; + _halfHeight = (_height + 1) / 2; + _sizeY = _width * _height; + _sizeUv = _halfWidth * _halfHeight; _inname = webrtc::test::ProjectRootPath() + "resources/foreman_cif.yuv"; - CodecSettings(352, 288, 30, _bitRate); + CodecSettings(_width, _height, 30, _bitRate); Setup(); - _inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame); - _decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame); + _inputVideoBuffer.CreateEmptyFrame(_width, _height, + _width, _halfWidth, _halfWidth); + _decodedVideoBuffer.CreateEmptyFrame(_width, _height, + _width, _halfWidth, _halfWidth); _encodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame); _encoder->InitEncode(&_inst, 1, 1460); @@ -140,8 +163,7 @@ NormalTest::Perform() fprintf(stderr,"\n\nError in decoder: %d\n\n", decodeLength); exit(EXIT_FAILURE); } - if (fwrite(_decodedVideoBuffer.Buffer(), 1, decodeLength, - _decodedFile) != static_cast<unsigned int>(decodeLength)) { + if (PrintI420VideoFrame(_decodedVideoBuffer, _decodedFile) < 0) { return; } CodecSpecific_InitBitrate(); @@ -157,8 +179,7 @@ NormalTest::Perform() fprintf(stderr,"\n\nError in decoder: %d\n\n", decodeLength); exit(EXIT_FAILURE); } - if (fwrite(_decodedVideoBuffer.Buffer(), 1, decodeLength, - _decodedFile) != static_cast<unsigned int>(decodeLength)) { + if (PrintI420VideoFrame(_decodedVideoBuffer, _decodedFile) < 0) { return; } } @@ -173,8 +194,6 @@ NormalTest::Perform() (*_log) << "Average encode time: " << avgEncTime << " s" << std::endl; (*_log) << "Average decode time: " << avgDecTime << " s" << std::endl; - _inputVideoBuffer.Free(); - _encoder->Release(); _decoder->Release(); @@ -190,8 +209,13 @@ NormalTest::Encode() { return true; } - _inputVideoBuffer.CopyFrame(_lengthSourceFrame, _sourceBuffer); - _inputVideoBuffer.SetTimeStamp(_framecnt); + _inputVideoBuffer.CreateFrame(_sizeY, _sourceBuffer, + _sizeUv, _sourceBuffer + _sizeY, + _sizeUv, _sourceBuffer + _sizeY + + _sizeUv, + _width, _height, + _width, _halfWidth, _halfWidth); + _inputVideoBuffer.set_timestamp(_framecnt); // This multiple attempt ridiculousness is to accommodate VP7: // 1. The wrapper can unilaterally reduce the framerate for low bitrates. 
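The CreateFrame() calls in this patch all split one contiguous I420 buffer into three planes using the same arithmetic: the luma plane is width * height bytes, each chroma plane is half_width * half_height bytes with the halves rounded up for odd dimensions, and U and V follow Y back to back. A minimal sketch of that layout computation (the I420Layout struct and helper name are illustrative, not part of the patch):

#include <stdint.h>

// Illustrative helper, not patch code: computes the plane sizes and offsets
// that calls like CreateFrame(_sizeY, buf, _sizeUv, buf + _sizeY, ...) rely on.
struct I420Layout {
  int size_y;    // width * height
  int size_uv;   // per chroma plane: half_width * half_height
  int offset_u;  // U starts immediately after Y
  int offset_v;  // V starts immediately after U
};

I420Layout ComputeI420Layout(int width, int height) {
  I420Layout layout;
  int half_width = (width + 1) / 2;    // round up for odd widths
  int half_height = (height + 1) / 2;  // round up for odd heights
  layout.size_y = width * height;
  layout.size_uv = half_width * half_height;
  layout.offset_u = layout.size_y;
  layout.offset_v = layout.size_y + layout.size_uv;
  return layout;
}

// For CIF (352x288) this gives size_y = 101376 and size_uv = 25344, for a
// total of 152064 bytes, matching the FrameLength() expectation in
// videoprocessor_unittest.cc above and what CalcBufferSize(kI420, ...) returns.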
@@ -204,8 +228,8 @@ NormalTest::Encode() { starttime = clock()/(double)CLOCKS_PER_SEC; - _inputVideoBuffer.SetWidth(_inst.width); - _inputVideoBuffer.SetHeight(_inst.height); + _inputVideoBuffer.set_width(_inst.width); + _inputVideoBuffer.set_height(_inst.height); //_lengthEncFrame = _encoder->Encode(_inputVideoBuffer, _encodedVideoBuffer, _frameInfo, // _inst.frameRate, _requestKeyFrame && !(_framecnt%50)); diff --git a/webrtc/modules/video_coding/codecs/test_framework/normal_test.h b/webrtc/modules/video_coding/codecs/test_framework/normal_test.h index 061fb6d96..ca3aba301 100644 --- a/webrtc/modules/video_coding/codecs/test_framework/normal_test.h +++ b/webrtc/modules/video_coding/codecs/test_framework/normal_test.h @@ -40,6 +40,12 @@ protected: unsigned int _testNo; int _lengthEncFrame; bool _appendNext; + int _width; + int _halfWidth; + int _height; + int _halfHeight; + int _sizeY; + int _sizeUv; }; #endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_NORMAL_TEST_H_ diff --git a/webrtc/modules/video_coding/codecs/test_framework/packet_loss_test.cc b/webrtc/modules/video_coding/codecs/test_framework/packet_loss_test.cc index a9d4b37cf..674072a57 100644 --- a/webrtc/modules/video_coding/codecs/test_framework/packet_loss_test.cc +++ b/webrtc/modules/video_coding/codecs/test_framework/packet_loss_test.cc @@ -64,11 +64,11 @@ PacketLossTest::Encoded(const EncodedImage& encodedImage) } void -PacketLossTest::Decoded(const VideoFrame& decodedImage) +PacketLossTest::Decoded(const I420VideoFrame& decodedImage) { // check the frame queue if any frames have gone missing assert(!_frameQueue.empty()); // decoded frame is not in the queue - while(_frameQueue.front() < decodedImage.TimeStamp()) + while(_frameQueue.front() < decodedImage.timestamp()) { // this frame is missing // write previous decoded frame again (frame freeze) @@ -84,20 +84,23 @@ PacketLossTest::Decoded(const VideoFrame& decodedImage) _frameQueue.pop_front(); } // Decoded frame is not in the queue. - assert(_frameQueue.front() == decodedImage.TimeStamp()); + assert(_frameQueue.front() == decodedImage.timestamp()); // pop the current frame _frameQueue.pop_front(); // save image for future freeze-frame - if (_lastFrameLength < decodedImage.Length()) + unsigned int length = CalcBufferSize(kI420, decodedImage.width(), + decodedImage.height()); + if (_lastFrameLength < length) { if (_lastFrame) delete [] _lastFrame; - _lastFrame = new WebRtc_UWord8[decodedImage.Length()]; + _lastFrame = new WebRtc_UWord8[length]; } - memcpy(_lastFrame, decodedImage.Buffer(), decodedImage.Length()); - _lastFrameLength = decodedImage.Length(); + // TODO(mikhal): Can't the last frame be an I420VideoFrame? 
+ ExtractBuffer(decodedImage, length, _lastFrame); + _lastFrameLength = length; NormalAsyncTest::Decoded(decodedImage); } diff --git a/webrtc/modules/video_coding/codecs/test_framework/packet_loss_test.h b/webrtc/modules/video_coding/codecs/test_framework/packet_loss_test.h index 1702f506b..e87dbc06e 100644 --- a/webrtc/modules/video_coding/codecs/test_framework/packet_loss_test.h +++ b/webrtc/modules/video_coding/codecs/test_framework/packet_loss_test.h @@ -21,7 +21,7 @@ public: PacketLossTest(); virtual ~PacketLossTest() {if(_lastFrame) {delete [] _lastFrame; _lastFrame = NULL;}} virtual void Encoded(const webrtc::EncodedImage& encodedImage); - virtual void Decoded(const webrtc::VideoFrame& decodedImage); + virtual void Decoded(const webrtc::I420VideoFrame& decodedImage); protected: PacketLossTest(std::string name, std::string description); PacketLossTest(std::string name, diff --git a/webrtc/modules/video_coding/codecs/test_framework/test.h b/webrtc/modules/video_coding/codecs/test_framework/test.h index 27207e01c..b0a639388 100644 --- a/webrtc/modules/video_coding/codecs/test_framework/test.h +++ b/webrtc/modules/video_coding/codecs/test_framework/test.h @@ -49,11 +49,11 @@ protected: WebRtc_UWord32 _bitRate; unsigned int _lengthSourceFrame; unsigned char* _sourceBuffer; - webrtc::VideoFrame _inputVideoBuffer; + webrtc::I420VideoFrame _inputVideoBuffer; // TODO(mikhal): For now using VideoFrame for encodedBuffer, should use a // designated class. webrtc::VideoFrame _encodedVideoBuffer; - webrtc::VideoFrame _decodedVideoBuffer; + webrtc::I420VideoFrame _decodedVideoBuffer; webrtc::VideoCodec _inst; std::fstream* _log; std::string _inname; diff --git a/webrtc/modules/video_coding/codecs/test_framework/unit_test.cc b/webrtc/modules/video_coding/codecs/test_framework/unit_test.cc index 8d38593c1..5af0f2fe2 100644 --- a/webrtc/modules/video_coding/codecs/test_framework/unit_test.cc +++ b/webrtc/modules/video_coding/codecs/test_framework/unit_test.cc @@ -98,7 +98,8 @@ UnitTestEncodeCompleteCallback::Encoded(EncodedImage& encodedImage, _encodedVideoBuffer->VerifyAndAllocate(encodedImage._size); _encodedVideoBuffer->CopyFrame(encodedImage._size, encodedImage._buffer); _encodedVideoBuffer->SetLength(encodedImage._length); -// _encodedVideoBuffer->SetFrameType(encodedImage._frameType); + // TODO(mikhal): Update frame type API. 
+ // _encodedVideoBuffer->SetFrameType(encodedImage._frameType); _encodedVideoBuffer->SetWidth( (WebRtc_UWord16)encodedImage._encodedWidth); _encodedVideoBuffer->SetHeight( @@ -109,12 +110,9 @@ UnitTestEncodeCompleteCallback::Encoded(EncodedImage& encodedImage, return 0; } -WebRtc_Word32 UnitTestDecodeCompleteCallback::Decoded(VideoFrame& image) +WebRtc_Word32 UnitTestDecodeCompleteCallback::Decoded(I420VideoFrame& image) { - _decodedVideoBuffer->CopyFrame(image.Length(), image.Buffer()); - _decodedVideoBuffer->SetWidth(image.Width()); - _decodedVideoBuffer->SetHeight(image.Height()); - _decodedVideoBuffer->SetTimeStamp(image.TimeStamp()); + _decodedVideoBuffer->CopyFrame(image); _decodeComplete = true; return 0; } @@ -155,7 +153,7 @@ UnitTest::WaitForEncodedFrame() const { if (_encodeCompleteCallback->EncodeComplete()) { - return _encodedVideoBuffer.Length(); + return _encodedVideoBuffer.Length(); } } return 0; @@ -169,7 +167,8 @@ UnitTest::WaitForDecodedFrame() const { if (_decodeCompleteCallback->DecodeComplete()) { - return _decodedVideoBuffer.Length(); + return webrtc::CalcBufferSize(kI420, _decodedVideoBuffer.width(), + _decodedVideoBuffer.height()); } } return 0; @@ -224,12 +223,16 @@ UnitTest::Setup() _inst.codecSpecific.VP8.denoisingOn = true; // Get input frame. - _inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame); ASSERT_TRUE(fread(_refFrame, 1, _lengthSourceFrame, _sourceFile) == _lengthSourceFrame); - _inputVideoBuffer.CopyFrame(_lengthSourceFrame, _refFrame); - _inputVideoBuffer.SetWidth(_source->GetWidth()); - _inputVideoBuffer.SetHeight(_source->GetHeight()); + int size_y = _inst.width * _inst.height; + int size_uv = ((_inst.width + 1) / 2) * ((_inst.height + 1) / 2); + _inputVideoBuffer.CreateFrame(size_y, _refFrame, + size_uv, _refFrame + size_y, + size_uv, _refFrame + size_y + size_uv, + _inst.width, _inst.height, + _inst.width, + (_inst.width + 1) / 2, (_inst.width + 1) / 2); rewind(_sourceFile); // Get a reference encoded frame. @@ -244,7 +247,9 @@ UnitTest::Setup() memcpy(_refEncFrame, _encodedVideoBuffer.Buffer(), _refEncFrameLength); // Get a reference decoded frame. 
- _decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame); + _decodedVideoBuffer.CreateEmptyFrame(_inst.width, _inst.height, _inst.width, + (_inst.width + 1) / 2, + (_inst.width + 1) / 2); EXPECT_TRUE(_decoder->InitDecode(&_inst, 1) == WEBRTC_VIDEO_CODEC_OK); ASSERT_FALSE(SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK); @@ -255,12 +260,15 @@ UnitTest::Setup() if (i > 0) { // Insert yet another frame - _inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame); ASSERT_TRUE(fread(_refFrame, 1, _lengthSourceFrame, _sourceFile) == _lengthSourceFrame); - _inputVideoBuffer.CopyFrame(_lengthSourceFrame, _refFrame); - _inputVideoBuffer.SetWidth(_source->GetWidth()); - _inputVideoBuffer.SetHeight(_source->GetHeight()); + _inputVideoBuffer.CreateFrame(size_y, _refFrame, + size_uv, _refFrame + size_y, + size_uv, _refFrame + size_y + size_uv, + _inst.width, _inst.height, + _inst.width, + (_inst.width + 1) / 2, + (_inst.width + 1) / 2); _encoder->Encode(_inputVideoBuffer, NULL, NULL); ASSERT_TRUE(WaitForEncodedFrame() > 0); } @@ -274,7 +282,7 @@ UnitTest::Setup() } rewind(_sourceFile); EXPECT_TRUE(frameLength == _lengthSourceFrame); - memcpy(_refDecFrame, _decodedVideoBuffer.Buffer(), _lengthSourceFrame); + ExtractBuffer(_decodedVideoBuffer, _lengthSourceFrame, _refDecFrame); } void @@ -342,7 +350,7 @@ UnitTest::Perform() { UnitTest::Setup(); int frameLength; - VideoFrame inputImage; + I420VideoFrame inputImage; EncodedImage encodedImage; //----- Encoder parameter tests ----- @@ -409,17 +417,20 @@ UnitTest::Perform() EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK); //-- Encode() errors -- - - // inputVideoBuffer unallocated. - _inputVideoBuffer.Free(); - inputImage.Free(); + inputImage.ResetSize(); EXPECT_TRUE(_encoder->Encode(inputImage, NULL, NULL) == WEBRTC_VIDEO_CODEC_ERR_PARAMETER); - _inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame); - _inputVideoBuffer.CopyFrame(_lengthSourceFrame, _refFrame); - _inputVideoBuffer.SetWidth(_source->GetWidth()); - _inputVideoBuffer.SetHeight(_source->GetHeight()); - + int width = _source->GetWidth(); + int half_width = (width + 1) / 2; + int height = _source->GetHeight(); + int half_height = (height + 1) / 2; + int size_y = width * height; + int size_uv = half_width * half_height; + _inputVideoBuffer.CreateFrame(size_y, _refFrame, + size_uv, _refFrame + size_y, + size_uv, _refFrame + size_y + size_uv, + width, height, + width, half_width, half_width); //----- Encoder stress tests ----- // Vary frame rate and I-frame request. @@ -539,8 +550,12 @@ UnitTest::Perform() _decoder->Decode(encodedImage, false, NULL); frameLength = WaitForDecodedFrame(); } - EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.Buffer(), frameLength, - _refDecFrame, _lengthSourceFrame) == true); + unsigned int length = CalcBufferSize(kI420, width, height); + scoped_array<uint8_t> decoded_buffer(new uint8_t[length]); + ExtractBuffer(_decodedVideoBuffer, _lengthSourceFrame, + decoded_buffer.get()); + EXPECT_TRUE(CheckIfBitExact(decoded_buffer.get(), frameLength, _refDecFrame, + _lengthSourceFrame) == true); // Reset then decode. 
EXPECT_TRUE(_decoder->Reset() == WEBRTC_VIDEO_CODEC_OK); @@ -551,8 +566,10 @@ _decoder->Decode(encodedImage, false, NULL); frameLength = WaitForDecodedFrame(); } - EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.Buffer(), frameLength, - _refDecFrame, _lengthSourceFrame) == true); + ExtractBuffer(_decodedVideoBuffer, _lengthSourceFrame, + decoded_buffer.get()); + EXPECT_TRUE(CheckIfBitExact(decoded_buffer.get(), frameLength, + _refDecFrame, _lengthSourceFrame) == true); // Decode with other size, reset, then decode with original size again // to verify that decoder is reset to a "fresh" state upon Reset(). @@ -565,20 +582,25 @@ UnitTest::Perform() memcpy(&tempInst, &_inst, sizeof(VideoCodec)); tempInst.width /= 2; tempInst.height /= 2; + int tmpHalfWidth = (tempInst.width + 1) / 2; + int tmpHalfHeight = (tempInst.height + 1) / 2; + + int tmpSizeY = tempInst.width * tempInst.height; + int tmpSizeUv = tmpHalfWidth * tmpHalfHeight; // Encode reduced (quarter) frame size. EXPECT_TRUE(_encoder->Release() == WEBRTC_VIDEO_CODEC_OK); EXPECT_TRUE(_encoder->InitEncode(&tempInst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK); - VideoFrame tempInput; - unsigned int tmpLength = _inputVideoBuffer.Length() / 4; - tempInput.CopyFrame(tmpLength, _inputVideoBuffer.Buffer()); - tempInput.SetWidth(tempInst.width); - tempInput.SetHeight(tempInst.height); + webrtc::I420VideoFrame tempInput; + tempInput.CreateFrame(tmpSizeY, _inputVideoBuffer.buffer(kYPlane), + tmpSizeUv, _inputVideoBuffer.buffer(kUPlane), + tmpSizeUv, _inputVideoBuffer.buffer(kVPlane), + tempInst.width, tempInst.height, + tempInst.width, tmpHalfWidth, tmpHalfWidth); _encoder->Encode(tempInput, NULL, NULL); frameLength = WaitForEncodedFrame(); EXPECT_TRUE(frameLength > 0); - tempInput.Free(); // Reset then decode. EXPECT_TRUE(_decoder->Reset() == WEBRTC_VIDEO_CODEC_OK); frameLength = 0; @@ -608,9 +630,11 @@ UnitTest::Perform() } // check that decoded frame matches with reference - EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.Buffer(), frameLength, - _refDecFrame, _lengthSourceFrame) == true); - + unsigned int length = CalcBufferSize(kI420, width, height); + scoped_array<uint8_t> decoded_buffer(new uint8_t[length]); + ExtractBuffer(_decodedVideoBuffer, length, decoded_buffer.get()); + EXPECT_TRUE(CheckIfBitExact(decoded_buffer.get(), length, + _refDecFrame, _lengthSourceFrame) == true); } // Release then decode. 
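The bit-exactness checks above always flatten the decoded I420VideoFrame with ExtractBuffer() before comparing bytes, since the frame now stores three separate planes rather than one contiguous buffer. A sketch of that pattern as a self-contained helper (the function name is illustrative; it assumes the CalcBufferSize()/ExtractBuffer() API used by this patch and webrtc::scoped_array from scoped_ptr.h):

#include <string.h>
#include "common_video/interface/i420_video_frame.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "system_wrappers/interface/scoped_ptr.h"

// Illustrative helper: flattens |frame| to contiguous I420 and compares it
// byte-for-byte against a reference buffer, mirroring the
// ExtractBuffer()/CheckIfBitExact() sequence in the tests above.
bool FrameMatchesReference(const webrtc::I420VideoFrame& frame,
                           const uint8_t* ref, int ref_length) {
  int length =
      webrtc::CalcBufferSize(webrtc::kI420, frame.width(), frame.height());
  if (length != ref_length)
    return false;
  webrtc::scoped_array<uint8_t> buffer(new uint8_t[length]);
  // ExtractBuffer is assumed to return the extracted length, negative on
  // failure (zero-sized frame or undersized destination).
  if (webrtc::ExtractBuffer(frame, length, buffer.get()) < 0)
    return false;
  return memcmp(buffer.get(), ref, length) == 0;
}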
@@ -624,8 +648,9 @@ UnitTest::Perform() _decoder->Decode(encodedImage, false, NULL); frameLength = WaitForDecodedFrame(); } - EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.Buffer(), frameLength, - _refDecFrame, _lengthSourceFrame) == true); + ExtractBuffer(_decodedVideoBuffer, length, decoded_buffer.get()); + EXPECT_TRUE(CheckIfBitExact(decoded_buffer.get(), frameLength, + _refDecFrame, _lengthSourceFrame) == true); _encodedVideoBuffer.SetLength(0); delete [] tmpBuf; @@ -644,19 +669,24 @@ UnitTest::Perform() frames = 0; int frameDelay = 0; int encTimeStamp; - _decodedVideoBuffer.SetTimeStamp(0); + _decodedVideoBuffer.set_timestamp(0); while (fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile) == _lengthSourceFrame) { - _inputVideoBuffer.CopyFrame(_lengthSourceFrame, _sourceBuffer); - _inputVideoBuffer.SetTimeStamp(frames); + _inputVideoBuffer.CreateFrame(size_y, _sourceBuffer, + size_uv, _sourceBuffer + size_y, + size_uv, _sourceBuffer + size_y + size_uv, + width, height, + width, half_width, half_width); + + _inputVideoBuffer.set_timestamp(frames); ASSERT_TRUE(_encoder->Encode(_inputVideoBuffer, NULL, NULL) == WEBRTC_VIDEO_CODEC_OK); frameLength = WaitForEncodedFrame(); //ASSERT_TRUE(frameLength); EXPECT_TRUE(frameLength > 0); encTimeStamp = _encodedVideoBuffer.TimeStamp(); - EXPECT_TRUE(_inputVideoBuffer.TimeStamp() == + EXPECT_TRUE(_inputVideoBuffer.timestamp() == static_cast<unsigned int>(encTimeStamp)); frameLength = Decode(); @@ -670,7 +700,7 @@ UnitTest::Perform() { encTimeStamp = 0; } - EXPECT_TRUE(_decodedVideoBuffer.TimeStamp() == + EXPECT_TRUE(_decodedVideoBuffer.timestamp() == static_cast<unsigned int>(encTimeStamp)); frames++; } @@ -678,7 +708,6 @@ UnitTest::Perform() rewind(_sourceFile); RateControlTests(); - inputImage.Free(); Teardown(); } @@ -719,13 +748,22 @@ UnitTest::RateControlTests() { CodecSpecific_SetBitrate(_bitRate, _inst.maxFramerate); } - + int width = _source->GetWidth(); + int half_width = (width + 1) / 2; + int height = _source->GetHeight(); + int half_height = (height + 1) / 2; + int size_y = width * height; + int size_uv = half_width * half_height; while (fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile) == _lengthSourceFrame) { - _inputVideoBuffer.CopyFrame(_lengthSourceFrame, _sourceBuffer); - _inputVideoBuffer.SetTimeStamp(_inputVideoBuffer.TimeStamp() + - static_cast<WebRtc_UWord32>(9e4 / + _inputVideoBuffer.CreateFrame(size_y, _sourceBuffer, + size_uv, _sourceBuffer + size_y, + size_uv, _sourceBuffer + size_y + + size_uv, + width, height, + width, half_width, half_width); + _inputVideoBuffer.set_timestamp(static_cast<WebRtc_UWord32>(9e4 / static_cast<float>(_inst.maxFramerate))); ASSERT_EQ(_encoder->Encode(_inputVideoBuffer, NULL, NULL), WEBRTC_VIDEO_CODEC_OK); diff --git a/webrtc/modules/video_coding/codecs/test_framework/unit_test.h b/webrtc/modules/video_coding/codecs/test_framework/unit_test.h index 2ff895952..31b0af1cf 100644 --- a/webrtc/modules/video_coding/codecs/test_framework/unit_test.h +++ b/webrtc/modules/video_coding/codecs/test_framework/unit_test.h @@ -94,12 +94,12 @@ private: class UnitTestDecodeCompleteCallback : public webrtc::DecodedImageCallback { public: - UnitTestDecodeCompleteCallback(webrtc::VideoFrame* buffer) : + UnitTestDecodeCompleteCallback(webrtc::I420VideoFrame* buffer) : _decodedVideoBuffer(buffer), _decodeComplete(false) {} - WebRtc_Word32 Decoded(webrtc::VideoFrame& image); + WebRtc_Word32 Decoded(webrtc::I420VideoFrame& image); bool DecodeComplete(); private: - webrtc::VideoFrame* _decodedVideoBuffer; + webrtc::I420VideoFrame* _decodedVideoBuffer; bool _decodeComplete; 
}; diff --git a/webrtc/modules/video_coding/codecs/vp8/test/dual_decoder_test.cc b/webrtc/modules/video_coding/codecs/vp8/test/dual_decoder_test.cc index 79247597b..29ab3dee5 100644 --- a/webrtc/modules/video_coding/codecs/vp8/test/dual_decoder_test.cc +++ b/webrtc/modules/video_coding/codecs/vp8/test/dual_decoder_test.cc @@ -36,8 +36,6 @@ VP8DualDecoderTest::~VP8DualDecoderTest() _decoder2->Release(); delete _decoder2; } - - _decodedVideoBuffer2.Free(); } void @@ -46,9 +44,6 @@ VP8DualDecoderTest::Perform() _inname = webrtc::test::ProjectRootPath() + "resources/foreman_cif.yuv"; CodecSettings(352, 288, 30, _bitRate); Setup(); - _inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame); - _decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame); - _decodedVideoBuffer2.VerifyAndAllocate(_lengthSourceFrame); if(_encoder->InitEncode(&_inst, 4, 1460) < 0) { exit(EXIT_FAILURE); @@ -171,9 +166,7 @@ VP8DualDecoderTest::Decode(int lossValue) } // compare decoded images - if (!CheckIfBitExact(_decodedVideoBuffer.Buffer(), - _decodedVideoBuffer.Length(), - _decodedVideoBuffer2.Buffer(), _decodedVideoBuffer.Length())) + if (!CheckIfBitExactFrames(_decodedVideoBuffer, _decodedVideoBuffer2)) { fprintf(stderr,"\n\nClone output different from master.\n\n"); exit(EXIT_FAILURE); @@ -185,26 +178,10 @@ VP8DualDecoderTest::Decode(int lossValue) return ret; } - -bool -VP8DualDecoderTest::CheckIfBitExact(const void* ptrA, unsigned int aLengthBytes, - const void* ptrB, unsigned int bLengthBytes) +WebRtc_Word32 DualDecoderCompleteCallback::Decoded(webrtc::I420VideoFrame& + image) { - if (aLengthBytes != bLengthBytes) - { - return false; - } - - return memcmp(ptrA, ptrB, aLengthBytes) == 0; -} - -WebRtc_Word32 DualDecoderCompleteCallback::Decoded(webrtc::VideoFrame& image) -{ - _decodedVideoBuffer->VerifyAndAllocate(image.Length()); - _decodedVideoBuffer->CopyFrame(image.Length(), image.Buffer()); - _decodedVideoBuffer->SetWidth(image.Width()); - _decodedVideoBuffer->SetHeight(image.Height()); - _decodedVideoBuffer->SetTimeStamp(image.TimeStamp()); + _decodedVideoBuffer->CopyFrame(image); _decodeComplete = true; return 0; } @@ -219,3 +196,20 @@ bool DualDecoderCompleteCallback::DecodeComplete() return false; } +bool +VP8DualDecoderTest::CheckIfBitExactFrames(const webrtc::I420VideoFrame& frame1, + const webrtc::I420VideoFrame& frame2) { + for (int plane = 0; plane < webrtc::kNumOfPlanes; plane++) { + webrtc::PlaneType plane_type = static_cast<webrtc::PlaneType>(plane); + int allocated_size1 = frame1.allocated_size(plane_type); + int allocated_size2 = frame2.allocated_size(plane_type); + if (allocated_size1 != allocated_size2) + return false; + const uint8_t* plane_buffer1 = frame1.buffer(plane_type); + const uint8_t* plane_buffer2 = frame2.buffer(plane_type); + if (memcmp(plane_buffer1, plane_buffer2, allocated_size1) != 0) + return false; + } + return true; +} + diff --git a/webrtc/modules/video_coding/codecs/vp8/test/dual_decoder_test.h b/webrtc/modules/video_coding/codecs/vp8/test/dual_decoder_test.h index e4e17e578..744ff854b 100644 --- a/webrtc/modules/video_coding/codecs/vp8/test/dual_decoder_test.h +++ b/webrtc/modules/video_coding/codecs/vp8/test/dual_decoder_test.h @@ -30,21 +30,21 @@ protected: virtual int Decode(int lossValue = 0); webrtc::VP8Decoder* _decoder2; - webrtc::VideoFrame _decodedVideoBuffer2; - static bool CheckIfBitExact(const void *ptrA, unsigned int aLengthBytes, - const void *ptrB, unsigned int bLengthBytes); + webrtc::I420VideoFrame _decodedVideoBuffer2; + static bool CheckIfBitExactFrames(const 
webrtc::I420VideoFrame& frame1, + const webrtc::I420VideoFrame& frame2); private: }; class DualDecoderCompleteCallback : public webrtc::DecodedImageCallback { public: - DualDecoderCompleteCallback(webrtc::VideoFrame* buffer) + DualDecoderCompleteCallback(webrtc::I420VideoFrame* buffer) : _decodedVideoBuffer(buffer), _decodeComplete(false) {} - WebRtc_Word32 Decoded(webrtc::VideoFrame& decodedImage); + WebRtc_Word32 Decoded(webrtc::I420VideoFrame& decodedImage); bool DecodeComplete(); private: - webrtc::VideoFrame* _decodedVideoBuffer; + webrtc::I420VideoFrame* _decodedVideoBuffer; bool _decodeComplete; }; diff --git a/webrtc/modules/video_coding/codecs/vp8/test/rps_test.cc b/webrtc/modules/video_coding/codecs/vp8/test/rps_test.cc index 0ca02b72f..c3213c07e 100644 --- a/webrtc/modules/video_coding/codecs/vp8/test/rps_test.cc +++ b/webrtc/modules/video_coding/codecs/vp8/test/rps_test.cc @@ -34,16 +34,12 @@ VP8RpsTest::~VP8RpsTest() { decoder2_->Release(); delete decoder2_; } - decoded_frame2_.Free(); } void VP8RpsTest::Perform() { _inname = "test/testFiles/foreman_cif.yuv"; CodecSettings(352, 288, 30, _bitRate); Setup(); - _inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame); - _decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame); - decoded_frame2_.VerifyAndAllocate(_lengthSourceFrame); // Enable RPS functionality _inst.codecSpecific.VP8.pictureLossIndicationOn = true; @@ -137,16 +133,22 @@ bool VP8RpsTest::EncodeRps(RpsDecodeCompleteCallback* decodeCallback) { size_t bytes_read = fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile); if (bytes_read < _lengthSourceFrame) return true; - _inputVideoBuffer.CopyFrame(_lengthSourceFrame, _sourceBuffer); - _inputVideoBuffer.SetTimeStamp((unsigned int) - (_encFrameCnt * 9e4 / _inst.maxFramerate)); - _inputVideoBuffer.SetWidth(_inst.width); - _inputVideoBuffer.SetHeight(_inst.height); + int half_width = (_inst.width + 1) / 2; + int half_height = (_inst.height + 1) / 2; + int size_y = _inst.width * _inst.height; + int size_uv = half_width * half_height; + _inputVideoBuffer.CreateFrame(size_y, _sourceBuffer, + size_uv, _sourceBuffer + size_y, + size_uv, _sourceBuffer + size_y + size_uv, + _inst.width, _inst.height, + _inst.width, half_width, half_width); + _inputVideoBuffer.set_timestamp((unsigned int) + (_encFrameCnt * 9e4 / _inst.maxFramerate)); if (feof(_sourceFile) != 0) { return true; } _encodeCompleteTime = 0; - _encodeTimes[_inputVideoBuffer.TimeStamp()] = tGetTime(); + _encodeTimes[_inputVideoBuffer.timestamp()] = tGetTime(); webrtc::CodecSpecificInfo* codecSpecificInfo = CreateEncoderSpecificInfo(); codecSpecificInfo->codecSpecific.VP8.pictureIdRPSI = @@ -169,11 +171,11 @@ bool VP8RpsTest::EncodeRps(RpsDecodeCompleteCallback* decodeCallback) { } if (_encodeCompleteTime > 0) { _totalEncodeTime += _encodeCompleteTime - - _encodeTimes[_inputVideoBuffer.TimeStamp()]; + _encodeTimes[_inputVideoBuffer.timestamp()]; } else { _totalEncodeTime += tGetTime() - - _encodeTimes[_inputVideoBuffer.TimeStamp()]; + _encodeTimes[_inputVideoBuffer.timestamp()]; } return false; } @@ -219,9 +221,8 @@ int VP8RpsTest::Decode(int lossValue) { // compare decoded images #if FRAME_LOSS if (!_missingFrames) { - if (!CheckIfBitExact(_decodedVideoBuffer.GetBuffer(), - _decodedVideoBuffer.GetLength(), - decoded_frame2_.GetBuffer(), _decodedVideoBuffer.GetLength())) { + if (!CheckIfBitExactFrames(_decodedVideoBuffer, + decoded_frame2_)) { fprintf(stderr,"\n\nRPS decoder different from master: %u\n\n", _framecnt); return -1; @@ -229,9 +230,7 @@ int 
VP8RpsTest::Decode(int lossValue) { } #else if (_framecnt > 0 && _framecnt % 10 != 0) { - if (!CheckIfBitExact(_decodedVideoBuffer.Buffer(), - _decodedVideoBuffer.Length(), - decoded_frame2_.Buffer(), _decodedVideoBuffer.Length())) { + if (!CheckIfBitExactFrames(_decodedVideoBuffer, decoded_frame2_)) { fprintf(stderr,"\n\nRPS decoder different from master: %u\n\n", _framecnt); return -1; @@ -247,24 +246,30 @@ int VP8RpsTest::Decode(int lossValue) { return 0; } - bool -VP8RpsTest::CheckIfBitExact(const void* ptrA, unsigned int aLengthBytes, - const void* ptrB, unsigned int bLengthBytes) { - if (aLengthBytes != bLengthBytes) - return false; - return memcmp(ptrA, ptrB, aLengthBytes) == 0; +VP8RpsTest::CheckIfBitExactFrames(const webrtc::I420VideoFrame& frame1, + const webrtc::I420VideoFrame& frame2) { + for (int plane = 0; plane < webrtc::kNumOfPlanes; plane++) { + webrtc::PlaneType plane_type = static_cast<webrtc::PlaneType>(plane); + int allocated_size1 = frame1.allocated_size(plane_type); + int allocated_size2 = frame2.allocated_size(plane_type); + if (allocated_size1 != allocated_size2) + return false; + const uint8_t* plane_buffer1 = frame1.buffer(plane_type); + const uint8_t* plane_buffer2 = frame2.buffer(plane_type); + if (memcmp(plane_buffer1, plane_buffer2, allocated_size1) != 0) + return false; + } + return true; } -RpsDecodeCompleteCallback::RpsDecodeCompleteCallback(webrtc::VideoFrame* buffer) +RpsDecodeCompleteCallback::RpsDecodeCompleteCallback(webrtc::I420VideoFrame* + buffer) : decoded_frame_(buffer), - decode_complete_(false), - last_decoded_picture_id_(0), - last_decoded_ref_picture_id_(0), - updated_ref_picture_id_(false) { -} + decode_complete_(false) {} -WebRtc_Word32 RpsDecodeCompleteCallback::Decoded(webrtc::VideoFrame& image) { +WebRtc_Word32 RpsDecodeCompleteCallback::Decoded(webrtc::I420VideoFrame& + image) { decode_complete_ = true; return decoded_frame_->CopyFrame(image); } diff --git a/webrtc/modules/video_coding/codecs/vp8/test/rps_test.h b/webrtc/modules/video_coding/codecs/vp8/test/rps_test.h index b00e773b8..2e3149ca0 100644 --- a/webrtc/modules/video_coding/codecs/vp8/test/rps_test.h +++ b/webrtc/modules/video_coding/codecs/vp8/test/rps_test.h @@ -11,6 +11,7 @@ #ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_RPS_TEST_H_ #define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_RPS_TEST_H_ +#include "common_video/interface/i420_video_frame.h" #include "vp8.h" #include "normal_async_test.h" @@ -28,18 +29,18 @@ class VP8RpsTest : public VP8NormalAsyncTest { virtual bool EncodeRps(RpsDecodeCompleteCallback* decodeCallback); virtual int Decode(int lossValue = 0); - static bool CheckIfBitExact(const void *ptrA, unsigned int aLengthBytes, - const void *ptrB, unsigned int bLengthBytes); + static bool CheckIfBitExactFrames(const webrtc::I420VideoFrame& frame1, + const webrtc::I420VideoFrame& frame2); webrtc::VP8Decoder* decoder2_; - webrtc::VideoFrame decoded_frame2_; + webrtc::I420VideoFrame decoded_frame2_; bool sli_; }; class RpsDecodeCompleteCallback : public webrtc::DecodedImageCallback { public: - RpsDecodeCompleteCallback(webrtc::VideoFrame* buffer); - WebRtc_Word32 Decoded(webrtc::VideoFrame& decodedImage); + RpsDecodeCompleteCallback(webrtc::I420VideoFrame* buffer); + WebRtc_Word32 Decoded(webrtc::I420VideoFrame& decodedImage); bool DecodeComplete(); WebRtc_Word32 ReceivedDecodedReferenceFrame(const WebRtc_UWord64 picture_id); WebRtc_Word32 ReceivedDecodedFrame(const WebRtc_UWord64 picture_id); @@ -47,7 +48,7 @@ class RpsDecodeCompleteCallback : public webrtc::DecodedImageCallback { 
WebRtc_UWord64 LastDecodedRefPictureId(bool *updated); private: - webrtc::VideoFrame* decoded_frame_; + webrtc::I420VideoFrame* decoded_frame_; bool decode_complete_; WebRtc_UWord64 last_decoded_picture_id_; WebRtc_UWord64 last_decoded_ref_picture_id_; diff --git a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc index 5f5a6bc18..869a697c8 100644 --- a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc +++ b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc @@ -324,13 +324,13 @@ uint32_t VP8EncoderImpl::MaxIntraTarget(uint32_t optimalBuffersize) { return (targetPct < minIntraTh) ? minIntraTh: targetPct; } -int VP8EncoderImpl::Encode(const VideoFrame& input_image, +int VP8EncoderImpl::Encode(const I420VideoFrame& input_image, const CodecSpecificInfo* codec_specific_info, const std::vector<VideoFrameType>* frame_types) { if (!inited_) { return WEBRTC_VIDEO_CODEC_UNINITIALIZED; } - if (input_image.Buffer() == NULL) { + if (input_image.IsZeroSize()) { return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } if (encoded_complete_callback_ == NULL) { @@ -344,20 +344,18 @@ int VP8EncoderImpl::Encode(const VideoFrame& input_image, } // Check for change in frame size. - if (input_image.Width() != codec_.width || - input_image.Height() != codec_.height) { - int ret = UpdateCodecFrameSize(input_image.Width(), input_image.Height()); + if (input_image.width() != codec_.width || + input_image.height() != codec_.height) { + int ret = UpdateCodecFrameSize(input_image.width(), input_image.height()); if (ret < 0) { return ret; } } // Image in vpx_image_t format. - uint8_t* buffer = input_image.Buffer(); - uint32_t v_plane_loc = codec_.height * codec_.width + - ((codec_.width + 1) >> 1) * ((codec_.height + 1) >> 1); - raw_->planes[PLANE_Y] = buffer; - raw_->planes[PLANE_U] = &buffer[codec_.width * codec_.height]; - raw_->planes[PLANE_V] = &buffer[v_plane_loc]; + // Input image is const. VP8's raw image is not defined as const. 
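// A note on the assignments below (a sketch of the intended wiring, not patch
// code): the encoder wrapper points libvpx's vpx_image_t at the frame's
// planes instead of copying them, e.g.
//
//   vpx_image_t* raw = ...;  // pre-allocated with matching width/height
//   raw->planes[PLANE_Y] = const_cast<uint8_t*>(frame.buffer(kYPlane));
//   raw->planes[PLANE_U] = const_cast<uint8_t*>(frame.buffer(kUPlane));
//   raw->planes[PLANE_V] = const_cast<uint8_t*>(frame.buffer(kVPlane));
//
// buffer() returns const uint8_t*, while vpx_image_t only exposes mutable
// plane pointers; the const_cast is safe only on the assumption, made by this
// patch, that libvpx treats the input planes as read-only during encoding.
// The frame's strides also have to match what raw_ was configured with, since
// libvpx walks the planes using its own stride values.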
+ raw_->planes[PLANE_Y] = const_cast<uint8_t*>(input_image.buffer(kYPlane)); + raw_->planes[PLANE_U] = const_cast<uint8_t*>(input_image.buffer(kUPlane)); + raw_->planes[PLANE_V] = const_cast<uint8_t*>(input_image.buffer(kVPlane)); int flags = 0; #if WEBRTC_LIBVPX_VERSION >= 971 @@ -379,11 +377,11 @@ int VP8EncoderImpl::Encode(const VideoFrame& input_image, codec_specific_info->codecSpecific.VP8.pictureIdRPSI); } if (codec_specific_info->codecSpecific.VP8.hasReceivedSLI) { - sendRefresh = rps_->ReceivedSLI(input_image.TimeStamp()); + sendRefresh = rps_->ReceivedSLI(input_image.timestamp()); } } flags = rps_->EncodeFlags(picture_id_, sendRefresh, - input_image.TimeStamp()); + input_image.timestamp()); } // TODO(holmer): Ideally the duration should be the timestamp diff of this @@ -456,7 +454,7 @@ void VP8EncoderImpl::PopulateCodecSpecific(CodecSpecificInfo* codec_specific, picture_id_ = (picture_id_ + 1) & 0x7FFF; // prepare next } -int VP8EncoderImpl::GetEncodedFrame(const VideoFrame& input_image) { +int VP8EncoderImpl::GetEncodedFrame(const I420VideoFrame& input_image) { vpx_codec_iter_t iter = NULL; encoded_image_._frameType = kDeltaFrame; const vpx_codec_cx_pkt_t *pkt= vpx_codec_get_cx_data(encoder_, &iter); @@ -469,7 +467,7 @@ int VP8EncoderImpl::GetEncodedFrame(const VideoFrame& input_image) { } } else if (pkt->kind == VPX_CODEC_CX_FRAME_PKT) { CodecSpecificInfo codecSpecific; - PopulateCodecSpecific(&codecSpecific, *pkt, input_image.TimeStamp()); + PopulateCodecSpecific(&codecSpecific, *pkt, input_image.timestamp()); assert(pkt->data.frame.sz <= encoded_image_._size); memcpy(encoded_image_._buffer, pkt->data.frame.buf, pkt->data.frame.sz); @@ -484,9 +482,9 @@ int VP8EncoderImpl::GetEncodedFrame(const VideoFrame& input_image) { } if (encoded_image_._length > 0) { - encoded_image_._timeStamp = input_image.TimeStamp(); + encoded_image_._timeStamp = input_image.timestamp(); // TODO(mikhal): Resolve confusion in terms. - encoded_image_.capture_time_ms_ = input_image.RenderTimeMs(); + encoded_image_.capture_time_ms_ = input_image.render_time_ms(); // Figure out where partition boundaries are located. 
RTPFragmentationHeader fragInfo; @@ -518,7 +516,7 @@ int VP8EncoderImpl::GetEncodedFrame(const VideoFrame& input_image) { } #if WEBRTC_LIBVPX_VERSION >= 971 -int VP8EncoderImpl::GetEncodedPartitions(const VideoFrame& input_image) { +int VP8EncoderImpl::GetEncodedPartitions(const I420VideoFrame& input_image) { vpx_codec_iter_t iter = NULL; int part_idx = 0; encoded_image_._length = 0; @@ -554,13 +552,13 @@ int VP8EncoderImpl::GetEncodedPartitions(const VideoFrame& input_image) { encoded_image_._frameType = kKeyFrame; rps_->EncodedKeyFrame(picture_id_); } - PopulateCodecSpecific(&codec_specific, *pkt, input_image.TimeStamp()); + PopulateCodecSpecific(&codec_specific, *pkt, input_image.timestamp()); break; } } if (encoded_image_._length > 0) { - encoded_image_._timeStamp = input_image.TimeStamp(); - encoded_image_.capture_time_ms_ = input_image.RenderTimeMs(); + encoded_image_._timeStamp = input_image.timestamp(); + encoded_image_.capture_time_ms_ = input_image.render_time_ms(); encoded_image_._encodedHeight = raw_->h; encoded_image_._encodedWidth = raw_->w; encoded_complete_callback_->Encoded(encoded_image_, &codec_specific, @@ -873,30 +871,18 @@ int VP8DecoderImpl::ReturnFrame(const vpx_image_t* img, uint32_t timestamp) { // Decoder OK and NULL image => No show frame return WEBRTC_VIDEO_CODEC_NO_OUTPUT; } - - uint32_t required_size = CalcBufferSize(kI420, img->d_w, img->d_h); - decoded_image_.VerifyAndAllocate(required_size); - - uint8_t* buf; - uint32_t pos = 0; - uint32_t plane, y; - uint8_t* buffer = decoded_image_.Buffer(); - for (plane = 0; plane < 3; plane++) { - unsigned int width = (plane ? (img->d_w + 1) >> 1 : img->d_w); - unsigned int height = (plane ? (img->d_h + 1) >> 1 : img->d_h); - buf = img->planes[plane]; - for (y = 0; y < height; y++) { - memcpy(&buffer[pos], buf, width); - pos += width; - buf += img->stride[plane]; - } - } - - // Set decoded image parameters. - decoded_image_.SetHeight(img->d_h); - decoded_image_.SetWidth(img->d_w); - decoded_image_.SetLength(CalcBufferSize(kI420, img->d_w, img->d_h)); - decoded_image_.SetTimeStamp(timestamp); + int size_y = img->stride[VPX_PLANE_Y] * img->d_h; + int size_u = img->stride[VPX_PLANE_U] * ((img->d_h + 1) / 2); + int size_v = img->stride[VPX_PLANE_V] * ((img->d_h + 1) / 2); + // TODO(mikhal): This does a copy - need to SwapBuffers. + decoded_image_.CreateFrame(size_y, img->planes[VPX_PLANE_Y], + size_u, img->planes[VPX_PLANE_U], + size_v, img->planes[VPX_PLANE_V], + img->d_w, img->d_h, + img->stride[VPX_PLANE_Y], + img->stride[VPX_PLANE_U], + img->stride[VPX_PLANE_V]); + decoded_image_.set_timestamp(timestamp); int ret = decode_complete_callback_->Decoded(decoded_image_); if (ret != 0) return ret; @@ -913,7 +899,6 @@ int VP8DecoderImpl::RegisterDecodeCompleteCallback( } int VP8DecoderImpl::Release() { - decoded_image_.Free(); if (last_keyframe_._buffer != NULL) { delete [] last_keyframe_._buffer; last_keyframe_._buffer = NULL; @@ -941,7 +926,7 @@ VideoDecoder* VP8DecoderImpl::Copy() { assert(false); return NULL; } - if (decoded_image_.Buffer() == NULL) { + if (decoded_image_.IsZeroSize()) { // Nothing has been decoded before; cannot clone. return NULL; } @@ -964,13 +949,13 @@ VideoDecoder* VP8DecoderImpl::Copy() { return NULL; } // Allocate memory for reference image copy - assert(decoded_image_.Width() > 0); - assert(decoded_image_.Height() > 0); + assert(decoded_image_.width() > 0); + assert(decoded_image_.height() > 0); assert(image_format_ > VPX_IMG_FMT_NONE); // Check if frame format has changed. 
if (ref_frame_ && - (decoded_image_.Width() != ref_frame_->img.d_w || - decoded_image_.Height() != ref_frame_->img.d_h || + (decoded_image_.width() != static_cast<int>(ref_frame_->img.d_w) || + decoded_image_.height() != static_cast<int>(ref_frame_->img.d_h) || image_format_ != ref_frame_->img.fmt)) { vpx_img_free(&ref_frame_->img); delete ref_frame_; @@ -982,12 +967,12 @@ VideoDecoder* VP8DecoderImpl::Copy() { ref_frame_ = new vpx_ref_frame_t; unsigned int align = 1; - if (decoded_image_.Width() % 32 == 0) { + if (decoded_image_.width() % 32 == 0) { align = 32; } if (!vpx_img_alloc(&ref_frame_->img, static_cast<vpx_img_fmt_t>(image_format_), - decoded_image_.Width(), decoded_image_.Height(), + decoded_image_.width(), decoded_image_.height(), align)) { assert(false); delete copy; diff --git a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.h b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.h index e1843a665..e6fbeda0d 100644 --- a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.h +++ b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.h @@ -72,7 +72,7 @@ class VP8EncoderImpl : public VP8Encoder { // WEBRTC_VIDEO_CODEC_ERROR // WEBRTC_VIDEO_CODEC_TIMEOUT - virtual int Encode(const VideoFrame& input_image, + virtual int Encode(const I420VideoFrame& input_image, const CodecSpecificInfo* codec_specific_info, const std::vector<VideoFrameType>* frame_types); @@ -115,9 +115,9 @@ class VP8EncoderImpl : public VP8Encoder { const vpx_codec_cx_pkt& pkt, uint32_t timestamp); - int GetEncodedFrame(const VideoFrame& input_image); + int GetEncodedFrame(const I420VideoFrame& input_image); - int GetEncodedPartitions(const VideoFrame& input_image); + int GetEncodedPartitions(const I420VideoFrame& input_image); // Determine maximum target for Intra frames // @@ -219,7 +219,7 @@ class VP8DecoderImpl : public VP8Decoder { int ReturnFrame(const vpx_image_t* img, uint32_t timeStamp); - VideoFrame decoded_image_; + I420VideoFrame decoded_image_; DecodedImageCallback* decode_complete_callback_; bool inited_; bool feedback_mode_; diff --git a/webrtc/modules/video_coding/main/interface/video_coding.h b/webrtc/modules/video_coding/main/interface/video_coding.h index bea0107d4..ad7edcdfb 100644 --- a/webrtc/modules/video_coding/main/interface/video_coding.h +++ b/webrtc/modules/video_coding/main/interface/video_coding.h @@ -11,6 +11,7 @@ #ifndef WEBRTC_MODULES_INTERFACE_VIDEO_CODING_H_ #define WEBRTC_MODULES_INTERFACE_VIDEO_CODING_H_ +#include "common_video/interface/i420_video_frame.h" #include "modules/interface/module.h" #include "modules/interface/module_common_types.h" #include "modules/video_coding/main/interface/video_coding_defines.h" @@ -252,7 +253,7 @@ public: // Return value : VCM_OK, on success. // < 0, on error. 
virtual WebRtc_Word32 AddVideoFrame( - const VideoFrame& videoFrame, + const I420VideoFrame& videoFrame, const VideoContentMetrics* contentMetrics = NULL, const CodecSpecificInfo* codecSpecificInfo = NULL) = 0; diff --git a/webrtc/modules/video_coding/main/interface/video_coding_defines.h b/webrtc/modules/video_coding/main/interface/video_coding_defines.h index 324b24bb3..273c625e5 100644 --- a/webrtc/modules/video_coding/main/interface/video_coding_defines.h +++ b/webrtc/modules/video_coding/main/interface/video_coding_defines.h @@ -12,6 +12,7 @@ #define WEBRTC_MODULES_INTERFACE_VIDEO_CODING_DEFINES_H_ #include "typedefs.h" +#include "common_video/interface/i420_video_frame.h" #include "modules/interface/module_common_types.h" namespace webrtc { @@ -96,7 +97,7 @@ class VCMFrameStorageCallback { // Callback class used for passing decoded frames which are ready to be rendered. class VCMReceiveCallback { public: - virtual WebRtc_Word32 FrameToRender(VideoFrame& videoFrame) = 0; + virtual WebRtc_Word32 FrameToRender(I420VideoFrame& videoFrame) = 0; virtual WebRtc_Word32 ReceivedDecodedReferenceFrame( const WebRtc_UWord64 pictureId) { return -1; diff --git a/webrtc/modules/video_coding/main/source/generic_decoder.cc b/webrtc/modules/video_coding/main/source/generic_decoder.cc index a9f9682e8..edb6a64d8 100644 --- a/webrtc/modules/video_coding/main/source/generic_decoder.cc +++ b/webrtc/modules/video_coding/main/source/generic_decoder.cc @@ -40,13 +40,13 @@ void VCMDecodedFrameCallback::SetUserReceiveCallback( _receiveCallback = receiveCallback; } -WebRtc_Word32 VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage) +WebRtc_Word32 VCMDecodedFrameCallback::Decoded(I420VideoFrame& decodedImage) { // TODO(holmer): We should improve this so that we can handle multiple // callbacks from one call to Decode(). CriticalSectionScoped cs(_critSect); VCMFrameInformation* frameInfo = static_cast<VCMFrameInformation*>( - _timestampMap.Pop(decodedImage.TimeStamp())); + _timestampMap.Pop(decodedImage.timestamp())); if (frameInfo == NULL) { // The map should never be empty or full if this callback is called. 
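VCMDecodedFrameCallback::Decoded() recovers the per-frame state it saved at encode time by popping a map keyed on the frame's RTP timestamp, which passes through the codec unchanged. A minimal sketch of that bookkeeping pattern (types simplified; the real VCMTimestampMap is fixed-size and protected by the surrounding critical section):

#include <stdint.h>
#include <map>

// Illustrative stand-in for the Pop()-based lookup above: an entry is stored
// when a frame enters the codec and consumed exactly once when the Decoded()
// callback fires for the matching RTP timestamp.
struct FrameInfo {
  int64_t decode_start_ms;
  int64_t render_time_ms;
};

class TimestampMap {
 public:
  void Add(uint32_t rtp_timestamp, const FrameInfo& info) {
    map_[rtp_timestamp] = info;
  }
  // Returns false on a miss, which corresponds to the
  // "map should never be empty" error path above.
  bool Pop(uint32_t rtp_timestamp, FrameInfo* info) {
    std::map<uint32_t, FrameInfo>::iterator it = map_.find(rtp_timestamp);
    if (it == map_.end())
      return false;
    *info = it->second;
    map_.erase(it);
    return true;
  }
 private:
  std::map<uint32_t, FrameInfo> map_;
};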
@@ -54,14 +54,14 @@ WebRtc_Word32 VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage) } _timing.StopDecodeTimer( - decodedImage.TimeStamp(), + decodedImage.timestamp(), frameInfo->decodeStartTimeMs, _clock->MillisecondTimestamp()); if (_receiveCallback != NULL) { - _frame.SwapFrame(decodedImage); - _frame.SetRenderTime(frameInfo->renderTimeMs); + _frame.SwapFrame(&decodedImage); + _frame.set_render_time_ms(frameInfo->renderTimeMs); WebRtc_Word32 callbackReturn = _receiveCallback->FrameToRender(_frame); if (callbackReturn < 0) { diff --git a/webrtc/modules/video_coding/main/source/generic_decoder.h b/webrtc/modules/video_coding/main/source/generic_decoder.h index 5299f423a..016142aa4 100644 --- a/webrtc/modules/video_coding/main/source/generic_decoder.h +++ b/webrtc/modules/video_coding/main/source/generic_decoder.h @@ -38,7 +38,7 @@ public: virtual ~VCMDecodedFrameCallback(); void SetUserReceiveCallback(VCMReceiveCallback* receiveCallback); - virtual WebRtc_Word32 Decoded(VideoFrame& decodedImage); + virtual WebRtc_Word32 Decoded(I420VideoFrame& decodedImage); virtual WebRtc_Word32 ReceivedDecodedReferenceFrame(const WebRtc_UWord64 pictureId); virtual WebRtc_Word32 ReceivedDecodedFrame(const WebRtc_UWord64 pictureId); @@ -50,7 +50,7 @@ public: private: CriticalSectionWrapper* _critSect; TickTimeBase* _clock; - VideoFrame _frame; + I420VideoFrame _frame; VCMReceiveCallback* _receiveCallback; VCMTiming& _timing; VCMTimestampMap _timestampMap; diff --git a/webrtc/modules/video_coding/main/source/generic_encoder.cc b/webrtc/modules/video_coding/main/source/generic_encoder.cc index 1bf8fbee8..727ca2537 100644 --- a/webrtc/modules/video_coding/main/source/generic_encoder.cc +++ b/webrtc/modules/video_coding/main/source/generic_encoder.cc @@ -57,7 +57,7 @@ VCMGenericEncoder::InitEncode(const VideoCodec* settings, } WebRtc_Word32 -VCMGenericEncoder::Encode(const VideoFrame& inputFrame, +VCMGenericEncoder::Encode(const I420VideoFrame& inputFrame, const CodecSpecificInfo* codecSpecificInfo, const std::vector<FrameType>* frameTypes) { std::vector<VideoFrameType> video_frame_types(frameTypes->size(), @@ -119,7 +119,7 @@ WebRtc_Word32 VCMGenericEncoder::RequestFrame( if (!frame_types) { return 0; } - VideoFrame image; + I420VideoFrame image; std::vector<VideoFrameType> video_frame_types(kVideoFrameDelta); if (frame_types) { VCMEncodedFrame::ConvertFrameTypes(*frame_types, &video_frame_types); diff --git a/webrtc/modules/video_coding/main/source/generic_encoder.h b/webrtc/modules/video_coding/main/source/generic_encoder.h index 9e8ae1694..9f7432be0 100644 --- a/webrtc/modules/video_coding/main/source/generic_encoder.h +++ b/webrtc/modules/video_coding/main/source/generic_encoder.h @@ -99,7 +99,7 @@ public: * cameraFrameRate : request or information from the remote side * frameType : The requested frame type to encode */ - WebRtc_Word32 Encode(const VideoFrame& inputFrame, + WebRtc_Word32 Encode(const I420VideoFrame& inputFrame, const CodecSpecificInfo* codecSpecificInfo, const std::vector<FrameType>* frameTypes); /** diff --git a/webrtc/modules/video_coding/main/source/video_coding_impl.cc b/webrtc/modules/video_coding/main/source/video_coding_impl.cc index 2b08ffd93..0f8f1eb45 100644 --- a/webrtc/modules/video_coding/main/source/video_coding_impl.cc +++ b/webrtc/modules/video_coding/main/source/video_coding_impl.cc @@ -9,6 +9,7 @@ */ #include "video_coding_impl.h" +#include "common_video/libyuv/include/webrtc_libyuv.h" #include "common_types.h" #include "encoded_frame.h" #include "jitter_buffer.h" @@ -652,7 +653,7 @@ 
VideoCodingModuleImpl::SetVideoProtection(VCMVideoProtection videoProtection, // Add one raw video frame to the encoder, blocking. WebRtc_Word32 -VideoCodingModuleImpl::AddVideoFrame(const VideoFrame& videoFrame, +VideoCodingModuleImpl::AddVideoFrame(const I420VideoFrame& videoFrame, const VideoContentMetrics* contentMetrics, const CodecSpecificInfo* codecSpecificInfo) { @@ -685,10 +686,10 @@ VideoCodingModuleImpl::AddVideoFrame(const VideoFrame& videoFrame, &_nextFrameTypes); if (_encoderInputFile != NULL) { - if (fwrite(videoFrame.Buffer(), 1, videoFrame.Length(), - _encoderInputFile) != videoFrame.Length()) { - return -1; - } + if (PrintI420VideoFrame(videoFrame, _encoderInputFile) < 0) + { + return -1; + } } if (ret < 0) { diff --git a/webrtc/modules/video_coding/main/source/video_coding_impl.h b/webrtc/modules/video_coding/main/source/video_coding_impl.h index ed42ced5b..80e3c8192 100644 --- a/webrtc/modules/video_coding/main/source/video_coding_impl.h +++ b/webrtc/modules/video_coding/main/source/video_coding_impl.h @@ -144,7 +144,7 @@ public: // Add one raw video frame to the encoder, blocking. virtual WebRtc_Word32 AddVideoFrame( - const VideoFrame& videoFrame, + const I420VideoFrame& videoFrame, const VideoContentMetrics* _contentMetrics = NULL, const CodecSpecificInfo* codecSpecificInfo = NULL); diff --git a/webrtc/modules/video_coding/main/source/video_coding_impl_unittest.cc b/webrtc/modules/video_coding/main/source/video_coding_impl_unittest.cc index bfb8227e4..43cca9862 100644 --- a/webrtc/modules/video_coding/main/source/video_coding_impl_unittest.cc +++ b/webrtc/modules/video_coding/main/source/video_coding_impl_unittest.cc @@ -54,7 +54,6 @@ class TestVideoCodingModule : public ::testing::Test { virtual void TearDown() { VideoCodingModule::Destroy(vcm_); - input_frame_.Free(); } void ExpectIntraRequest(int stream) { @@ -88,7 +87,7 @@ class TestVideoCodingModule : public ::testing::Test { VideoCodingModule* vcm_; NiceMock encoder_; - VideoFrame input_frame_; + I420VideoFrame input_frame_; VideoCodec settings_; }; diff --git a/webrtc/modules/video_coding/main/test/codec_database_test.cc b/webrtc/modules/video_coding/main/test/codec_database_test.cc index 98671ea52..06c038c56 100644 --- a/webrtc/modules/video_coding/main/test/codec_database_test.cc +++ b/webrtc/modules/video_coding/main/test/codec_database_test.cc @@ -114,15 +114,20 @@ CodecDataBaseTest::Perform(CmdArgs& args) // registering the callback - encode and decode with the same vcm (could be later changed) _encodeCompleteCallback->RegisterReceiverVCM(_vcm); // preparing a frame to be encoded - VideoFrame sourceFrame; - sourceFrame.VerifyAndAllocate(_lengthSourceFrame); WebRtc_UWord8* tmpBuffer = new WebRtc_UWord8[_lengthSourceFrame]; TEST(fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile) > 0); - sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer); - sourceFrame.SetHeight(_height); - sourceFrame.SetWidth(_width); + I420VideoFrame sourceFrame; + int half_width = (_width + 1) / 2; + int half_height = (_height + 1) / 2; + int size_y = _width * _height; + int size_uv = half_width * half_height; + sourceFrame.CreateFrame(size_y, tmpBuffer, + size_uv, tmpBuffer + size_y, + size_uv, tmpBuffer + size_y + size_uv, + _width, _height, + _width, half_width, half_width); _timeStamp += (WebRtc_UWord32)(9e4 / _frameRate); - sourceFrame.SetTimeStamp(_timeStamp); + sourceFrame.set_timestamp(_timeStamp); // Encoder registration TEST (VideoCodingModule::NumberOfCodecs() > 0); TEST(VideoCodingModule::Codec(-1, &sendCodec) < 0); @@ 
-199,7 +204,7 @@ CodecDataBaseTest::Perform(CmdArgs& args) TEST(_vcm->Decode() == VCM_OK); waitEvent->Wait(33); _timeStamp += (WebRtc_UWord32)(9e4 / _frameRate); - sourceFrame.SetTimeStamp(_timeStamp); + sourceFrame.set_timestamp(_timeStamp); TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK); TEST(_vcm->Decode() == VCM_OK); @@ -234,14 +239,14 @@ CodecDataBaseTest::Perform(CmdArgs& args) TEST(_vcm->ResetDecoder() == VCM_OK); waitEvent->Wait(33); _timeStamp += (WebRtc_UWord32)(9e4 / _frameRate); - sourceFrame.SetTimeStamp(_timeStamp); + sourceFrame.set_timestamp(_timeStamp); TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK); // Try to decode a delta frame. Should get a warning since we have enabled the "require key frame" setting // and because no frame type request callback has been registered. TEST(_vcm->Decode() == VCM_MISSING_CALLBACK); TEST(_vcm->IntraFrameRequest(0) == VCM_OK); _timeStamp += (WebRtc_UWord32)(9e4 / _frameRate); - sourceFrame.SetTimeStamp(_timeStamp); + sourceFrame.set_timestamp(_timeStamp); TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK); TEST(_vcm->Decode() == VCM_OK); @@ -254,13 +259,13 @@ CodecDataBaseTest::Perform(CmdArgs& args) TEST(_vcm->IntraFrameRequest(0) == VCM_OK); waitEvent->Wait(33); _timeStamp += (WebRtc_UWord32)(9e4 / _frameRate); - sourceFrame.SetTimeStamp(_timeStamp); + sourceFrame.set_timestamp(_timeStamp); TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK); TEST(_vcm->Decode() == VCM_OK); TEST(_vcm->RegisterReceiveCodec(&sendCodec, 1) == VCM_OK); waitEvent->Wait(33); _timeStamp += (WebRtc_UWord32)(9e4 / _frameRate); - sourceFrame.SetTimeStamp(_timeStamp); + sourceFrame.set_timestamp(_timeStamp); TEST(_vcm->IntraFrameRequest(0) == VCM_OK); TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK); TEST(_vcm->Decode() == VCM_OK); @@ -280,7 +285,6 @@ CodecDataBaseTest::Perform(CmdArgs& args) rewind(_sourceFile); _vcm->InitializeReceiver(); _vcm->InitializeSender(); - sourceFrame.Free(); VCMDecodeCompleteCallback* decodeCallCDT = new VCMDecodeCompleteCallback(_decodedFile); VCMEncodeCompleteCallback* encodeCallCDT = new VCMEncodeCompleteCallback(_encodedFile); _vcm->RegisterReceiveCallback(decodeCallCDT); @@ -290,8 +294,8 @@ CodecDataBaseTest::Perform(CmdArgs& args) { // Register all available decoders. int i, j; - //double psnr; - sourceFrame.VerifyAndAllocate(_lengthSourceFrame); + sourceFrame.CreateEmptyFrame(_width, _height, _width, + (_width + 1) / 2, (_width + 1) / 2); _vcm->RegisterReceiveCallback(decodeCallCDT); for (i=0; i < VideoCodingModule::NumberOfCodecs(); i++) { @@ -326,17 +330,18 @@ CodecDataBaseTest::Perform(CmdArgs& args) _vcm->EnableFrameDropper(false); printf("Encoding with %s \n\n", sendCodec.plName); - for (j=0; j < int(300/VideoCodingModule::NumberOfCodecs()); j++)// assuming 300 frames, NumberOfCodecs <= 10 + // Assuming 300 frames, NumberOfCodecs <= 10. 
+ for (j=0; j < int(300/VideoCodingModule::NumberOfCodecs()); j++) { frameCnt++; TEST(fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile) > 0); - // building source frame - sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer); - sourceFrame.SetHeight(_height); - sourceFrame.SetWidth(_width); - sourceFrame.SetLength(_lengthSourceFrame); + sourceFrame.CreateFrame(size_y, tmpBuffer, + size_uv, tmpBuffer + size_y, + size_uv, tmpBuffer + size_y + size_uv, + _width, _height, + _width, half_width, half_width); _timeStamp += (WebRtc_UWord32)(9e4 / _frameRate); - sourceFrame.SetTimeStamp(_timeStamp); + sourceFrame.set_timestamp(_timeStamp); // send frame to the encoder TEST (_vcm->AddVideoFrame(sourceFrame) == VCM_OK); waitEvent->Wait(33); // was 100 @@ -373,7 +378,6 @@ CodecDataBaseTest::Perform(CmdArgs& args) } } // end: iterate codecs rewind(_sourceFile); - sourceFrame.Free(); delete [] tmpBuffer; delete decodeCallCDT; delete encodeCallCDT; diff --git a/webrtc/modules/video_coding/main/test/generic_codec_test.cc b/webrtc/modules/video_coding/main/test/generic_codec_test.cc index 773f7abe8..2220d3e3f 100644 --- a/webrtc/modules/video_coding/main/test/generic_codec_test.cc +++ b/webrtc/modules/video_coding/main/test/generic_codec_test.cc @@ -13,7 +13,7 @@ #include #include "../source/event.h" #include "rtp_rtcp.h" -#include "module_common_types.h" +#include "common_video/interface/i420_video_frame.h" #include "test_macros.h" #include "modules/video_coding/main/source/mock/fake_tick_time.h" @@ -122,8 +122,7 @@ GenericCodecTest::Perform(CmdArgs& args) _vcm->Codec(0, &_sendCodec); TEST(_vcm->RegisterSendCodec(&_sendCodec, 4, 1440) == VCM_OK); // sanity on encoder registration - VideoFrame sourceFrame; - sourceFrame.VerifyAndAllocate(_lengthSourceFrame); + I420VideoFrame sourceFrame; _vcm->InitializeSender(); TEST(_vcm->Codec(kVideoCodecVP8, &sendCodec) == 0); TEST(_vcm->RegisterSendCodec(&sendCodec, -1, 1440) < 0); // bad number of cores @@ -147,12 +146,16 @@ GenericCodecTest::Perform(CmdArgs& args) } WebRtc_UWord8* tmpBuffer = new WebRtc_UWord8[_lengthSourceFrame]; TEST(fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile) > 0); - // building source frame - sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer); - sourceFrame.SetHeight(_height); - sourceFrame.SetWidth(_width); - sourceFrame.SetTimeStamp(_timeStamp++); - // encode/decode + int half_width = (_width + 1) / 2; + int half_height = (_height + 1) / 2; + int size_y = _width * _height; + int size_uv = half_width * half_height; + sourceFrame.CreateFrame(size_y, tmpBuffer, + size_uv, tmpBuffer + size_y, + size_uv, tmpBuffer + size_y + size_uv, + _width, _height, + _width, half_width, half_width); + sourceFrame.set_timestamp(_timeStamp++); TEST(_vcm->AddVideoFrame(sourceFrame) < 0 ); // encoder uninitialized _vcm->InitializeReceiver(); TEST(_vcm->SetChannelParameters(100, 0, 0) < 0);// setting rtt when receiver uninitialized @@ -162,7 +165,6 @@ GenericCodecTest::Perform(CmdArgs& args) /**************************************/ //Register both encoder and decoder, reset decoder - encode, set up decoder, reset encoder - decode. 
rewind(_sourceFile); - sourceFrame.Free(); _vcm->InitializeReceiver(); _vcm->InitializeSender(); NumberOfCodecs = _vcm->NumberOfCodecs(); @@ -195,11 +197,13 @@ GenericCodecTest::Perform(CmdArgs& args) for (i = 0; i < _frameRate; i++) { TEST(fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile) > 0); - sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer); - sourceFrame.SetHeight(_height); - sourceFrame.SetWidth(_width); + sourceFrame.CreateFrame(size_y, tmpBuffer, + size_uv, tmpBuffer + size_y, + size_uv, tmpBuffer + size_y + size_uv, + _width, _height, + _width, half_width, half_width); _timeStamp += (WebRtc_UWord32)(9e4 / static_cast(_frameRate)); - sourceFrame.SetTimeStamp(_timeStamp); + sourceFrame.set_timestamp(_timeStamp); TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK); IncrementDebugClock(_frameRate); _vcm->Process(); @@ -245,7 +249,7 @@ GenericCodecTest::Perform(CmdArgs& args) TEST(_vcm->RegisterReceiveCodec(&sendCodec, 1) == VCM_OK); TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK); _timeStamp += (WebRtc_UWord32)(9e4 / static_cast(_frameRate)); - sourceFrame.SetTimeStamp(_timeStamp); + sourceFrame.set_timestamp(_timeStamp); // First packet of a subsequent frame required before the jitter buffer // will allow decoding an incomplete frame. TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK); @@ -269,8 +273,8 @@ GenericCodecTest::Perform(CmdArgs& args) _vcm->InitializeSender(); _vcm->InitializeReceiver(); rewind(_sourceFile); - sourceFrame.Free(); - sourceFrame.VerifyAndAllocate(_lengthSourceFrame); + sourceFrame.CreateEmptyFrame(_width, _height, _width, + (_width + 1) / 2, (_width + 1) / 2); const float bitRate[] = {100, 400, 600, 1000, 2000}; const float nBitrates = sizeof(bitRate)/sizeof(*bitRate); float _bitRate = 0; @@ -315,11 +319,14 @@ GenericCodecTest::Perform(CmdArgs& args) _lengthSourceFrame) { _frameCnt++; - sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer); - sourceFrame.SetHeight(_height); - sourceFrame.SetWidth(_width); + sourceFrame.CreateFrame(size_y, tmpBuffer, + size_uv, tmpBuffer + size_y, + size_uv, tmpBuffer + size_y + size_uv, + _width, _height, + _width, (_width + 1) / 2, + (_width + 1) / 2); _timeStamp += (WebRtc_UWord32)(9e4 / static_cast(_frameRate)); - sourceFrame.SetTimeStamp(_timeStamp); + sourceFrame.set_timestamp(_timeStamp); ret = _vcm->AddVideoFrame(sourceFrame); IncrementDebugClock(_frameRate); @@ -364,8 +371,6 @@ GenericCodecTest::Perform(CmdArgs& args) /* Encoder Pipeline Delay Test */ /******************************/ _vcm->InitializeSender(); - sourceFrame.Free(); - sourceFrame.VerifyAndAllocate(_lengthSourceFrame); NumberOfCodecs = _vcm->NumberOfCodecs(); bool encodeComplete = false; // going over all available codecs @@ -383,11 +388,13 @@ GenericCodecTest::Perform(CmdArgs& args) { TEST(fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile) > 0); _frameCnt++; - sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer); - sourceFrame.SetHeight(_height); - sourceFrame.SetWidth(_width); + sourceFrame.CreateFrame(size_y, tmpBuffer, + size_uv, tmpBuffer + size_y, + size_uv, tmpBuffer + size_y + size_uv, + _width, _height, + _width, half_width, half_width); _timeStamp += (WebRtc_UWord32)(9e4 / static_cast(_frameRate)); - sourceFrame.SetTimeStamp(_timeStamp); + sourceFrame.set_timestamp(_timeStamp); _vcm->AddVideoFrame(sourceFrame); encodeComplete = _encodeCompleteCallback->EncodeComplete(); } // first frame encoded @@ -410,47 +417,6 @@ GenericCodecTest::Perform(CmdArgs& args) VCMRTPEncodeCompleteCallback encCompleteCallback(&rtpModule); 
_vcm->InitializeSender(); - // TEST DISABLED FOR NOW SINCE VP8 DOESN'T HAVE THIS FEATURE - -// sourceFrame.Free(); -// sourceFrame.VerifyAndAllocate(_lengthSourceFrame); -// NumberOfCodecs = _vcm->NumberOfCodecs(); -// WebRtc_UWord32 targetPayloadSize = 500; -// rtpModule.SetMaxTransferUnit(targetPayloadSize); -// // going over all available codecs -// for (int k = 0; k < NumberOfCodecs; k++) -// { -// _vcm->Codec(k, &_sendCodec); -// if (strncmp(_sendCodec.plName, "VP8", 3) == 0) -// { -// // Only test with VP8 -// continue; -// } -// rtpModule.RegisterSendPayload(_sendCodec.plName, _sendCodec.plType); -// // Make sure we only get one NAL unit per packet -// _vcm->InitializeSender(); -// _vcm->RegisterSendCodec(&_sendCodec, 4, targetPayloadSize); -// sendCallback.SetMaxPayloadSize(targetPayloadSize); -// _vcm->RegisterTransportCallback(&encCompleteCallback); -// sendCallback.Reset(); -// _frameCnt = 0; -// rewind(_sourceFile); -// while (!feof(_sourceFile)) -// { -// fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile); -// _frameCnt++; -// sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer); -// sourceFrame.SetHeight(_height); -// sourceFrame.SetWidth(_width); -// _timeStamp += (WebRtc_UWord32)(9e4 / static_cast(_frameRate)); -// sourceFrame.SetTimeStamp(_timeStamp); -// ret = _vcm->AddVideoFrame(sourceFrame); -// } // first frame encoded -// printf ("\n Codec type = %s \n",_sendCodec.plName); -// printf(" Average payload size = %f bytes, target = %u bytes\n", sendCallback.AveragePayloadSize(), targetPayloadSize); -// } // end for all codecs - - // Test temporal decimation settings for (int k = 0; k < NumberOfCodecs; k++) { @@ -474,13 +440,14 @@ GenericCodecTest::Perform(CmdArgs& args) _vcm->RegisterSendStatisticsCallback(&sendStats); rewind(_sourceFile); while (fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile) == - _lengthSourceFrame) - { - sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer); - sourceFrame.SetHeight(_height); - sourceFrame.SetWidth(_width); + _lengthSourceFrame) { + sourceFrame.CreateFrame(size_y, tmpBuffer, + size_uv, tmpBuffer + size_y, + size_uv, tmpBuffer + size_y + size_uv, + _width, _height, + _width, half_width, half_width); _timeStamp += (WebRtc_UWord32)(9e4 / static_cast(_frameRate)); - sourceFrame.SetTimeStamp(_timeStamp); + sourceFrame.set_timestamp(_timeStamp); ret = _vcm->AddVideoFrame(sourceFrame); if (_vcm->TimeUntilNextProcess() <= 0) { diff --git a/webrtc/modules/video_coding/main/test/media_opt_test.cc b/webrtc/modules/video_coding/main/test/media_opt_test.cc index 692305b26..8d398d234 100644 --- a/webrtc/modules/video_coding/main/test/media_opt_test.cc +++ b/webrtc/modules/video_coding/main/test/media_opt_test.cc @@ -290,8 +290,7 @@ MediaOptTest::Perform() } // START TEST - VideoFrame sourceFrame; - sourceFrame.VerifyAndAllocate(_lengthSourceFrame); + I420VideoFrame sourceFrame; WebRtc_UWord8* tmpBuffer = new WebRtc_UWord8[_lengthSourceFrame]; _vcm->SetChannelParameters((WebRtc_UWord32)_bitRate, (WebRtc_UWord8)_lossRate, _rttMS); _vcm->RegisterReceiveCallback(&receiveCallback); @@ -299,17 +298,22 @@ MediaOptTest::Perform() _frameCnt = 0; _sumEncBytes = 0.0; _numFramesDropped = 0; + int half_width = (_width + 1) / 2; + int half_height = (_height + 1) / 2; + int size_y = _width * _height; + int size_uv = half_width * half_height; while (feof(_sourceFile)== 0) { TEST(fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile) > 0); _frameCnt++; - - sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer); - sourceFrame.SetHeight(_height); - 
sourceFrame.SetWidth(_width); + sourceFrame.CreateFrame(size_y, tmpBuffer, + size_uv, tmpBuffer + size_y, + size_uv, tmpBuffer + size_y + size_uv, + _width, _height, + _width, half_width, half_width); _timeStamp += (WebRtc_UWord32)(9e4 / static_cast(_frameRate)); - sourceFrame.SetTimeStamp(_timeStamp); + sourceFrame.set_timestamp(_timeStamp); TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK); // inform RTP Module of error resilience features //_rtp->SetFECCodeRate(protectionCallback.FECKeyRate(),protectionCallback.FECDeltaRate()); @@ -331,8 +335,7 @@ MediaOptTest::Perform() else { // write frame to file - if (fwrite(sourceFrame.Buffer(), 1, sourceFrame.Length(), - _actualSourceFile) != sourceFrame.Length()) { + if (PrintI420VideoFrame(sourceFrame, _actualSourceFile) < 0) { return -1; } } diff --git a/webrtc/modules/video_coding/main/test/mt_rx_tx_test.cc b/webrtc/modules/video_coding/main/test/mt_rx_tx_test.cc index 3eac93912..6ec938981 100644 --- a/webrtc/modules/video_coding/main/test/mt_rx_tx_test.cc +++ b/webrtc/modules/video_coding/main/test/mt_rx_tx_test.cc @@ -34,12 +34,11 @@ MainSenderThread(void* obj) SendSharedState* state = static_cast(obj); EventWrapper& waitEvent = *EventWrapper::Create(); // preparing a frame for encoding - VideoFrame sourceFrame; + I420VideoFrame sourceFrame; WebRtc_Word32 width = state->_args.width; WebRtc_Word32 height = state->_args.height; float frameRate = state->_args.frameRate; WebRtc_Word32 lengthSourceFrame = 3*width*height/2; - sourceFrame.VerifyAndAllocate(lengthSourceFrame); WebRtc_UWord8* tmpBuffer = new WebRtc_UWord8[lengthSourceFrame]; if (state->_sourceFile == NULL) @@ -58,11 +57,17 @@ MainSenderThread(void* obj) TEST(fread(tmpBuffer, 1, lengthSourceFrame,state->_sourceFile) > 0 || feof(state->_sourceFile)); state->_frameCnt++; - sourceFrame.CopyFrame(lengthSourceFrame, tmpBuffer); - sourceFrame.SetHeight(height); - sourceFrame.SetWidth(width); + int size_y = width * height; + int half_width = (width + 1) / 2; + int half_height = (height + 1) / 2; + int size_uv = half_width * half_height; + sourceFrame.CreateFrame(size_y, tmpBuffer, + size_uv, tmpBuffer + size_y, + size_uv, tmpBuffer + size_y + size_uv, + width, height, + width, half_width, half_width); state->_timestamp += (WebRtc_UWord32)(9e4 / frameRate); - sourceFrame.SetTimeStamp(state->_timestamp); + sourceFrame.set_timestamp(state->_timestamp); WebRtc_Word32 ret = state->_vcm.AddVideoFrame(sourceFrame); if (ret < 0) diff --git a/webrtc/modules/video_coding/main/test/normal_test.cc b/webrtc/modules/video_coding/main/test/normal_test.cc index bd3776678..2eabc1043 100644 --- a/webrtc/modules/video_coding/main/test/normal_test.cc +++ b/webrtc/modules/video_coding/main/test/normal_test.cc @@ -16,6 +16,7 @@ #include #include "../source/event.h" +#include "common_video/libyuv/include/webrtc_libyuv.h" #include "common_types.h" #include "modules/video_coding/main/source/mock/fake_tick_time.h" #include "test_callbacks.h" @@ -152,13 +153,13 @@ VCMNTDecodeCompleCallback::~VCMNTDecodeCompleCallback() fclose(_decodedFile); } WebRtc_Word32 -VCMNTDecodeCompleCallback::FrameToRender(webrtc::VideoFrame& videoFrame) +VCMNTDecodeCompleCallback::FrameToRender(webrtc::I420VideoFrame& videoFrame) { - if (videoFrame.Width() != _currentWidth || - videoFrame.Height() != _currentHeight) + if (videoFrame.width() != _currentWidth || + videoFrame.height() != _currentHeight) { - _currentWidth = videoFrame.Width(); - _currentHeight = videoFrame.Height(); + _currentWidth = videoFrame.width(); + _currentHeight = 
videoFrame.height(); if (_decodedFile != NULL) { fclose(_decodedFile); @@ -166,11 +167,11 @@ VCMNTDecodeCompleCallback::FrameToRender(webrtc::VideoFrame& videoFrame) } _decodedFile = fopen(_outname.c_str(), "wb"); } - if (fwrite(videoFrame.Buffer(), 1, videoFrame.Length(), - _decodedFile) != videoFrame.Length()) { + if (PrintI420VideoFrame(videoFrame, _decodedFile) < 0) { return -1; } - _decodedBytes+= videoFrame.Length(); + _decodedBytes+= webrtc::CalcBufferSize(webrtc::kI420, + videoFrame.width(), videoFrame.height()); return VCM_OK; } @@ -270,8 +271,13 @@ NormalTest::Perform(CmdArgs& args) /////////////////////// /// Start Test /////////////////////// - VideoFrame sourceFrame; - sourceFrame.VerifyAndAllocate(_lengthSourceFrame); + I420VideoFrame sourceFrame; + int size_y = _width * _height; + int half_width = (_width + 1) / 2; + int half_height = (_height + 1) / 2; + int size_uv = half_width * half_height; + sourceFrame.CreateEmptyFrame(_width, _height, + _width, half_width, half_width); WebRtc_UWord8* tmpBuffer = new WebRtc_UWord8[_lengthSourceFrame]; double startTime = clock()/(double)CLOCKS_PER_SEC; _vcm->SetChannelParameters((WebRtc_UWord32)_bitRate, 0, 0); @@ -288,23 +294,29 @@ NormalTest::Perform(CmdArgs& args) TEST(fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile) > 0 || feof(_sourceFile)); _frameCnt++; - sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer); - sourceFrame.SetHeight(_height); - sourceFrame.SetWidth(_width); + sourceFrame.CreateFrame(size_y, tmpBuffer, + size_uv, tmpBuffer + size_y, + size_uv, tmpBuffer + size_y + size_uv, + _width, _height, + _width, half_width, half_width); _timeStamp += (WebRtc_UWord32)(9e4 / static_cast(_sendCodec.maxFramerate)); - sourceFrame.SetTimeStamp(_timeStamp); - _encodeTimes[int(sourceFrame.TimeStamp())] = clock()/(double)CLOCKS_PER_SEC; + sourceFrame.set_timestamp(_timeStamp); + _encodeTimes[int(sourceFrame.timestamp())] = + clock()/(double)CLOCKS_PER_SEC; WebRtc_Word32 ret = _vcm->AddVideoFrame(sourceFrame); - double encodeTime = clock()/(double)CLOCKS_PER_SEC - _encodeTimes[int(sourceFrame.TimeStamp())]; + double encodeTime = clock()/(double)CLOCKS_PER_SEC - + _encodeTimes[int(sourceFrame.timestamp())]; _totalEncodeTime += encodeTime; if (ret < 0) { printf("Error in AddFrame: %d\n", ret); //exit(1); } - _decodeTimes[int(sourceFrame.TimeStamp())] = clock()/(double)CLOCKS_PER_SEC; // same timestamp value for encode and decode + _decodeTimes[int(sourceFrame.timestamp())] = + clock()/(double)CLOCKS_PER_SEC; ret = _vcm->Decode(); - _totalDecodeTime += clock()/(double)CLOCKS_PER_SEC - _decodeTimes[int(sourceFrame.TimeStamp())]; + _totalDecodeTime += clock()/(double)CLOCKS_PER_SEC - + _decodeTimes[int(sourceFrame.timestamp())]; if (ret < 0) { printf("Error in Decode: %d\n", ret); diff --git a/webrtc/modules/video_coding/main/test/normal_test.h b/webrtc/modules/video_coding/main/test/normal_test.h index 982fba422..c76178b96 100644 --- a/webrtc/modules/video_coding/main/test/normal_test.h +++ b/webrtc/modules/video_coding/main/test/normal_test.h @@ -68,14 +68,14 @@ public: virtual ~VCMNTDecodeCompleCallback(); void SetUserReceiveCallback(webrtc::VCMReceiveCallback* receiveCallback); // will write decoded frame into file - WebRtc_Word32 FrameToRender(webrtc::VideoFrame& videoFrame); + WebRtc_Word32 FrameToRender(webrtc::I420VideoFrame& videoFrame); WebRtc_Word32 DecodedBytes(); private: FILE* _decodedFile; std::string _outname; - WebRtc_UWord32 _decodedBytes; - WebRtc_UWord32 _currentWidth; - WebRtc_UWord32 _currentHeight; + int 
_decodedBytes; + int _currentWidth; + int _currentHeight; }; // end of VCMDecodeCompleCallback class @@ -89,8 +89,8 @@ public: static int RunTest(CmdArgs& args); WebRtc_Word32 Perform(CmdArgs& args); // option:: turn into private and call from perform - WebRtc_UWord32 Width() const { return _width; }; - WebRtc_UWord32 Height() const { return _height; }; + int Width() const { return _width; }; + int Height() const { return _height; }; webrtc::VideoCodecType VideoType() const { return _videoType; }; @@ -118,8 +118,8 @@ protected: FILE* _decodedFile; FILE* _encodedFile; std::fstream _log; - WebRtc_UWord32 _width; - WebRtc_UWord32 _height; + int _width; + int _height; float _frameRate; float _bitRate; WebRtc_UWord32 _lengthSourceFrame; diff --git a/webrtc/modules/video_coding/main/test/quality_modes_test.cc b/webrtc/modules/video_coding/main/test/quality_modes_test.cc index 6a804768d..5ab045994 100644 --- a/webrtc/modules/video_coding/main/test/quality_modes_test.cc +++ b/webrtc/modules/video_coding/main/test/quality_modes_test.cc @@ -15,11 +15,11 @@ #include #include "../source/event.h" +#include "common_video/libyuv/include/webrtc_libyuv.h" #include "modules/video_coding/main/source/tick_time_base.h" #include "test_callbacks.h" #include "test_macros.h" #include "testsupport/metrics/video_metrics.h" -#include "common_video/libyuv/include/webrtc_libyuv.h" using namespace webrtc; @@ -215,9 +215,8 @@ QualityModesTest::Perform() // disabling internal VCM frame dropper _vcm->EnableFrameDropper(false); - VideoFrame sourceFrame; - VideoFrame *decimatedFrame = NULL; - sourceFrame.VerifyAndAllocate(_lengthSourceFrame); + I420VideoFrame sourceFrame; + I420VideoFrame *decimatedFrame = NULL; WebRtc_UWord8* tmpBuffer = new WebRtc_UWord8[_lengthSourceFrame]; double startTime = clock()/(double)CLOCKS_PER_SEC; _vcm->SetChannelParameters((WebRtc_UWord32)_bitRate, 0, 0); @@ -238,18 +237,22 @@ QualityModesTest::Perform() WebRtc_Word32 ret = 0; - _numFramesDroppedVPM = 0; - + _numFramesDroppedVPM = 0; while (feof(_sourceFile)== 0) { TEST(fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile) > 0); _frameCnt++; - sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer); - sourceFrame.SetHeight(_nativeHeight); - sourceFrame.SetWidth(_nativeWidth); + int size_y = _nativeWidth * _nativeHeight; + int size_uv = ((_nativeWidth + 1) / 2) * ((_nativeHeight + 1) / 2); + sourceFrame.CreateFrame(size_y, tmpBuffer, + size_uv, tmpBuffer + size_y, + size_uv, tmpBuffer + size_y + size_uv, + _nativeWidth, _nativeHeight, + _nativeWidth, (_nativeWidth + 1) / 2, + (_nativeWidth + 1) / 2); _timeStamp += (WebRtc_UWord32)(9e4 / static_cast(codec.maxFramerate)); - sourceFrame.SetTimeStamp(_timeStamp); + sourceFrame.set_timestamp(_timeStamp); ret = _vpm->PreprocessFrame(sourceFrame, &decimatedFrame); if (ret == 1) @@ -270,20 +273,24 @@ QualityModesTest::Perform() } // counting only encoding time - _encodeTimes[int(sourceFrame.TimeStamp())] = clock()/(double)CLOCKS_PER_SEC; + _encodeTimes[int(sourceFrame.timestamp())] = + clock()/(double)CLOCKS_PER_SEC; WebRtc_Word32 ret = _vcm->AddVideoFrame(*decimatedFrame, contentMetrics); - _totalEncodeTime += clock()/(double)CLOCKS_PER_SEC - _encodeTimes[int(sourceFrame.TimeStamp())]; + _totalEncodeTime += clock()/(double)CLOCKS_PER_SEC - + _encodeTimes[int(sourceFrame.timestamp())]; if (ret < 0) { printf("Error in AddFrame: %d\n", ret); //exit(1); } - _decodeTimes[int(sourceFrame.TimeStamp())] = clock()/(double)CLOCKS_PER_SEC; // same timestamp value for encode and decode + 
_decodeTimes[int(sourceFrame.timestamp())] = clock() / + (double)CLOCKS_PER_SEC; ret = _vcm->Decode(); - _totalDecodeTime += clock()/(double)CLOCKS_PER_SEC - _decodeTimes[int(sourceFrame.TimeStamp())]; + _totalDecodeTime += clock()/(double)CLOCKS_PER_SEC - + _decodeTimes[int(sourceFrame.timestamp())]; if (ret < 0) { printf("Error in Decode: %d\n", ret); @@ -308,7 +315,7 @@ QualityModesTest::Perform() _frameRate = frameRateUpdate[change]; codec.startBitrate = (int)_bitRate; codec.maxFramerate = (WebRtc_UWord8) _frameRate; - TEST(_vcm->RegisterSendCodec(&codec, 2, 1440) == VCM_OK);// will also set and init the desired codec + TEST(_vcm->RegisterSendCodec(&codec, 2, 1440) == VCM_OK); change++; } } @@ -326,8 +333,6 @@ QualityModesTest::Perform() return 0; } - -// implementing callback to be called from VCM to update VPM of frame rate and size QMTestVideoSettingsCallback::QMTestVideoSettingsCallback(): _vpm(NULL), _vcm(NULL) @@ -415,48 +420,32 @@ VCMQMDecodeCompleCallback::~VCMQMDecodeCompleCallback() } } WebRtc_Word32 -VCMQMDecodeCompleCallback::FrameToRender(VideoFrame& videoFrame) +VCMQMDecodeCompleCallback::FrameToRender(I420VideoFrame& videoFrame) { - if ((_origWidth == videoFrame.Width()) && (_origHeight == videoFrame.Height())) + if ((_origWidth == videoFrame.width()) && + (_origHeight == videoFrame.height())) { - if (fwrite(videoFrame.Buffer(), 1, videoFrame.Length(), - _decodedFile) != videoFrame.Length()) { + if (PrintI420VideoFrame(videoFrame, _decodedFile) < 0) { return -1; } _frameCnt++; - //printf("frame dec # %d", _frameCnt); // no need for interpolator and decBuffer if (_decBuffer != NULL) { delete [] _decBuffer; _decBuffer = NULL; } -// if (_interpolator != NULL) -// { -// deleteInterpolator(_interpolator); -// _interpolator = NULL; -// } _decWidth = 0; _decHeight = 0; } else { - if ((_decWidth != videoFrame.Width()) || (_decHeight != videoFrame.Height())) - { - _decWidth = videoFrame.Width(); - _decHeight = videoFrame.Height(); - buildInterpolator(); - } - -// interpolateFrame(_interpolator, videoFrame.Buffer(),_decBuffer); - if (fwrite(_decBuffer, 1, _origWidth*_origHeight * 3/2, - _decodedFile) != _origWidth*_origHeight * 3/2) { - return -1; - } - _frameCnt++; + // TODO(mikhal): Add support for scaling. 
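[Several hunks in this patch swap raw fwrite calls of the packed frame buffer for PrintI420VideoFrame from webrtc_libyuv. Conceptually the function writes each plane row by row so that stride padding never reaches the output file. A sketch of that idea, assuming buffer() and stride() behave as their uses elsewhere in this patch imply; DumpI420 is a hypothetical stand-in, not the library function:

    // Write an I420 frame plane by plane, 'width' bytes per row, skipping
    // any stride padding in the frame. Returns 0 on success, -1 on a
    // short write.
    #include <stdio.h>
    #include "common_video/interface/i420_video_frame.h"

    int DumpI420(const webrtc::I420VideoFrame& frame, FILE* file) {
      const webrtc::PlaneType kPlanes[3] = {
          webrtc::kYPlane, webrtc::kUPlane, webrtc::kVPlane};
      for (int p = 0; p < 3; ++p) {
        const bool luma = (kPlanes[p] == webrtc::kYPlane);
        const int width = luma ? frame.width() : (frame.width() + 1) / 2;
        const int height = luma ? frame.height() : (frame.height() + 1) / 2;
        const uint8_t* src = frame.buffer(kPlanes[p]);
        for (int row = 0; row < height; ++row) {
          if (fwrite(src, 1, width, file) != static_cast<size_t>(width))
            return -1;  // short write
          src += frame.stride(kPlanes[p]);  // advance to the next row
        }
      }
      return 0;
    }
]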
+ return -1; } - _decodedBytes += videoFrame.Length(); + _decodedBytes += CalcBufferSize(kI420, videoFrame.width(), + videoFrame.height()); return VCM_OK; } @@ -467,7 +456,8 @@ VCMQMDecodeCompleCallback::DecodedBytes() } void -VCMQMDecodeCompleCallback::SetOriginalFrameDimensions(WebRtc_Word32 width, WebRtc_Word32 height) +VCMQMDecodeCompleCallback::SetOriginalFrameDimensions(WebRtc_Word32 width, + WebRtc_Word32 height) { _origWidth = width; _origHeight = height; diff --git a/webrtc/modules/video_coding/main/test/quality_modes_test.h b/webrtc/modules/video_coding/main/test/quality_modes_test.h index 87fa01fde..5b80c8108 100644 --- a/webrtc/modules/video_coding/main/test/quality_modes_test.h +++ b/webrtc/modules/video_coding/main/test/quality_modes_test.h @@ -34,11 +34,11 @@ private: webrtc::VideoProcessingModule* _vpm; - WebRtc_UWord32 _width; - WebRtc_UWord32 _height; + int _width; + int _height; float _frameRate; - WebRtc_UWord32 _nativeWidth; - WebRtc_UWord32 _nativeHeight; + int _nativeWidth; + int _nativeHeight; float _nativeFrameRate; WebRtc_UWord32 _numFramesDroppedVPM; @@ -54,7 +54,7 @@ public: virtual ~VCMQMDecodeCompleCallback(); void SetUserReceiveCallback(webrtc::VCMReceiveCallback* receiveCallback); // will write decoded frame into file - WebRtc_Word32 FrameToRender(webrtc::VideoFrame& videoFrame); + WebRtc_Word32 FrameToRender(webrtc::I420VideoFrame& videoFrame); WebRtc_Word32 DecodedBytes(); void SetOriginalFrameDimensions(WebRtc_Word32 width, WebRtc_Word32 height); WebRtc_Word32 buildInterpolator(); @@ -62,10 +62,10 @@ private: FILE* _decodedFile; WebRtc_UWord32 _decodedBytes; // QualityModesTest& _test; - WebRtc_UWord32 _origWidth; - WebRtc_UWord32 _origHeight; - WebRtc_UWord32 _decWidth; - WebRtc_UWord32 _decHeight; + int _origWidth; + int _origHeight; + int _decWidth; + int _decHeight; // VideoInterpolator* _interpolator; WebRtc_UWord8* _decBuffer; WebRtc_UWord32 _frameCnt; // debug diff --git a/webrtc/modules/video_coding/main/test/receiver_tests.h b/webrtc/modules/video_coding/main/test/receiver_tests.h index 403b22a7e..a07aca362 100644 --- a/webrtc/modules/video_coding/main/test/receiver_tests.h +++ b/webrtc/modules/video_coding/main/test/receiver_tests.h @@ -47,7 +47,7 @@ public: virtual ~FrameReceiveCallback(); - WebRtc_Word32 FrameToRender(webrtc::VideoFrame& videoFrame); + WebRtc_Word32 FrameToRender(webrtc::I420VideoFrame& videoFrame); private: static void SplitFilename(std::string filename, std::string* basename, diff --git a/webrtc/modules/video_coding/main/test/test_callbacks.cc b/webrtc/modules/video_coding/main/test/test_callbacks.cc index 2f18dd1e9..12824ba44 100644 --- a/webrtc/modules/video_coding/main/test/test_callbacks.cc +++ b/webrtc/modules/video_coding/main/test/test_callbacks.cc @@ -12,6 +12,7 @@ #include +#include "common_video/libyuv/include/webrtc_libyuv.h" #include "modules/video_coding/main/source/tick_time_base.h" #include "rtp_dump.h" #include "test_macros.h" @@ -187,13 +188,13 @@ VCMRTPEncodeCompleteCallback::EncodeComplete() // Decoded Frame Callback Implementation WebRtc_Word32 -VCMDecodeCompleteCallback::FrameToRender(VideoFrame& videoFrame) +VCMDecodeCompleteCallback::FrameToRender(I420VideoFrame& videoFrame) { - if (fwrite(videoFrame.Buffer(), 1, videoFrame.Length(), - _decodedFile) != videoFrame.Length()) { + if (PrintI420VideoFrame(videoFrame, _decodedFile) < 0) { return -1; } - _decodedBytes+= videoFrame.Length(); + _decodedBytes+= CalcBufferSize(kI420, videoFrame.width(), + videoFrame.height()); return VCM_OK; } diff --git 
a/webrtc/modules/video_coding/main/test/test_callbacks.h b/webrtc/modules/video_coding/main/test/test_callbacks.h index 6731f8cce..f2c419bc9 100644 --- a/webrtc/modules/video_coding/main/test/test_callbacks.h +++ b/webrtc/modules/video_coding/main/test/test_callbacks.h @@ -142,7 +142,7 @@ public: _decodedFile(decodedFile), _decodedBytes(0) {} virtual ~VCMDecodeCompleteCallback() {} // Write decoded frame into file - WebRtc_Word32 FrameToRender(webrtc::VideoFrame& videoFrame); + WebRtc_Word32 FrameToRender(webrtc::I420VideoFrame& videoFrame); WebRtc_Word32 DecodedBytes(); private: FILE* _decodedFile; diff --git a/webrtc/modules/video_coding/main/test/video_rtp_play.cc b/webrtc/modules/video_coding/main/test/video_rtp_play.cc index 2b7f800ef..8a45f392a 100644 --- a/webrtc/modules/video_coding/main/test/video_rtp_play.cc +++ b/webrtc/modules/video_coding/main/test/video_rtp_play.cc @@ -8,6 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. */ +#include "common_video/libyuv/include/webrtc_libyuv.h" #include "receiver_tests.h" #include "video_coding.h" #include "rtp_rtcp.h" @@ -45,7 +46,7 @@ FrameReceiveCallback::~FrameReceiveCallback() } WebRtc_Word32 -FrameReceiveCallback::FrameToRender(VideoFrame& videoFrame) +FrameReceiveCallback::FrameToRender(I420VideoFrame& videoFrame) { if (_timingFile == NULL) { @@ -56,15 +57,16 @@ FrameReceiveCallback::FrameToRender(VideoFrame& videoFrame) return -1; } } - if (_outFile == NULL || videoFrame.Width() != width_ || - videoFrame.Height() != height_) + if (_outFile == NULL || + videoFrame.width() != static_cast(width_) || + videoFrame.height() != static_cast(height_)) { if (_outFile) { fclose(_outFile); } - printf("New size: %ux%u\n", videoFrame.Width(), videoFrame.Height()); - width_ = videoFrame.Width(); - height_ = videoFrame.Height(); + printf("New size: %ux%u\n", videoFrame.width(), videoFrame.height()); + width_ = videoFrame.width(); + height_ = videoFrame.height(); std::string filename_with_width_height = AppendWidthAndHeight( _outFilename, width_, height_); _outFile = fopen(filename_with_width_height.c_str(), "wb"); @@ -74,10 +76,9 @@ FrameReceiveCallback::FrameToRender(VideoFrame& videoFrame) } } fprintf(_timingFile, "%u, %u\n", - videoFrame.TimeStamp(), - MaskWord64ToUWord32(videoFrame.RenderTimeMs())); - if (fwrite(videoFrame.Buffer(), 1, videoFrame.Length(), - _outFile) != videoFrame.Length()) { + videoFrame.timestamp(), + MaskWord64ToUWord32(videoFrame.render_time_ms())); + if (PrintI420VideoFrame(videoFrame, _outFile) < 0) { return -1; } return 0; diff --git a/webrtc/modules/video_processing/main/interface/video_processing.h b/webrtc/modules/video_processing/main/interface/video_processing.h index 8008b035d..fbee00d93 100644 --- a/webrtc/modules/video_processing/main/interface/video_processing.h +++ b/webrtc/modules/video_processing/main/interface/video_processing.h @@ -18,6 +18,7 @@ #ifndef WEBRTC_MODULES_INTERFACE_VIDEO_PROCESSING_H #define WEBRTC_MODULES_INTERFACE_VIDEO_PROCESSING_H +#include "common_video/interface/i420_video_frame.h" #include "module.h" #include "module_common_types.h" #include "video_processing_defines.h" @@ -120,7 +121,7 @@ public: \return 0 on success, -1 on failure. */ static WebRtc_Word32 GetFrameStats(FrameStats* stats, - const VideoFrame& frame); + const I420VideoFrame& frame); /** Checks the validity of a FrameStats struct. Currently, valid implies only @@ -148,7 +149,7 @@ public: \param[in,out] frame Pointer to the video frame. 
*/ - static WebRtc_Word32 ColorEnhancement(VideoFrame* frame); + static WebRtc_Word32 ColorEnhancement(I420VideoFrame* frame); /** Increases/decreases the luminance value. @@ -162,7 +163,7 @@ public: \return 0 on success, -1 on failure. */ - static WebRtc_Word32 Brighten(VideoFrame* frame, int delta); + static WebRtc_Word32 Brighten(I420VideoFrame* frame, int delta); /** Detects and removes camera flicker from a video stream. Every frame from @@ -179,7 +180,7 @@ public: \return 0 on success, -1 on failure. */ - virtual WebRtc_Word32 Deflickering(VideoFrame* frame, + virtual WebRtc_Word32 Deflickering(I420VideoFrame* frame, FrameStats* stats) = 0; /** @@ -191,7 +192,7 @@ public: \return The number of modified pixels on success, -1 on failure. */ - virtual WebRtc_Word32 Denoising(VideoFrame* frame) = 0; + virtual WebRtc_Word32 Denoising(I420VideoFrame* frame) = 0; /** Detects if a video frame is excessively bright or dark. Returns a @@ -206,7 +207,7 @@ public: \return A member of BrightnessWarning on success, -1 on error */ - virtual WebRtc_Word32 BrightnessDetection(const VideoFrame& frame, + virtual WebRtc_Word32 BrightnessDetection(const I420VideoFrame& frame, const FrameStats& stats) = 0; /** @@ -283,8 +284,8 @@ public: \return VPM_OK on success, a negative value on error (see error codes) */ - virtual WebRtc_Word32 PreprocessFrame(const VideoFrame& frame, - VideoFrame** processedFrame) = 0; + virtual WebRtc_Word32 PreprocessFrame(const I420VideoFrame& frame, + I420VideoFrame** processedFrame) = 0; /** Return content metrics for the last processed frame diff --git a/webrtc/modules/video_processing/main/source/brighten.cc b/webrtc/modules/video_processing/main/source/brighten.cc index 68b0c2523..4c356e217 100644 --- a/webrtc/modules/video_processing/main/source/brighten.cc +++ b/webrtc/modules/video_processing/main/source/brighten.cc @@ -17,21 +17,21 @@ namespace webrtc { namespace VideoProcessing { -WebRtc_Word32 Brighten(VideoFrame* frame, int delta) { +WebRtc_Word32 Brighten(I420VideoFrame* frame, int delta) { assert(frame); - if (frame->Buffer() == NULL) { + if (frame->IsZeroSize()) { WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1, - "Null frame pointer"); + "zero size frame"); return VPM_PARAMETER_ERROR; } - if (frame->Width() <= 0 || frame->Height() <= 0) { + if (frame->width() <= 0 || frame->height() <= 0) { WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1, "Invalid frame size"); return VPM_PARAMETER_ERROR; } - int numPixels = frame->Width() * frame->Height(); + int numPixels = frame->width() * frame->height(); int lookUp[256]; for (int i = 0; i < 256; i++) { @@ -39,7 +39,7 @@ WebRtc_Word32 Brighten(VideoFrame* frame, int delta) { lookUp[i] = ((((val < 0) ? 0 : val) > 255) ? 
255 : val); } - WebRtc_UWord8* tempPtr = frame->Buffer(); + WebRtc_UWord8* tempPtr = frame->buffer(kYPlane); for (int i = 0; i < numPixels; i++) { *tempPtr = static_cast(lookUp[*tempPtr]); diff --git a/webrtc/modules/video_processing/main/source/brighten.h b/webrtc/modules/video_processing/main/source/brighten.h index 319cc6f58..2347286f6 100644 --- a/webrtc/modules/video_processing/main/source/brighten.h +++ b/webrtc/modules/video_processing/main/source/brighten.h @@ -17,7 +17,7 @@ namespace webrtc { namespace VideoProcessing { -WebRtc_Word32 Brighten(VideoFrame* frame, int delta); +WebRtc_Word32 Brighten(I420VideoFrame* frame, int delta); } // namespace VideoProcessing } // namespace webrtc diff --git a/webrtc/modules/video_processing/main/source/brightness_detection.cc b/webrtc/modules/video_processing/main/source/brightness_detection.cc index 07ca7e7c8..a6d9c3d2d 100644 --- a/webrtc/modules/video_processing/main/source/brightness_detection.cc +++ b/webrtc/modules/video_processing/main/source/brightness_detection.cc @@ -41,25 +41,18 @@ VPMBrightnessDetection::Reset() } WebRtc_Word32 -VPMBrightnessDetection::ProcessFrame(const VideoFrame& frame, +VPMBrightnessDetection::ProcessFrame(const I420VideoFrame& frame, const VideoProcessingModule::FrameStats& stats) { - if (frame.Buffer() == NULL) + if (frame.IsZeroSize()) { WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Null frame pointer"); return VPM_PARAMETER_ERROR; } - int width = frame.Width(); - int height = frame.Height(); - - if (width == 0 || height == 0) - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, - "Invalid frame size"); - return VPM_PARAMETER_ERROR; - } + int width = frame.width(); + int height = frame.height(); if (!VideoProcessingModule::ValidFrameStats(stats)) { @@ -93,11 +86,11 @@ VPMBrightnessDetection::ProcessFrame(const VideoFrame& frame, if (stats.mean < 90 || stats.mean > 170) { // Standard deviation of Y + const uint8_t* buffer = frame.buffer(kYPlane); float stdY = 0; - uint8_t* buffer = frame.Buffer(); for (int h = 0; h < height; h += (1 << stats.subSamplHeight)) { - WebRtc_UWord32 row = h*width; + int row = h*width; for (int w = 0; w < width; w += (1 << stats.subSamplWidth)) { stdY += (buffer[w + row] - stats.mean) * (buffer[w + row] - diff --git a/webrtc/modules/video_processing/main/source/brightness_detection.h b/webrtc/modules/video_processing/main/source/brightness_detection.h index 63f481695..3ea41189f 100644 --- a/webrtc/modules/video_processing/main/source/brightness_detection.h +++ b/webrtc/modules/video_processing/main/source/brightness_detection.h @@ -29,7 +29,7 @@ public: void Reset(); - WebRtc_Word32 ProcessFrame(const VideoFrame& frame, + WebRtc_Word32 ProcessFrame(const I420VideoFrame& frame, const VideoProcessingModule::FrameStats& stats); private: diff --git a/webrtc/modules/video_processing/main/source/color_enhancement.cc b/webrtc/modules/video_processing/main/source/color_enhancement.cc index 2cc39d329..a2feb98fd 100644 --- a/webrtc/modules/video_processing/main/source/color_enhancement.cc +++ b/webrtc/modules/video_processing/main/source/color_enhancement.cc @@ -18,38 +18,35 @@ namespace webrtc { namespace VideoProcessing { WebRtc_Word32 - ColorEnhancement(VideoFrame* frame) + ColorEnhancement(I420VideoFrame* frame) { assert(frame); // pointers to U and V color pixels WebRtc_UWord8* ptrU; WebRtc_UWord8* ptrV; WebRtc_UWord8 tempChroma; - const unsigned int size_y = frame->Width() * frame->Height(); - const unsigned int size_uv = 
((frame->Width() + 1) / 2) * - ((frame->Height() + 1 ) / 2); - - if (frame->Buffer() == NULL) + if (frame->IsZeroSize()) { WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1, "Null frame pointer"); return VPM_GENERAL_ERROR; } - if (frame->Width() == 0 || frame->Height() == 0) + if (frame->width() == 0 || frame->height() == 0) { WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1, "Invalid frame size"); return VPM_GENERAL_ERROR; } - + // set pointers to first U and V pixels (skip luminance) - ptrU = frame->Buffer() + size_y; - ptrV = ptrU + size_uv; + ptrU = frame->buffer(kUPlane); + ptrV = frame->buffer(kVPlane); + int size_uv = ((frame->width() + 1) / 2) * ((frame->height() + 1) / 2); // loop through all chrominance pixels and modify color - for (unsigned int ix = 0; ix < size_uv; ix++) + for (int ix = 0; ix < size_uv; ix++) { tempChroma = colorTable[*ptrU][*ptrV]; *ptrV = colorTable[*ptrV][*ptrU]; diff --git a/webrtc/modules/video_processing/main/source/color_enhancement.h b/webrtc/modules/video_processing/main/source/color_enhancement.h index 338465762..67ba1727d 100644 --- a/webrtc/modules/video_processing/main/source/color_enhancement.h +++ b/webrtc/modules/video_processing/main/source/color_enhancement.h @@ -21,7 +21,7 @@ namespace webrtc { namespace VideoProcessing { - WebRtc_Word32 ColorEnhancement(VideoFrame* frame); + WebRtc_Word32 ColorEnhancement(I420VideoFrame* frame); } } //namespace diff --git a/webrtc/modules/video_processing/main/source/content_analysis.cc b/webrtc/modules/video_processing/main/source/content_analysis.cc index 77cdcd42d..18fc4a048 100644 --- a/webrtc/modules/video_processing/main/source/content_analysis.cc +++ b/webrtc/modules/video_processing/main/source/content_analysis.cc @@ -56,25 +56,23 @@ VPMContentAnalysis::~VPMContentAnalysis() VideoContentMetrics* -VPMContentAnalysis::ComputeContentMetrics(const VideoFrame& inputFrame) +VPMContentAnalysis::ComputeContentMetrics(const I420VideoFrame& inputFrame) { - if (inputFrame.Buffer() == NULL) + if (inputFrame.IsZeroSize()) { return NULL; } // Init if needed (native dimension change) - if (_width != static_cast(inputFrame.Width()) || - _height != static_cast(inputFrame.Height())) + if (_width != inputFrame.width() || _height != inputFrame.height()) { - if (VPM_OK != Initialize(static_cast(inputFrame.Width()), - static_cast(inputFrame.Height()))) + if (VPM_OK != Initialize(inputFrame.width(), inputFrame.height())) { return NULL; } } - - _origFrame = inputFrame.Buffer(); + // Only interested in the Y plane. 
+ _origFrame = inputFrame.buffer(kYPlane); // compute spatial metrics: 3 spatial prediction errors (this->*ComputeSpatialMetrics)(); diff --git a/webrtc/modules/video_processing/main/source/content_analysis.h b/webrtc/modules/video_processing/main/source/content_analysis.h index 6724af516..385674cb7 100644 --- a/webrtc/modules/video_processing/main/source/content_analysis.h +++ b/webrtc/modules/video_processing/main/source/content_analysis.h @@ -11,6 +11,7 @@ #ifndef VPM_CONTENT_ANALYSIS_H #define VPM_CONTENT_ANALYSIS_H +#include "common_video/interface/i420_video_frame.h" #include "typedefs.h" #include "module_common_types.h" #include "video_processing_defines.h" @@ -35,7 +36,8 @@ public: // Input: new frame // Return value: pointer to structure containing content Analysis // metrics or NULL value upon error - VideoContentMetrics* ComputeContentMetrics(const VideoFrame& inputFrame); + VideoContentMetrics* ComputeContentMetrics(const I420VideoFrame& + inputFrame); // Release all allocated memory // Output: 0 if OK, negative value upon error diff --git a/webrtc/modules/video_processing/main/source/deflickering.cc b/webrtc/modules/video_processing/main/source/deflickering.cc index f17dc8d88..aaf30d7bb 100644 --- a/webrtc/modules/video_processing/main/source/deflickering.cc +++ b/webrtc/modules/video_processing/main/source/deflickering.cc @@ -89,7 +89,7 @@ VPMDeflickering::Reset() } WebRtc_Word32 -VPMDeflickering::ProcessFrame(VideoFrame* frame, +VPMDeflickering::ProcessFrame(I420VideoFrame* frame, VideoProcessingModule::FrameStats* stats) { assert(frame); @@ -103,10 +103,10 @@ VPMDeflickering::ProcessFrame(VideoFrame* frame, WebRtc_UWord16 tmpUW16; WebRtc_UWord32 tmpUW32; - int width = frame->Width(); - int height = frame->Height(); + int width = frame->width(); + int height = frame->height(); - if (frame->Buffer() == NULL) + if (frame->IsZeroSize()) { WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Null frame pointer"); @@ -114,7 +114,7 @@ VPMDeflickering::ProcessFrame(VideoFrame* frame, } // Stricter height check due to subsampling size calculation below. - if (width == 0 || height < 2) + if (height < 2) { WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Invalid frame size"); @@ -128,7 +128,7 @@ VPMDeflickering::ProcessFrame(VideoFrame* frame, return VPM_GENERAL_ERROR; } - if (PreDetection(frame->TimeStamp(), *stats) == -1) + if (PreDetection(frame->timestamp(), *stats) == -1) { return VPM_GENERAL_ERROR; } @@ -154,7 +154,7 @@ VPMDeflickering::ProcessFrame(VideoFrame* frame, for (int i = 0; i < height; i += kDownsamplingFactor) { memcpy(ySorted + sortRowIdx * width, - frame->Buffer() + i * width, width); + frame->buffer(kYPlane) + i * width, width); sortRowIdx++; } @@ -258,7 +258,7 @@ VPMDeflickering::ProcessFrame(VideoFrame* frame, } // Map to the output frame. 
- uint8_t* buffer = frame->Buffer(); + uint8_t* buffer = frame->buffer(kYPlane); for (WebRtc_UWord32 i = 0; i < ySize; i++) { buffer[i] = mapUW8[buffer[i]]; diff --git a/webrtc/modules/video_processing/main/source/deflickering.h b/webrtc/modules/video_processing/main/source/deflickering.h index dfe7d9d19..fc796ee40 100644 --- a/webrtc/modules/video_processing/main/source/deflickering.h +++ b/webrtc/modules/video_processing/main/source/deflickering.h @@ -32,7 +32,7 @@ public: void Reset(); - WebRtc_Word32 ProcessFrame(VideoFrame* frame, + WebRtc_Word32 ProcessFrame(I420VideoFrame* frame, VideoProcessingModule::FrameStats* stats); private: WebRtc_Word32 PreDetection(WebRtc_UWord32 timestamp, diff --git a/webrtc/modules/video_processing/main/source/denoising.cc b/webrtc/modules/video_processing/main/source/denoising.cc index 33608a092..f7cc32ad6 100644 --- a/webrtc/modules/video_processing/main/source/denoising.cc +++ b/webrtc/modules/video_processing/main/source/denoising.cc @@ -72,7 +72,7 @@ VPMDenoising::Reset() } WebRtc_Word32 -VPMDenoising::ProcessFrame(VideoFrame* frame) +VPMDenoising::ProcessFrame(I420VideoFrame* frame) { assert(frame); WebRtc_Word32 thevar; @@ -84,21 +84,15 @@ VPMDenoising::ProcessFrame(VideoFrame* frame) WebRtc_UWord32 tmp; WebRtc_Word32 numPixelsChanged = 0; - if (frame->Buffer() == NULL) + if (frame->IsZeroSize()) { WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, - "Null frame pointer"); + "zero size frame"); return VPM_GENERAL_ERROR; } - int width = frame->Width(); - int height = frame->Height(); - if (width == 0 || height == 0) - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, - "Invalid frame size"); - return VPM_GENERAL_ERROR; - } + int width = frame->width(); + int height = frame->height(); /* Size of luminance component */ const WebRtc_UWord32 ysize = height * width; @@ -127,7 +121,7 @@ VPMDenoising::ProcessFrame(VideoFrame* frame) } /* Apply de-noising on each pixel, but update variance sub-sampled */ - uint8_t* buffer = frame->Buffer(); + uint8_t* buffer = frame->buffer(kYPlane); for (int i = 0; i < height; i++) { // Collect over height k = i * width; diff --git a/webrtc/modules/video_processing/main/source/denoising.h b/webrtc/modules/video_processing/main/source/denoising.h index eed772f5b..18ed1afa1 100644 --- a/webrtc/modules/video_processing/main/source/denoising.h +++ b/webrtc/modules/video_processing/main/source/denoising.h @@ -29,7 +29,7 @@ public: void Reset(); - WebRtc_Word32 ProcessFrame(VideoFrame* frame); + WebRtc_Word32 ProcessFrame(I420VideoFrame* frame); private: WebRtc_Word32 _id; diff --git a/webrtc/modules/video_processing/main/source/frame_preprocessor.cc b/webrtc/modules/video_processing/main/source/frame_preprocessor.cc index d9609f930..588054165 100644 --- a/webrtc/modules/video_processing/main/source/frame_preprocessor.cc +++ b/webrtc/modules/video_processing/main/source/frame_preprocessor.cc @@ -32,7 +32,6 @@ VPMFramePreprocessor::~VPMFramePreprocessor() delete _spatialResampler; delete _ca; delete _vd; - _resampledFrame.Free(); // is this needed? 
} WebRtc_Word32 @@ -136,10 +135,10 @@ VPMFramePreprocessor::DecimatedHeight() const WebRtc_Word32 -VPMFramePreprocessor::PreprocessFrame(const VideoFrame& frame, - VideoFrame** processedFrame) +VPMFramePreprocessor::PreprocessFrame(const I420VideoFrame& frame, + I420VideoFrame** processedFrame) { - if (frame.Buffer() == NULL || frame.Height() == 0 || frame.Width() == 0) + if (frame.IsZeroSize()) { return VPM_PARAMETER_ERROR; } @@ -157,9 +156,9 @@ VPMFramePreprocessor::PreprocessFrame(const VideoFrame& frame, // Note that we must make a copy of it. // We are not allowed to resample the input frame. *processedFrame = NULL; - if (_spatialResampler->ApplyResample(frame.Width(), frame.Height())) { + if (_spatialResampler->ApplyResample(frame.width(), frame.height())) { WebRtc_Word32 ret = _spatialResampler->ResampleFrame(frame, - _resampledFrame); + &_resampledFrame); if (ret != VPM_OK) return ret; *processedFrame = &_resampledFrame; diff --git a/webrtc/modules/video_processing/main/source/frame_preprocessor.h b/webrtc/modules/video_processing/main/source/frame_preprocessor.h index f85d5c0c2..7fd8a5286 100644 --- a/webrtc/modules/video_processing/main/source/frame_preprocessor.h +++ b/webrtc/modules/video_processing/main/source/frame_preprocessor.h @@ -62,8 +62,8 @@ public: WebRtc_UWord32 DecimatedHeight() const; //Preprocess output: - WebRtc_Word32 PreprocessFrame(const VideoFrame& frame, - VideoFrame** processedFrame); + WebRtc_Word32 PreprocessFrame(const I420VideoFrame& frame, + I420VideoFrame** processedFrame); VideoContentMetrics* ContentMetrics() const; private: @@ -74,7 +74,7 @@ private: WebRtc_Word32 _id; VideoContentMetrics* _contentMetrics; WebRtc_UWord32 _maxFrameRate; - VideoFrame _resampledFrame; + I420VideoFrame _resampledFrame; VPMSpatialResampler* _spatialResampler; VPMContentAnalysis* _ca; VPMVideoDecimator* _vd; diff --git a/webrtc/modules/video_processing/main/source/spatial_resampler.cc b/webrtc/modules/video_processing/main/source/spatial_resampler.cc index 0d7c2e592..dc1ff44db 100644 --- a/webrtc/modules/video_processing/main/source/spatial_resampler.cc +++ b/webrtc/modules/video_processing/main/source/spatial_resampler.cc @@ -62,32 +62,32 @@ VPMSimpleSpatialResampler::Reset() } WebRtc_Word32 -VPMSimpleSpatialResampler::ResampleFrame(const VideoFrame& inFrame, - VideoFrame& outFrame) +VPMSimpleSpatialResampler::ResampleFrame(const I420VideoFrame& inFrame, + I420VideoFrame* outFrame) { if (_resamplingMode == kNoRescaling) - return outFrame.CopyFrame(inFrame); + return outFrame->CopyFrame(inFrame); // Check if re-sampling is needed - if ((inFrame.Width() == (WebRtc_UWord32)_targetWidth) && - (inFrame.Height() == (WebRtc_UWord32)_targetHeight)) { - return outFrame.CopyFrame(inFrame); + if ((inFrame.width() == _targetWidth) && + (inFrame.height() == _targetHeight)) { + return outFrame->CopyFrame(inFrame); } // Setting scaler // TODO(mikhal/marpan): Should we allow for setting the filter mode in // _scale.Set() with |_resamplingMode|? int retVal = 0; - retVal = _scaler.Set(inFrame.Width(), inFrame.Height(), + retVal = _scaler.Set(inFrame.width(), inFrame.height(), _targetWidth, _targetHeight, kI420, kI420, kScaleBox); if (retVal < 0) return retVal; // Setting time parameters to the output frame - all the rest will be // set by the scaler. 
- outFrame.SetTimeStamp(inFrame.TimeStamp()); - outFrame.SetRenderTime(inFrame.RenderTimeMs()); + outFrame->set_timestamp(inFrame.timestamp()); + outFrame->set_render_time_ms(inFrame.render_time_ms()); - retVal = _scaler.Scale(inFrame, &outFrame); + retVal = _scaler.Scale(inFrame, outFrame); if (retVal == 0) return VPM_OK; else diff --git a/webrtc/modules/video_processing/main/source/spatial_resampler.h b/webrtc/modules/video_processing/main/source/spatial_resampler.h index 28a5a6c86..55dd817fc 100644 --- a/webrtc/modules/video_processing/main/source/spatial_resampler.h +++ b/webrtc/modules/video_processing/main/source/spatial_resampler.h @@ -34,8 +34,8 @@ public: virtual void SetInputFrameResampleMode(VideoFrameResampling resamplingMode) = 0; virtual void Reset() = 0; - virtual WebRtc_Word32 ResampleFrame(const VideoFrame& inFrame, - VideoFrame& outFrame) = 0; + virtual WebRtc_Word32 ResampleFrame(const I420VideoFrame& inFrame, + I420VideoFrame* outFrame) = 0; virtual WebRtc_Word32 TargetWidth() = 0; virtual WebRtc_Word32 TargetHeight() = 0; virtual bool ApplyResample(WebRtc_Word32 width, WebRtc_Word32 height) = 0; @@ -50,8 +50,8 @@ public: WebRtc_Word32 height); virtual void SetInputFrameResampleMode(VideoFrameResampling resamplingMode); virtual void Reset(); - virtual WebRtc_Word32 ResampleFrame(const VideoFrame& inFrame, - VideoFrame& outFrame); + virtual WebRtc_Word32 ResampleFrame(const I420VideoFrame& inFrame, + I420VideoFrame* outFrame); virtual WebRtc_Word32 TargetWidth(); virtual WebRtc_Word32 TargetHeight(); virtual bool ApplyResample(WebRtc_Word32 width, WebRtc_Word32 height); diff --git a/webrtc/modules/video_processing/main/source/video_processing_impl.cc b/webrtc/modules/video_processing/main/source/video_processing_impl.cc index 346f65568..35ca81815 100644 --- a/webrtc/modules/video_processing/main/source/video_processing_impl.cc +++ b/webrtc/modules/video_processing/main/source/video_processing_impl.cc @@ -8,6 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. */ + #include "video_processing_impl.h" #include "critical_section_wrapper.h" #include "trace.h" @@ -115,29 +116,22 @@ VideoProcessingModuleImpl::Reset() WebRtc_Word32 VideoProcessingModule::GetFrameStats(FrameStats* stats, - const VideoFrame& frame) + const I420VideoFrame& frame) { - if (frame.Buffer() == NULL) + if (frame.IsZeroSize()) { WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1, - "Null frame pointer"); + "zero size frame"); return VPM_PARAMETER_ERROR; } - int width = frame.Width(); - int height = frame.Height(); - - if (width == 0 || height == 0) - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1, - "Invalid frame size"); - return VPM_PARAMETER_ERROR; - } + int width = frame.width(); + int height = frame.height(); ClearFrameStats(stats); // The histogram needs to be zeroed out. 
SetSubSampling(stats, width, height); - uint8_t* buffer = frame.Buffer(); + const uint8_t* buffer = frame.buffer(kYPlane); // Compute histogram and sum of frame for (int i = 0; i < height; i += (1 << stats->subSamplHeight)) { @@ -182,33 +176,34 @@ VideoProcessingModule::ClearFrameStats(FrameStats* stats) } WebRtc_Word32 -VideoProcessingModule::ColorEnhancement(VideoFrame* frame) +VideoProcessingModule::ColorEnhancement(I420VideoFrame* frame) { return VideoProcessing::ColorEnhancement(frame); } WebRtc_Word32 -VideoProcessingModule::Brighten(VideoFrame* frame, int delta) +VideoProcessingModule::Brighten(I420VideoFrame* frame, int delta) { return VideoProcessing::Brighten(frame, delta); } WebRtc_Word32 -VideoProcessingModuleImpl::Deflickering(VideoFrame* frame, FrameStats* stats) +VideoProcessingModuleImpl::Deflickering(I420VideoFrame* frame, + FrameStats* stats) { CriticalSectionScoped mutex(&_mutex); return _deflickering.ProcessFrame(frame, stats); } WebRtc_Word32 -VideoProcessingModuleImpl::Denoising(VideoFrame* frame) +VideoProcessingModuleImpl::Denoising(I420VideoFrame* frame) { CriticalSectionScoped mutex(&_mutex); return _denoising.ProcessFrame(frame); } WebRtc_Word32 -VideoProcessingModuleImpl::BrightnessDetection(const VideoFrame& frame, +VideoProcessingModuleImpl::BrightnessDetection(const I420VideoFrame& frame, const FrameStats& stats) { CriticalSectionScoped mutex(&_mutex); @@ -273,8 +268,8 @@ VideoProcessingModuleImpl::DecimatedHeight() const } WebRtc_Word32 -VideoProcessingModuleImpl::PreprocessFrame(const VideoFrame& frame, - VideoFrame **processedFrame) +VideoProcessingModuleImpl::PreprocessFrame(const I420VideoFrame& frame, + I420VideoFrame **processedFrame) { CriticalSectionScoped mutex(&_mutex); return _framePreProcessor.PreprocessFrame(frame, processedFrame); diff --git a/webrtc/modules/video_processing/main/source/video_processing_impl.h b/webrtc/modules/video_processing/main/source/video_processing_impl.h index 43c4318a7..b2c12c343 100644 --- a/webrtc/modules/video_processing/main/source/video_processing_impl.h +++ b/webrtc/modules/video_processing/main/source/video_processing_impl.h @@ -36,12 +36,12 @@ public: virtual void Reset(); - virtual WebRtc_Word32 Deflickering(VideoFrame* frame, + virtual WebRtc_Word32 Deflickering(I420VideoFrame* frame, FrameStats* stats); - virtual WebRtc_Word32 Denoising(VideoFrame* frame); + virtual WebRtc_Word32 Denoising(I420VideoFrame* frame); - virtual WebRtc_Word32 BrightnessDetection(const VideoFrame& frame, + virtual WebRtc_Word32 BrightnessDetection(const I420VideoFrame& frame, const FrameStats& stats); //Frame pre-processor functions @@ -72,8 +72,8 @@ public: // Pre-process incoming frame: Sample when needed and compute content // metrics when enabled. // If no resampling takes place - processedFrame is set to NULL. 
- virtual WebRtc_Word32 PreprocessFrame(const VideoFrame& frame, - VideoFrame** processedFrame); + virtual WebRtc_Word32 PreprocessFrame(const I420VideoFrame& frame, + I420VideoFrame** processedFrame); virtual VideoContentMetrics* ContentMetrics() const; private: diff --git a/webrtc/modules/video_processing/main/test/unit_test/brightness_detection_test.cc b/webrtc/modules/video_processing/main/test/unit_test/brightness_detection_test.cc index e8e688336..ffb1c41ea 100644 --- a/webrtc/modules/video_processing/main/test/unit_test/brightness_detection_test.cc +++ b/webrtc/modules/video_processing/main/test/unit_test/brightness_detection_test.cc @@ -18,9 +18,16 @@ TEST_F(VideoProcessingModuleTest, BrightnessDetection) WebRtc_UWord32 frameNum = 0; WebRtc_Word32 brightnessWarning = 0; WebRtc_UWord32 warningCount = 0; - while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) == - _frameLength) + scoped_array video_buffer(new uint8_t[_frame_length]); + while (fread(video_buffer.get(), 1, _frame_length, _sourceFile) == + _frame_length) { + _videoFrame.CreateFrame(_size_y, video_buffer.get(), + _size_uv, video_buffer.get() + _size_y, + _size_uv, video_buffer.get() + _size_y + + _size_uv, + _width, _height, + _width, _half_width, _half_width); frameNum++; VideoProcessingModule::FrameStats stats; ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame)); @@ -42,15 +49,21 @@ TEST_F(VideoProcessingModuleTest, BrightnessDetection) rewind(_sourceFile); frameNum = 0; warningCount = 0; - while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) == - _frameLength && + while (fread(video_buffer.get(), 1, _frame_length, _sourceFile) == + _frame_length && frameNum < 300) { + _videoFrame.CreateFrame(_size_y, video_buffer.get(), + _size_uv, video_buffer.get() + _size_y, + _size_uv, video_buffer.get() + _size_y + + _size_uv, + _width, _height, + _width, _half_width, _half_width); frameNum++; - WebRtc_UWord8* frame = _videoFrame.Buffer(); + WebRtc_UWord8* frame = _videoFrame.buffer(kYPlane); WebRtc_UWord32 yTmp = 0; - for (WebRtc_UWord32 yIdx = 0; yIdx < _width * _height; yIdx++) + for (int yIdx = 0; yIdx < _width * _height; yIdx++) { yTmp = frame[yIdx] << 1; if (yTmp > 255) @@ -80,17 +93,23 @@ TEST_F(VideoProcessingModuleTest, BrightnessDetection) rewind(_sourceFile); frameNum = 0; warningCount = 0; - while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) == _frameLength && - frameNum < 300) + while (fread(video_buffer.get(), 1, _frame_length, _sourceFile) == + _frame_length && frameNum < 300) { + _videoFrame.CreateFrame(_size_y, video_buffer.get(), + _size_uv, video_buffer.get() + _size_y, + _size_uv, video_buffer.get() + _size_y + + _size_uv, + _width, _height, + _width, _half_width, _half_width); frameNum++; - WebRtc_UWord8* frame = _videoFrame.Buffer(); + WebRtc_UWord8* y_plane = _videoFrame.buffer(kYPlane); WebRtc_Word32 yTmp = 0; - for (WebRtc_UWord32 yIdx = 0; yIdx < _width * _height; yIdx++) + for (int yIdx = 0; yIdx < _width * _height; yIdx++) { - yTmp = frame[yIdx] >> 1; - frame[yIdx] = static_cast(yTmp); + yTmp = y_plane[yIdx] >> 1; + y_plane[yIdx] = static_cast(yTmp); } VideoProcessingModule::FrameStats stats; diff --git a/webrtc/modules/video_processing/main/test/unit_test/color_enhancement_test.cc b/webrtc/modules/video_processing/main/test/unit_test/color_enhancement_test.cc index 68bf43ecf..85029fea4 100644 --- a/webrtc/modules/video_processing/main/test/unit_test/color_enhancement_test.cc +++ b/webrtc/modules/video_processing/main/test/unit_test/color_enhancement_test.cc @@ 
-39,15 +39,22 @@ TEST_F(VideoProcessingModuleTest, ColorEnhancement) ASSERT_TRUE(modFile != NULL) << "Could not open output file.\n"; WebRtc_UWord32 frameNum = 0; - while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) == _frameLength) + scoped_array video_buffer(new uint8_t[_frame_length]); + while (fread(video_buffer.get(), 1, _frame_length, _sourceFile) == + _frame_length) { + _videoFrame.CreateFrame(_size_y, video_buffer.get(), + _size_uv, video_buffer.get() + _size_y, + _size_uv, video_buffer.get() + _size_y + + _size_uv, + _width, _height, + _width, _half_width, _half_width); frameNum++; t0 = TickTime::Now(); ASSERT_EQ(0, VideoProcessingModule::ColorEnhancement(&_videoFrame)); t1 = TickTime::Now(); accTicks += t1 - t0; - if (fwrite(_videoFrame.Buffer(), 1, _frameLength, - modFile) != _frameLength) { + if (PrintI420VideoFrame(_videoFrame, modFile) < 0) { return; } } @@ -76,44 +83,70 @@ TEST_F(VideoProcessingModuleTest, ColorEnhancement) rewind(modFile); ASSERT_EQ(refLen, testLen) << "File lengths differ."; - VideoFrame refVideoFrame; - refVideoFrame.VerifyAndAllocate(_frameLength); - refVideoFrame.SetWidth(_width); - refVideoFrame.SetHeight(_height); + I420VideoFrame refVideoFrame; // Compare frame-by-frame. - while (fread(_videoFrame.Buffer(), 1, _frameLength, modFile) == _frameLength) + scoped_array ref_buffer(new uint8_t[_frame_length]); + while (fread(video_buffer.get(), 1, _frame_length, modFile) == + _frame_length) { - ASSERT_EQ(_frameLength, fread(refVideoFrame.Buffer(), 1, _frameLength, refFile)); - EXPECT_EQ(0, memcmp(_videoFrame.Buffer(), refVideoFrame.Buffer(), _frameLength)); + _videoFrame.CreateFrame(_size_y, video_buffer.get(), + _size_uv, video_buffer.get() + _size_y, + _size_uv, video_buffer.get() + _size_y + + _size_uv, + _width, _height, + _width, _half_width, _half_width); + ASSERT_EQ(_frame_length, fread(ref_buffer.get(), 1, _frame_length, + refFile)); + refVideoFrame.CreateFrame(_size_y, ref_buffer.get(), + _size_uv, ref_buffer.get() + _size_y, + _size_uv, ref_buffer.get() + _size_y + + _size_uv, + _width, _height, + _width, _half_width, _half_width); + EXPECT_EQ(0, memcmp(_videoFrame.buffer(kYPlane), + refVideoFrame.buffer(kYPlane), + _size_y)); + EXPECT_EQ(0, memcmp(_videoFrame.buffer(kUPlane), + refVideoFrame.buffer(kUPlane), + _size_uv)); + EXPECT_EQ(0, memcmp(_videoFrame.buffer(kVPlane), + refVideoFrame.buffer(kVPlane), + _size_uv)); } ASSERT_NE(0, feof(_sourceFile)) << "Error reading source file"; // Verify that all color pixels are enhanced, and no luminance values are // altered. - WebRtc_UWord8 *testFrame = new WebRtc_UWord8[_frameLength]; + scoped_array testFrame(new WebRtc_UWord8[_frame_length]); // Use value 128 as probe value, since we know that this will be changed // in the enhancement. 
- memset(testFrame, 128, _frameLength); + memset(testFrame.get(), 128, _frame_length); + + I420VideoFrame testVideoFrame; + testVideoFrame.CreateFrame(_size_y, testFrame.get(), + _size_uv, testFrame.get() + _size_y, + _size_uv, testFrame.get() + _size_y + _size_uv, + _width, _height, + _width, _half_width, _half_width); - VideoFrame testVideoFrame; - testVideoFrame.CopyFrame(_frameLength, testFrame); - testVideoFrame.SetWidth(_width); - testVideoFrame.SetHeight(_height); ASSERT_EQ(0, VideoProcessingModule::ColorEnhancement(&testVideoFrame)); - EXPECT_EQ(0, memcmp(testVideoFrame.Buffer(), testFrame, _width * _height)) + EXPECT_EQ(0, memcmp(testVideoFrame.buffer(kYPlane), testFrame.get(), + _size_y)) << "Function is modifying the luminance."; - EXPECT_NE(0, memcmp(testVideoFrame.Buffer() + _width * _height, - &testFrame[_width * _height], _width * _height / 2)) << - "Function is not modifying all chrominance pixels"; + EXPECT_NE(0, memcmp(testVideoFrame.buffer(kUPlane), + testFrame.get() + _size_y, _size_uv)) << + "Function is not modifying all chrominance pixels"; + EXPECT_NE(0, memcmp(testVideoFrame.buffer(kVPlane), + testFrame.get() + _size_y + _size_uv, _size_uv)) << + "Function is not modifying all chrominance pixels"; ASSERT_EQ(0, fclose(refFile)); ASSERT_EQ(0, fclose(modFile)); - delete [] testFrame; } } // namespace webrtc diff --git a/webrtc/modules/video_processing/main/test/unit_test/content_metrics_test.cc b/webrtc/modules/video_processing/main/test/unit_test/content_metrics_test.cc index 0247e9954..cf784c74e 100644 --- a/webrtc/modules/video_processing/main/test/unit_test/content_metrics_test.cc +++ b/webrtc/modules/video_processing/main/test/unit_test/content_metrics_test.cc @@ -23,9 +23,16 @@ TEST_F(VideoProcessingModuleTest, ContentAnalysis) _ca_c.Initialize(_width,_height); _ca_sse.Initialize(_width,_height); - while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) == _frameLength) + scoped_array<uint8_t> video_buffer(new uint8_t[_frame_length]); + while (fread(video_buffer.get(), 1, _frame_length, _sourceFile) == _frame_length) { + _videoFrame.CreateFrame(_size_y, video_buffer.get(), + _size_uv, video_buffer.get() + _size_y, + _size_uv, video_buffer.get() + _size_y + + _size_uv, + _width, _height, + _width, _half_width, _half_width); _cM_c = _ca_c.ComputeContentMetrics(_videoFrame); _cM_SSE = _ca_sse.ComputeContentMetrics(_videoFrame); diff --git a/webrtc/modules/video_processing/main/test/unit_test/deflickering_test.cc b/webrtc/modules/video_processing/main/test/unit_test/deflickering_test.cc index 7119bdb87..bf284fcb6 100644 --- a/webrtc/modules/video_processing/main/test/unit_test/deflickering_test.cc +++ b/webrtc/modules/video_processing/main/test/unit_test/deflickering_test.cc @@ -11,6 +11,7 @@ #include #include +#include "common_video/libyuv/include/webrtc_libyuv.h" #include "modules/video_processing/main/interface/video_processing.h" #include "modules/video_processing/main/test/unit_test/unit_test.h" #include "system_wrappers/interface/tick_util.h" @@ -42,6 +43,7 @@ TEST_F(VideoProcessingModuleTest, Deflickering) "Could not open output file: " << output_file << "\n"; printf("\nRun time [us / frame]:\n"); + scoped_array<uint8_t> video_buffer(new uint8_t[_frame_length]); for (WebRtc_UWord32 runIdx = 0; runIdx < NumRuns; runIdx++) { TickTime t0; @@ -50,10 +52,17 @@ TEST_F(VideoProcessingModuleTest, Deflickering) WebRtc_UWord32 timeStamp = 1; frameNum = 0; - while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) == _frameLength) + while (fread(video_buffer.get(),
1, _frame_length, _sourceFile) == + _frame_length) { frameNum++; - _videoFrame.SetTimeStamp(timeStamp); + _videoFrame.CreateFrame(_size_y, video_buffer.get(), + _size_uv, video_buffer.get() + _size_y, + _size_uv, video_buffer.get() + _size_y + + _size_uv, + _width, _height, + _width, _half_width, _half_width); + _videoFrame.set_timestamp(timeStamp); t0 = TickTime::Now(); VideoProcessingModule::FrameStats stats; @@ -64,8 +73,7 @@ TEST_F(VideoProcessingModuleTest, Deflickering) if (runIdx == 0) { - if (fwrite(_videoFrame.Buffer(), 1, _frameLength, - deflickerFile) != _frameLength) { + if (PrintI420VideoFrame(_videoFrame, deflickerFile) < 0) { return; } } diff --git a/webrtc/modules/video_processing/main/test/unit_test/denoising_test.cc b/webrtc/modules/video_processing/main/test/unit_test/denoising_test.cc index a4d97619b..1d0f9a28d 100644 --- a/webrtc/modules/video_processing/main/test/unit_test/denoising_test.cc +++ b/webrtc/modules/video_processing/main/test/unit_test/denoising_test.cc @@ -11,6 +11,7 @@ #include #include +#include "common_video/libyuv/include/webrtc_libyuv.h" #include "modules/video_processing/main/interface/video_processing.h" #include "modules/video_processing/main/test/unit_test/unit_test.h" #include "system_wrappers/interface/tick_util.h" @@ -47,21 +48,27 @@ TEST_F(VideoProcessingModuleTest, Denoising) WebRtc_Word32 modifiedPixels = 0; frameNum = 0; - while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) == _frameLength) + scoped_array<uint8_t> video_buffer(new uint8_t[_frame_length]); + while (fread(video_buffer.get(), 1, _frame_length, _sourceFile) == + _frame_length) { + _videoFrame.CreateFrame(_size_y, video_buffer.get(), + _size_uv, video_buffer.get() + _size_y, + _size_uv, + video_buffer.get() + _size_y + _size_uv, + _width, _height, + _width, _half_width, _half_width); frameNum++; - WebRtc_UWord8* sourceBuffer = _videoFrame.Buffer(); + WebRtc_UWord8* sourceBuffer = _videoFrame.buffer(kYPlane); // Add noise to a part in video stream // Random noise // TODO: investigate the effectiveness of this test. - //for(WebRtc_UWord32 ir = 0; ir < _frameLength; ir++) - // sourceBuffer[ir] = 128 - for (WebRtc_UWord32 ir = 0; ir < _height; ir++) + for (int ir = 0; ir < _height; ir++) { WebRtc_UWord32 ik = ir * _width; - for (WebRtc_UWord32 ic = 0; ic < _width; ic++) + for (int ic = 0; ic < _width; ic++) { WebRtc_UWord8 r = rand() % 16; r -= 8; @@ -92,8 +99,7 @@ TEST_F(VideoProcessingModuleTest, Denoising) if (runIdx == 0) { - if (fwrite(_videoFrame.Buffer(), 1, _frameLength, - noiseFile) != _frameLength) { + if (PrintI420VideoFrame(_videoFrame, noiseFile) < 0) { return; } } @@ -105,8 +111,7 @@ TEST_F(VideoProcessingModuleTest, Denoising) if (runIdx == 0) { - if (fwrite(_videoFrame.Buffer(), 1, _frameLength, - denoiseFile) != _frameLength) { + if (PrintI420VideoFrame(_videoFrame, denoiseFile) < 0) { return; } } diff --git a/webrtc/modules/video_processing/main/test/unit_test/unit_test.cc b/webrtc/modules/video_processing/main/test/unit_test/unit_test.cc index f6d7d10b1..2b06ccd6e 100644 --- a/webrtc/modules/video_processing/main/test/unit_test/unit_test.cc +++ b/webrtc/modules/video_processing/main/test/unit_test/unit_test.cc @@ -23,16 +23,21 @@ namespace webrtc { // quality when the resampled frame is scaled back up/down to the // original/source size. |expected_psnr| is set to be ~0.1/0.05dB lower than // actual PSNR verified under the same conditions.
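Note: every fread-into-Buffer() loop replaced above follows the same shape: read one raw I420 frame into a scratch buffer, then wrap it with the CreateFrame() overload this CL introduces. A minimal sketch of that per-frame step; ReadAndWrapFrame is an illustrative name, not a helper in the patch:

    #include <cstdio>
    #include <stdint.h>
    #include "common_video/interface/i420_video_frame.h"

    // Reads one packed I420 frame from |source| into |scratch| and wraps it
    // in |frame|. Returns false on EOF or a short read.
    bool ReadAndWrapFrame(FILE* source, uint8_t* scratch, size_t frame_length,
                          int width, int height,
                          webrtc::I420VideoFrame* frame) {
      if (fread(scratch, 1, frame_length, source) != frame_length)
        return false;
      const int half_width = (width + 1) / 2;
      const int size_y = width * height;
      const int size_uv = half_width * ((height + 1) / 2);
      return frame->CreateFrame(size_y, scratch,
                                size_uv, scratch + size_y,
                                size_uv, scratch + size_y + size_uv,
                                width, height,
                                width, half_width, half_width) == 0;
    }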
-void TestSize(const VideoFrame& sourceFrame, int target_width, +void TestSize(const I420VideoFrame& sourceFrame, int target_width, int target_height, int mode, double expected_psnr, VideoProcessingModule* vpm); +bool CompareFrames(const webrtc::I420VideoFrame& frame1, + const webrtc::I420VideoFrame& frame2); VideoProcessingModuleTest::VideoProcessingModuleTest() : _vpm(NULL), _sourceFile(NULL), _width(352), + _half_width(_width / 2), _height(288), - _frameLength(CalcBufferSize(kI420, 352, 288)) + _size_y(_width * _height), + _size_uv(_half_width * _height /2), + _frame_length(CalcBufferSize(kI420, _width, _height)) { } @@ -41,9 +46,8 @@ void VideoProcessingModuleTest::SetUp() { _vpm = VideoProcessingModule::Create(0); ASSERT_TRUE(_vpm != NULL); - ASSERT_EQ(0, _videoFrame.VerifyAndAllocate(_frameLength)); - _videoFrame.SetWidth(_width); - _videoFrame.SetHeight(_height); + ASSERT_EQ(0, _videoFrame.CreateEmptyFrame(_width, _height, _width, + _half_width, _half_width)); const std::string video_file = webrtc::test::ResourcePath("foreman_cif", "yuv"); @@ -70,9 +74,9 @@ TEST_F(VideoProcessingModuleTest, HandleNullBuffer) VideoProcessingModule::FrameStats stats; ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame)); // Video frame with unallocated buffer. - VideoFrame videoFrame; - videoFrame.SetWidth(_width); - videoFrame.SetHeight(_height); + I420VideoFrame videoFrame; + videoFrame.set_width(_width); + videoFrame.set_height(_height); EXPECT_EQ(-3, _vpm->GetFrameStats(&stats, videoFrame)); @@ -88,12 +92,15 @@ TEST_F(VideoProcessingModuleTest, HandleBadStats) { VideoProcessingModule::FrameStats stats; + scoped_array<uint8_t> video_buffer(new uint8_t[_frame_length]); + ASSERT_EQ(_frame_length, fread(video_buffer.get(), 1, _frame_length, + _sourceFile)); + _videoFrame.CreateFrame(_size_y, video_buffer.get(), + _size_uv, video_buffer.get() + _size_y, + _size_uv, video_buffer.get() + _size_y + _size_uv, + _width, _height, + _width, _half_width, _half_width); - ASSERT_EQ(_frameLength, fread(_videoFrame.Buffer(), 1, _frameLength, - _sourceFile)); - - _videoFrame.SetWidth(_width); - _videoFrame.SetHeight(_height); EXPECT_EQ(-1, _vpm->Deflickering(&_videoFrame, &stats)); EXPECT_EQ(-3, _vpm->BrightnessDetection(_videoFrame, stats)); @@ -104,21 +111,9 @@ TEST_F(VideoProcessingModuleTest, HandleBadSize) VideoProcessingModule::FrameStats stats; ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame)); - // Bad width - _videoFrame.SetWidth(0); - EXPECT_EQ(-3, _vpm->GetFrameStats(&stats, _videoFrame)); - - EXPECT_EQ(-1, _vpm->ColorEnhancement(&_videoFrame)); - - EXPECT_EQ(-1, _vpm->Deflickering(&_videoFrame, &stats)); - - EXPECT_EQ(-1, _vpm->Denoising(&_videoFrame)); - - EXPECT_EQ(-3, _vpm->BrightnessDetection(_videoFrame, stats)); - - // Bad height - _videoFrame.SetWidth(_width); - _videoFrame.SetHeight(0); + _videoFrame.ResetSize(); + _videoFrame.set_width(_width); + _videoFrame.set_height(0); EXPECT_EQ(-3, _vpm->GetFrameStats(&stats, _videoFrame)); EXPECT_EQ(-1, _vpm->ColorEnhancement(&_videoFrame)); @@ -132,58 +127,73 @@ TEST_F(VideoProcessingModuleTest, HandleBadSize) EXPECT_EQ(VPM_PARAMETER_ERROR, _vpm->SetTargetResolution(0,0,0)); EXPECT_EQ(VPM_PARAMETER_ERROR, _vpm->SetMaxFrameRate(0)); - VideoFrame *outFrame = NULL; + I420VideoFrame *outFrame = NULL; EXPECT_EQ(VPM_PARAMETER_ERROR, _vpm->PreprocessFrame(_videoFrame, &outFrame)); } TEST_F(VideoProcessingModuleTest, IdenticalResultsAfterReset) { - VideoFrame videoFrame2; + I420VideoFrame videoFrame2;
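Note: the new _size_y/_size_uv fixture members encode the packed I420 layout every CreateFrame() call above depends on: a full-resolution Y plane followed by two quarter-resolution chroma planes. A sketch of that arithmetic, with illustrative names only:

    #include <stdint.h>

    struct I420Planes {
      const uint8_t* y;
      const uint8_t* u;
      const uint8_t* v;
      int size_y;   // width * height
      int size_uv;  // half_width * half_height
    };

    I420Planes SplitI420(const uint8_t* buffer, int width, int height) {
      const int half_width = (width + 1) / 2;
      const int half_height = (height + 1) / 2;
      I420Planes p;
      p.size_y = width * height;
      p.size_uv = half_width * half_height;
      p.y = buffer;                         // Y plane at offset 0.
      p.u = buffer + p.size_y;              // U plane follows Y.
      p.v = buffer + p.size_y + p.size_uv;  // V plane follows U.
      return p;
    }

For the 352x288 fixture this gives _size_y = 101376 and _size_uv = 25344, for a total _frame_length of 152064 bytes, matching CalcBufferSize(kI420, 352, 288).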
VideoProcessingModule::FrameStats stats; - - ASSERT_EQ(0, videoFrame2.VerifyAndAllocate(_frameLength)); - videoFrame2.SetWidth(_width); - videoFrame2.SetHeight(_height); - // Only testing non-static functions here. - ASSERT_EQ(_frameLength, fread(_videoFrame.Buffer(), 1, _frameLength, + scoped_array<uint8_t> video_buffer(new uint8_t[_frame_length]); + ASSERT_EQ(_frame_length, fread(video_buffer.get(), 1, _frame_length, _sourceFile)); + ASSERT_EQ(0, _videoFrame.CreateFrame(_size_y, video_buffer.get(), + _size_uv, video_buffer.get() + _size_y, + _size_uv, video_buffer.get() + _size_y + _size_uv, + _width, _height, + _width, _half_width, _half_width)); ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame)); - memcpy(videoFrame2.Buffer(), _videoFrame.Buffer(), _frameLength); + ASSERT_EQ(0, videoFrame2.CopyFrame(_videoFrame)); ASSERT_EQ(0, _vpm->Deflickering(&_videoFrame, &stats)); _vpm->Reset(); // Retrieve frame stats again in case Deflickering() has zeroed them. ASSERT_EQ(0, _vpm->GetFrameStats(&stats, videoFrame2)); ASSERT_EQ(0, _vpm->Deflickering(&videoFrame2, &stats)); - EXPECT_EQ(0, memcmp(_videoFrame.Buffer(), videoFrame2.Buffer(), - _frameLength)); + EXPECT_TRUE(CompareFrames(_videoFrame, videoFrame2)); - ASSERT_EQ(_frameLength, fread(_videoFrame.Buffer(), 1, _frameLength, - _sourceFile)); - memcpy(videoFrame2.Buffer(), _videoFrame.Buffer(), _frameLength); + ASSERT_EQ(_frame_length, fread(video_buffer.get(), 1, _frame_length, + _sourceFile)); + _videoFrame.CreateFrame(_size_y, video_buffer.get(), + _size_uv, video_buffer.get() + _size_y, + _size_uv, video_buffer.get() + _size_y + _size_uv, + _width, _height, + _width, _half_width, _half_width); + videoFrame2.CopyFrame(_videoFrame); + EXPECT_TRUE(CompareFrames(_videoFrame, videoFrame2)); ASSERT_GE(_vpm->Denoising(&_videoFrame), 0); _vpm->Reset(); ASSERT_GE(_vpm->Denoising(&videoFrame2), 0); - EXPECT_EQ(0, memcmp(_videoFrame.Buffer(), videoFrame2.Buffer(), - _frameLength)); + EXPECT_TRUE(CompareFrames(_videoFrame, videoFrame2)); - ASSERT_EQ(_frameLength, fread(_videoFrame.Buffer(), 1, _frameLength, - _sourceFile)); + ASSERT_EQ(_frame_length, fread(video_buffer.get(), 1, _frame_length, + _sourceFile)); + _videoFrame.CreateFrame(_size_y, video_buffer.get(), + _size_uv, video_buffer.get() + _size_y, + _size_uv, video_buffer.get() + _size_y + _size_uv, + _width, _height, + _width, _half_width, _half_width); ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame)); - memcpy(videoFrame2.Buffer(), _videoFrame.Buffer(), _frameLength); + videoFrame2.CopyFrame(_videoFrame); ASSERT_EQ(0, _vpm->BrightnessDetection(_videoFrame, stats)); _vpm->Reset(); ASSERT_EQ(0, _vpm->BrightnessDetection(videoFrame2, stats)); - EXPECT_EQ(0, memcmp(_videoFrame.Buffer(), videoFrame2.Buffer(), - _frameLength)); + EXPECT_TRUE(CompareFrames(_videoFrame, videoFrame2)); } TEST_F(VideoProcessingModuleTest, FrameStats) { VideoProcessingModule::FrameStats stats; - ASSERT_EQ(_frameLength, fread(_videoFrame.Buffer(), 1, _frameLength, - _sourceFile)); + scoped_array<uint8_t> video_buffer(new uint8_t[_frame_length]); + ASSERT_EQ(_frame_length, fread(video_buffer.get(), 1, _frame_length, + _sourceFile)); + _videoFrame.CreateFrame(_size_y, video_buffer.get(), + _size_uv, video_buffer.get() + _size_y, + _size_uv, video_buffer.get() + _size_y + _size_uv, + _width, _height, + _width, _half_width, _half_width); EXPECT_FALSE(_vpm->ValidFrameStats(stats)); EXPECT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame)); @@ -214,7 +224,7 @@ TEST_F(VideoProcessingModuleTest, PreprocessorLogic) // Disable spatial
sampling _vpm->SetInputFrameResampleMode(kNoRescaling); ASSERT_EQ(VPM_OK, _vpm->SetTargetResolution(100, 100, 30)); - VideoFrame *outFrame = NULL; + I420VideoFrame *outFrame = NULL; ASSERT_EQ(VPM_OK, _vpm->PreprocessFrame(_videoFrame, &outFrame)); // No rescaling=> output frame = NULL ASSERT_TRUE(outFrame == NULL); @@ -230,9 +240,6 @@ TEST_F(VideoProcessingModuleTest, Resampler) TickTime t0; TickTime t1; TickInterval accTicks; - WebRtc_Word32 height = 288; - WebRtc_Word32 width = 352; - WebRtc_Word32 lengthSourceFrame = width*height*3/2; rewind(_sourceFile); ASSERT_TRUE(_sourceFile != NULL) << @@ -244,12 +251,15 @@ TEST_F(VideoProcessingModuleTest, Resampler) _vpm->EnableTemporalDecimation(false); // Reading test frame - VideoFrame sourceFrame; - ASSERT_EQ(0, sourceFrame.VerifyAndAllocate(lengthSourceFrame)); - EXPECT_GT(fread(sourceFrame.Buffer(), 1, lengthSourceFrame, _sourceFile), 0u); - ASSERT_EQ(0, sourceFrame.SetLength(lengthSourceFrame)); - sourceFrame.SetHeight(height); - sourceFrame.SetWidth(width); + I420VideoFrame sourceFrame; + scoped_array<uint8_t> video_buffer(new uint8_t[_frame_length]); + ASSERT_EQ(_frame_length, fread(video_buffer.get(), 1, _frame_length, + _sourceFile)); + sourceFrame.CreateFrame(_size_y, video_buffer.get(), + _size_uv, video_buffer.get() + _size_y, + _size_uv, video_buffer.get() + _size_y + _size_uv, + _width, _height, + _width, _half_width, _half_width); for (WebRtc_UWord32 runIdx = 0; runIdx < NumRuns; runIdx++) { @@ -282,8 +292,6 @@ TEST_F(VideoProcessingModuleTest, Resampler) avgRuntime += accTicks.Microseconds(); } - sourceFrame.Free(); - printf("\nAverage run time = %d us / frame\n", //static_cast<int>(avgRuntime / frameNum / NumRuns)); static_cast<int>(avgRuntime)); @@ -292,12 +300,12 @@ static_cast<int>(minRuntime)); } -void TestSize(const VideoFrame& source_frame, int target_width, +void TestSize(const I420VideoFrame& source_frame, int target_width, int target_height, int mode, double expected_psnr, VideoProcessingModule* vpm) { - int source_width = source_frame.Width(); - int source_height = source_frame.Height(); - VideoFrame* out_frame = NULL; + int source_width = source_frame.width(); + int source_height = source_frame.height(); + I420VideoFrame* out_frame = NULL; ASSERT_EQ(VPM_OK, vpm->SetTargetResolution(target_width, target_height, 30)); ASSERT_EQ(VPM_OK, vpm->PreprocessFrame(source_frame, &out_frame)); @@ -309,12 +317,6 @@ void TestSize(const VideoFrame& source_frame, int target_width, // (3) write out the processed frame for viewing. if (target_width != static_cast<int>(source_width) || target_height != static_cast<int>(source_height)) { - int target_half_width = (target_width + 1) >> 1; - int target_half_height = (target_height + 1) >> 1; - int required_size_resampled = target_width * target_height + - 2 * (target_half_width * target_half_height); - ASSERT_EQ(required_size_resampled, static_cast<int>(out_frame->Length())); - // Write the processed frame to file for visual inspection. std::ostringstream filename; filename << webrtc::test::OutputPath() << "Resampler_"<< mode << "_" << @@ -323,15 +325,14 @@ void TestSize(const VideoFrame& source_frame, int target_width, std::cout << "Watch " << filename.str() << " and verify that it is okay."
<< std::endl; FILE* stand_alone_file = fopen(filename.str().c_str(), "wb"); - if (fwrite(out_frame->Buffer(), 1, - out_frame->Length(), stand_alone_file) != out_frame->Length()) { + if (PrintI420VideoFrame(*out_frame, stand_alone_file) < 0) { fprintf(stderr, "Failed to write frame for scaling to width/height: " " %d %d \n", target_width, target_height); return; } fclose(stand_alone_file); - VideoFrame resampled_source_frame; + I420VideoFrame resampled_source_frame; resampled_source_frame.CopyFrame(*out_frame); // Scale |resampled_source_frame| back to original/source size. @@ -349,24 +350,36 @@ void TestSize(const VideoFrame& source_frame, int target_width, std::cout << "Watch " << filename2.str() << " and verify that it is okay." << std::endl; stand_alone_file = fopen(filename2.str().c_str(), "wb"); - if (fwrite(out_frame->Buffer(), 1, - out_frame->Length(), stand_alone_file) != out_frame->Length()) { + if (PrintI420VideoFrame(*out_frame, stand_alone_file) < 0) { fprintf(stderr, "Failed to write frame for scaling to width/height " - "%d %d \n", source_width, source_height); + "%d %d \n", source_width, source_height); return; } fclose(stand_alone_file); // Compute the PSNR and check expectation. - double psnr = I420PSNR(source_frame.Buffer(), out_frame->Buffer(), - source_width, source_height); + double psnr = I420PSNR(&source_frame, out_frame); EXPECT_GT(psnr, expected_psnr); printf("PSNR: %f. PSNR is between source of size %d %d, and a modified " "source which is scaled down/up to: %d %d, and back to source size \n", psnr, source_width, source_height, target_width, target_height); - - resampled_source_frame.Free(); } } +bool CompareFrames(const webrtc::I420VideoFrame& frame1, + const webrtc::I420VideoFrame& frame2) { + for (int plane = 0; plane < webrtc::kNumOfPlanes; plane++) { + webrtc::PlaneType plane_type = static_cast<webrtc::PlaneType>(plane); + int allocated_size1 = frame1.allocated_size(plane_type); + int allocated_size2 = frame2.allocated_size(plane_type); + if (allocated_size1 != allocated_size2) + return false; + const uint8_t* plane_buffer1 = frame1.buffer(plane_type); + const uint8_t* plane_buffer2 = frame2.buffer(plane_type); + if (memcmp(plane_buffer1, plane_buffer2, allocated_size1)) + return false; + } + return true; +} + } // namespace webrtc diff --git a/webrtc/modules/video_processing/main/test/unit_test/unit_test.h b/webrtc/modules/video_processing/main/test/unit_test/unit_test.h index 2363e1a14..67496d11c 100644 --- a/webrtc/modules/video_processing/main/test/unit_test/unit_test.h +++ b/webrtc/modules/video_processing/main/test/unit_test/unit_test.h @@ -36,10 +36,13 @@ protected: } VideoProcessingModule* _vpm; FILE* _sourceFile; - VideoFrame _videoFrame; - const WebRtc_UWord32 _width; - const WebRtc_UWord32 _height; - const WebRtc_UWord32 _frameLength; + I420VideoFrame _videoFrame; + const int _width; + const int _half_width; + const int _height; + const int _size_y; + const int _size_uv; + const unsigned int _frame_length; }; } // namespace webrtc diff --git a/webrtc/modules/video_render/main/interface/video_render.h b/webrtc/modules/video_render/main/interface/video_render.h index 9ca133f39..8d079fd7c 100644 --- a/webrtc/modules/video_render/main/interface/video_render.h +++ b/webrtc/modules/video_render/main/interface/video_render.h @@ -172,7 +172,7 @@ public: */ virtual WebRtc_Word32 GetLastRenderedFrame(const WebRtc_UWord32 streamId, - VideoFrame &frame) const = 0; + I420VideoFrame &frame) const = 0; /************************************************************************** *
@@ -272,13 +272,13 @@ public: */ virtual WebRtc_Word32 SetStartImage(const WebRtc_UWord32 streamId, - const VideoFrame& videoFrame) = 0; + const I420VideoFrame& videoFrame) = 0; /* * Set a timout image. The image is rendered if no videoframe has been delivered */ virtual WebRtc_Word32 SetTimeoutImage(const WebRtc_UWord32 streamId, - const VideoFrame& videoFrame, + const I420VideoFrame& videoFrame, const WebRtc_UWord32 timeout)= 0; virtual WebRtc_Word32 MirrorRenderStream(const int renderId, diff --git a/webrtc/modules/video_render/main/interface/video_render_defines.h b/webrtc/modules/video_render/main/interface/video_render_defines.h index b1034a64a..dbfd88e40 100644 --- a/webrtc/modules/video_render/main/interface/video_render_defines.h +++ b/webrtc/modules/video_render/main/interface/video_render_defines.h @@ -13,6 +13,7 @@ // Includes #include "common_types.h" +#include "common_video/interface/i420_video_frame.h" #include "modules/interface/module_common_types.h" namespace webrtc @@ -48,7 +49,7 @@ class VideoRenderCallback { public: virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId, - VideoFrame& videoFrame) = 0; + I420VideoFrame& videoFrame) = 0; protected: virtual ~VideoRenderCallback() diff --git a/webrtc/modules/video_render/main/source/android/video_render_android_native_opengl2.cc b/webrtc/modules/video_render/main/source/android/video_render_android_native_opengl2.cc index 0bcf676a0..bfea5e0c2 100644 --- a/webrtc/modules/video_render/main/source/android/video_render_android_native_opengl2.cc +++ b/webrtc/modules/video_render/main/source/android/video_render_android_native_opengl2.cc @@ -382,10 +382,10 @@ WebRtc_Word32 AndroidNativeOpenGl2Channel::Init(WebRtc_Word32 zOrder, WebRtc_Word32 AndroidNativeOpenGl2Channel::RenderFrame( const WebRtc_UWord32 /*streamId*/, - VideoFrame& videoFrame) { + I420VideoFrame& videoFrame) { // WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__); _renderCritSect.Enter(); - _bufferToRender.SwapFrame(videoFrame); + _bufferToRender.SwapFrame(&videoFrame); _renderCritSect.Leave(); _renderer.ReDraw(); return 0; diff --git a/webrtc/modules/video_render/main/source/android/video_render_android_native_opengl2.h b/webrtc/modules/video_render/main/source/android/video_render_android_native_opengl2.h index c69f17d12..b8fd3370a 100644 --- a/webrtc/modules/video_render/main/source/android/video_render_android_native_opengl2.h +++ b/webrtc/modules/video_render/main/source/android/video_render_android_native_opengl2.h @@ -38,7 +38,7 @@ class AndroidNativeOpenGl2Channel: public AndroidStream { //Implement VideoRenderCallback virtual WebRtc_Word32 RenderFrame( const WebRtc_UWord32 streamId, - VideoFrame& videoFrame); + I420VideoFrame& videoFrame); //Implements AndroidStream virtual void DeliverFrame(JNIEnv* jniEnv); @@ -57,7 +57,7 @@ class AndroidNativeOpenGl2Channel: public AndroidStream { WebRtc_UWord32 _id; CriticalSectionWrapper& _renderCritSect; - VideoFrame _bufferToRender; + I420VideoFrame _bufferToRender; VideoRenderAndroid& _renderer; JavaVM* _jvm; jobject _javaRenderObj; diff --git a/webrtc/modules/video_render/main/source/android/video_render_android_surface_view.cc b/webrtc/modules/video_render/main/source/android/video_render_android_surface_view.cc index d78ec4ba1..15533497b 100644 --- a/webrtc/modules/video_render/main/source/android/video_render_android_surface_view.cc +++ b/webrtc/modules/video_render/main/source/android/video_render_android_surface_view.cc @@ -412,10 +412,10 @@ WebRtc_Word32 
AndroidSurfaceViewChannel::Init( WebRtc_Word32 AndroidSurfaceViewChannel::RenderFrame( const WebRtc_UWord32 /*streamId*/, - VideoFrame& videoFrame) { + I420VideoFrame& videoFrame) { // WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__); _renderCritSect.Enter(); - _bufferToRender.SwapFrame(videoFrame); + _bufferToRender.SwapFrame(&videoFrame); _renderCritSect.Leave(); _renderer.ReDraw(); return 0; @@ -428,11 +428,11 @@ WebRtc_Word32 AndroidSurfaceViewChannel::RenderFrame( void AndroidSurfaceViewChannel::DeliverFrame(JNIEnv* jniEnv) { _renderCritSect.Enter(); - if (_bitmapWidth != _bufferToRender.Width() || - _bitmapHeight != _bufferToRender.Height()) { + if (_bitmapWidth != _bufferToRender.width() || + _bitmapHeight != _bufferToRender.height()) { WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: New render size %d " "%d",__FUNCTION__, - _bufferToRender.Width(), _bufferToRender.Height()); + _bufferToRender.width(), _bufferToRender.height()); if (_javaByteBufferObj) { jniEnv->DeleteGlobalRef(_javaByteBufferObj); _javaByteBufferObj = NULL; @@ -441,8 +441,8 @@ void AndroidSurfaceViewChannel::DeliverFrame(JNIEnv* jniEnv) { jobject javaByteBufferObj = jniEnv->CallObjectMethod(_javaRenderObj, _createByteBufferCid, - _bufferToRender.Width(), - _bufferToRender.Height()); + _bufferToRender.width(), + _bufferToRender.height()); _javaByteBufferObj = jniEnv->NewGlobalRef(javaByteBufferObj); if (!_javaByteBufferObj) { WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not " @@ -452,15 +452,14 @@ void AndroidSurfaceViewChannel::DeliverFrame(JNIEnv* jniEnv) { } else { _directBuffer = static_cast (jniEnv->GetDirectBufferAddress(_javaByteBufferObj)); - _bitmapWidth = _bufferToRender.Width(); - _bitmapHeight = _bufferToRender.Height(); + _bitmapWidth = _bufferToRender.width(); + _bitmapHeight = _bufferToRender.height(); } } if(_javaByteBufferObj && _bitmapWidth && _bitmapHeight) { const int conversionResult = - ConvertFromI420(_bufferToRender, _bitmapWidth, - kRGB565, 0, _directBuffer); + ConvertFromI420(_bufferToRender, kRGB565, 0, _directBuffer); if (conversionResult < 0) { WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Color conversion" diff --git a/webrtc/modules/video_render/main/source/android/video_render_android_surface_view.h b/webrtc/modules/video_render/main/source/android/video_render_android_surface_view.h index 1122a79ac..8283794dc 100644 --- a/webrtc/modules/video_render/main/source/android/video_render_android_surface_view.h +++ b/webrtc/modules/video_render/main/source/android/video_render_android_surface_view.h @@ -36,7 +36,7 @@ class AndroidSurfaceViewChannel : public AndroidStream { //Implement VideoRenderCallback virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId, - VideoFrame& videoFrame); + I420VideoFrame& videoFrame); //Implements AndroidStream virtual void DeliverFrame(JNIEnv* jniEnv); @@ -45,7 +45,7 @@ class AndroidSurfaceViewChannel : public AndroidStream { WebRtc_UWord32 _id; CriticalSectionWrapper& _renderCritSect; - VideoFrame _bufferToRender; + I420VideoFrame _bufferToRender; VideoRenderAndroid& _renderer; JavaVM* _jvm; jobject _javaRenderObj; @@ -56,8 +56,8 @@ class AndroidSurfaceViewChannel : public AndroidStream { jmethodID _drawByteBufferCid; jmethodID _setCoordinatesCid; - unsigned int _bitmapWidth; - unsigned int _bitmapHeight; + int _bitmapWidth; + int _bitmapHeight; }; class AndroidSurfaceViewRenderer : private VideoRenderAndroid { diff --git 
a/webrtc/modules/video_render/main/source/android/video_render_opengles20.cc b/webrtc/modules/video_render/main/source/android/video_render_opengles20.cc index 28bf9ae20..5bcf148c2 100644 --- a/webrtc/modules/video_render/main/source/android/video_render_opengles20.cc +++ b/webrtc/modules/video_render/main/source/android/video_render_opengles20.cc @@ -215,9 +215,10 @@ WebRtc_Word32 VideoRenderOpenGles20::SetCoordinates(WebRtc_Word32 zOrder, return 0; } -WebRtc_Word32 VideoRenderOpenGles20::Render(const VideoFrame& frameToRender) { +WebRtc_Word32 VideoRenderOpenGles20::Render(const I420VideoFrame& + frameToRender) { - if (frameToRender.Length() == 0) { + if (frameToRender.IsZeroSize()) { return -1; } @@ -227,8 +228,8 @@ WebRtc_Word32 VideoRenderOpenGles20::Render(const VideoFrame& frameToRender) { glUseProgram(_program); checkGlError("glUseProgram"); - if (_textureWidth != (GLsizei) frameToRender.Width() || - _textureHeight != (GLsizei) frameToRender.Height()) { + if (_textureWidth != (GLsizei) frameToRender.width() || + _textureHeight != (GLsizei) frameToRender.height()) { SetupTextures(frameToRender); } else { @@ -327,14 +328,13 @@ void VideoRenderOpenGles20::checkGlError(const char* op) { #endif } -void VideoRenderOpenGles20::SetupTextures(const VideoFrame& frameToRender) { +void VideoRenderOpenGles20::SetupTextures(const I420VideoFrame& frameToRender) { WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, - "%s: width %d, height %d length %u", __FUNCTION__, - frameToRender.Width(), frameToRender.Height(), - frameToRender.Length()); + "%s: width %d, height %d", __FUNCTION__, + frameToRender.width(), frameToRender.height()); - const GLsizei width = frameToRender.Width(); - const GLsizei height = frameToRender.Height(); + const GLsizei width = frameToRender.width(); + const GLsizei height = frameToRender.height(); glGenTextures(3, _textureIds); //Generate the Y, U and V texture GLuint currentTextureId = _textureIds[0]; // Y @@ -349,7 +349,7 @@ void VideoRenderOpenGles20::SetupTextures(const VideoFrame& frameToRender) { glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width, height, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, - (const GLvoid*) frameToRender.Buffer()); + (const GLvoid*) frameToRender.buffer(kYPlane)); currentTextureId = _textureIds[1]; // U glActiveTexture( GL_TEXTURE1); @@ -361,7 +361,7 @@ void VideoRenderOpenGles20::SetupTextures(const VideoFrame& frameToRender) { glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); - const WebRtc_UWord8* uComponent = frameToRender.Buffer() + width * height; + const WebRtc_UWord8* uComponent = frameToRender.buffer(kUPlane); glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width / 2, height / 2, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, (const GLvoid*) uComponent); @@ -374,7 +374,7 @@ void VideoRenderOpenGles20::SetupTextures(const VideoFrame& frameToRender) { glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); - const WebRtc_UWord8* vComponent = uComponent + (width * height) / 4; + const WebRtc_UWord8* vComponent = frameToRender.buffer(kVPlane); glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width / 2, height / 2, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, (const GLvoid*) vComponent); checkGlError("SetupTextures"); @@ -383,27 +383,29 @@ void VideoRenderOpenGles20::SetupTextures(const VideoFrame& frameToRender) { _textureHeight = height; } -void VideoRenderOpenGles20::UpdateTextures(const VideoFrame& 
frameToRender) { - const GLsizei width = frameToRender.Width(); - const GLsizei height = frameToRender.Height(); +void VideoRenderOpenGles20::UpdateTextures(const + I420VideoFrame& frameToRender) { + const GLsizei width = frameToRender.width(); + const GLsizei height = frameToRender.height(); GLuint currentTextureId = _textureIds[0]; // Y glActiveTexture( GL_TEXTURE0); glBindTexture(GL_TEXTURE_2D, currentTextureId); glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, GL_LUMINANCE, - GL_UNSIGNED_BYTE, (const GLvoid*) frameToRender.Buffer()); + GL_UNSIGNED_BYTE, + (const GLvoid*) frameToRender.buffer(kYPlane)); currentTextureId = _textureIds[1]; // U glActiveTexture( GL_TEXTURE1); glBindTexture(GL_TEXTURE_2D, currentTextureId); - const WebRtc_UWord8* uComponent = frameToRender.Buffer() + width * height; + const WebRtc_UWord8* uComponent = frameToRender.buffer(kUPlane); glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width / 2, height / 2, GL_LUMINANCE, GL_UNSIGNED_BYTE, (const GLvoid*) uComponent); currentTextureId = _textureIds[2]; // V glActiveTexture( GL_TEXTURE2); glBindTexture(GL_TEXTURE_2D, currentTextureId); - const WebRtc_UWord8* vComponent = uComponent + (width * height) / 4; + const WebRtc_UWord8* vComponent = frameToRender.buffer(kVPlane); glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width / 2, height / 2, GL_LUMINANCE, GL_UNSIGNED_BYTE, (const GLvoid*) vComponent); checkGlError("UpdateTextures"); diff --git a/webrtc/modules/video_render/main/source/android/video_render_opengles20.h b/webrtc/modules/video_render/main/source/android/video_render_opengles20.h index 8f1743eca..bf9deb013 100644 --- a/webrtc/modules/video_render/main/source/android/video_render_opengles20.h +++ b/webrtc/modules/video_render/main/source/android/video_render_opengles20.h @@ -25,7 +25,7 @@ class VideoRenderOpenGles20 { ~VideoRenderOpenGles20(); WebRtc_Word32 Setup(WebRtc_Word32 widht, WebRtc_Word32 height); - WebRtc_Word32 Render(const VideoFrame& frameToRender); + WebRtc_Word32 Render(const I420VideoFrame& frameToRender); WebRtc_Word32 SetCoordinates(WebRtc_Word32 zOrder, const float left, const float top, @@ -38,8 +38,8 @@ class VideoRenderOpenGles20 { GLuint loadShader(GLenum shaderType, const char* pSource); GLuint createProgram(const char* pVertexSource, const char* pFragmentSource); - void SetupTextures(const VideoFrame& frameToRender); - void UpdateTextures(const VideoFrame& frameToRender); + void SetupTextures(const I420VideoFrame& frameToRender); + void UpdateTextures(const I420VideoFrame& frameToRender); WebRtc_Word32 _id; GLuint _textureIds[3]; // Texture id of Y,U and V texture. 
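Note: with per-plane accessors, the GL path above no longer derives the U and V pointers by offset arithmetic on a packed buffer. The upload shape shared by SetupTextures()/UpdateTextures(), sketched with an illustrative helper (texture ids and frame are assumed to be in scope):

    #include <GLES2/gl2.h>
    #include <stdint.h>

    // Re-uploads one plane of an I420 frame into an existing
    // single-channel (luminance) texture.
    static void UploadPlane(GLenum unit, GLuint tex, GLsizei w, GLsizei h,
                            const uint8_t* data) {
      glActiveTexture(unit);
      glBindTexture(GL_TEXTURE_2D, tex);
      glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, w, h,
                      GL_LUMINANCE, GL_UNSIGNED_BYTE, data);
    }

    // Y is full resolution; U and V are half resolution in each dimension:
    //   UploadPlane(GL_TEXTURE0, ids[0], w, h, frame.buffer(kYPlane));
    //   UploadPlane(GL_TEXTURE1, ids[1], w / 2, h / 2, frame.buffer(kUPlane));
    //   UploadPlane(GL_TEXTURE2, ids[2], w / 2, h / 2, frame.buffer(kVPlane));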
diff --git a/webrtc/modules/video_render/main/source/external/video_render_external_impl.cc b/webrtc/modules/video_render/main/source/external/video_render_external_impl.cc index a46003376..59eb065d1 100644 --- a/webrtc/modules/video_render/main/source/external/video_render_external_impl.cc +++ b/webrtc/modules/video_render/main/source/external/video_render_external_impl.cc @@ -197,7 +197,7 @@ WebRtc_Word32 VideoRenderExternalImpl::SetBitmap(const void* bitMap, // VideoRenderCallback WebRtc_Word32 VideoRenderExternalImpl::RenderFrame( const WebRtc_UWord32 streamId, - VideoFrame& videoFrame) + I420VideoFrame& videoFrame) { return 0; } diff --git a/webrtc/modules/video_render/main/source/external/video_render_external_impl.h b/webrtc/modules/video_render/main/source/external/video_render_external_impl.h index 547c4105f..8d0785995 100644 --- a/webrtc/modules/video_render/main/source/external/video_render_external_impl.h +++ b/webrtc/modules/video_render/main/source/external/video_render_external_impl.h @@ -119,7 +119,7 @@ public: // VideoRenderCallback virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId, - VideoFrame& videoFrame); + I420VideoFrame& videoFrame); private: WebRtc_Word32 _id; diff --git a/webrtc/modules/video_render/main/source/incoming_video_stream.cc b/webrtc/modules/video_render/main/source/incoming_video_stream.cc index 637d60719..8088a1d24 100644 --- a/webrtc/modules/video_render/main/source/incoming_video_stream.cc +++ b/webrtc/modules/video_render/main/source/incoming_video_stream.cc @@ -89,11 +89,11 @@ VideoRenderCallback* IncomingVideoStream::ModuleCallback() { } WebRtc_Word32 IncomingVideoStream::RenderFrame(const WebRtc_UWord32 stream_id, - VideoFrame& video_frame) { + I420VideoFrame& video_frame) { CriticalSectionScoped csS(&stream_critsect_); WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_, "%s for stream %d, render time: %u", __FUNCTION__, stream_id_, - video_frame.RenderTimeMs()); + video_frame.render_time_ms()); if (!running_) { WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_, @@ -102,22 +102,20 @@ WebRtc_Word32 IncomingVideoStream::RenderFrame(const WebRtc_UWord32 stream_id, } if (true == mirror_frames_enabled_) { - transformed_video_frame_.VerifyAndAllocate(video_frame.Length()); + transformed_video_frame_.CreateEmptyFrame(video_frame.width(), + video_frame.height(), + video_frame.stride(kYPlane), + video_frame.stride(kUPlane), + video_frame.stride(kVPlane)); if (mirroring_.mirror_x_axis) { - transformed_video_frame_.SetLength(video_frame.Length()); - transformed_video_frame_.SetWidth(video_frame.Width()); - transformed_video_frame_.SetHeight(video_frame.Height()); MirrorI420UpDown(&video_frame, &transformed_video_frame_); - video_frame.SwapFrame(transformed_video_frame_); + video_frame.SwapFrame(&transformed_video_frame_); } if (mirroring_.mirror_y_axis) { - transformed_video_frame_.SetLength(video_frame.Length()); - transformed_video_frame_.SetWidth(video_frame.Width()); - transformed_video_frame_.SetHeight(video_frame.Height()); MirrorI420LeftRight(&video_frame, &transformed_video_frame_); - video_frame.SwapFrame(transformed_video_frame_); + video_frame.SwapFrame(&transformed_video_frame_); } } @@ -141,13 +139,13 @@ WebRtc_Word32 IncomingVideoStream::RenderFrame(const WebRtc_UWord32 stream_id, } WebRtc_Word32 IncomingVideoStream::SetStartImage( - const VideoFrame& video_frame) { + const I420VideoFrame& video_frame) { CriticalSectionScoped csS(&thread_critsect_); return start_image_.CopyFrame(video_frame); } WebRtc_Word32 
IncomingVideoStream::SetTimeoutImage( - const VideoFrame& video_frame, const WebRtc_UWord32 timeout) { + const I420VideoFrame& video_frame, const WebRtc_UWord32 timeout) { CriticalSectionScoped csS(&thread_critsect_); timeout_time_ = timeout; return timeout_image_.CopyFrame(video_frame); @@ -300,7 +298,7 @@ bool IncomingVideoStream::IncomingVideoStreamProcess() { } thread_critsect_.Enter(); - VideoFrame* frame_to_render = NULL; + I420VideoFrame* frame_to_render = NULL; // Get a new frame to render and the time for the frame after this one. buffer_critsect_.Enter(); @@ -316,13 +314,13 @@ bool IncomingVideoStream::IncomingVideoStreamProcess() { if (!frame_to_render) { if (render_callback_) { - if (last_rendered_frame_.RenderTimeMs() == 0 && - start_image_.Size()) { + if (last_rendered_frame_.render_time_ms() == 0 && + !start_image_.IsZeroSize()) { // We have not rendered anything and have a start image. temp_frame_.CopyFrame(start_image_); render_callback_->RenderFrame(stream_id_, temp_frame_); - } else if (timeout_image_.Size() && - last_rendered_frame_.RenderTimeMs() + timeout_time_ < + } else if (!timeout_image_.IsZeroSize() && + last_rendered_frame_.render_time_ms() + timeout_time_ < TickTime::MillisecondTimestamp()) { // Render a timeout image. temp_frame_.CopyFrame(timeout_image_); @@ -339,13 +337,13 @@ bool IncomingVideoStream::IncomingVideoStreamProcess() { if (external_callback_) { WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_, "%s: executing external renderer callback to deliver frame", - __FUNCTION__, frame_to_render->RenderTimeMs()); + __FUNCTION__, frame_to_render->render_time_ms()); external_callback_->RenderFrame(stream_id_, *frame_to_render); } else { if (render_callback_) { WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_, "%s: Render frame, time: ", __FUNCTION__, - frame_to_render->RenderTimeMs()); + frame_to_render->render_time_ms()); render_callback_->RenderFrame(stream_id_, *frame_to_render); } } @@ -356,7 +354,7 @@ bool IncomingVideoStream::IncomingVideoStreamProcess() { // We're done with this frame, delete it. if (frame_to_render) { CriticalSectionScoped cs(&buffer_critsect_); - last_rendered_frame_.SwapFrame(*frame_to_render); + last_rendered_frame_.SwapFrame(frame_to_render); render_buffers_.ReturnFrame(frame_to_render); } } @@ -364,7 +362,7 @@ bool IncomingVideoStream::IncomingVideoStreamProcess() { } WebRtc_Word32 IncomingVideoStream::GetLastRenderedFrame( - VideoFrame& video_frame) const { + I420VideoFrame& video_frame) const { CriticalSectionScoped cs(&buffer_critsect_); return video_frame.CopyFrame(last_rendered_frame_); } diff --git a/webrtc/modules/video_render/main/source/incoming_video_stream.h b/webrtc/modules/video_render/main/source/incoming_video_stream.h index a2f28fc76..130341188 100644 --- a/webrtc/modules/video_render/main/source/incoming_video_stream.h +++ b/webrtc/modules/video_render/main/source/incoming_video_stream.h @@ -38,7 +38,7 @@ class IncomingVideoStream : public VideoRenderCallback { // Get callback to deliver frames to the module. VideoRenderCallback* ModuleCallback(); virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 stream_id, - VideoFrame& video_frame); + I420VideoFrame& video_frame); // Set callback to the platform dependent code. 
WebRtc_Word32 SetRenderCallback(VideoRenderCallback* render_callback); @@ -57,11 +57,11 @@ class IncomingVideoStream : public VideoRenderCallback { WebRtc_UWord32 StreamId() const; WebRtc_UWord32 IncomingRate() const; - WebRtc_Word32 GetLastRenderedFrame(VideoFrame& video_frame) const; + WebRtc_Word32 GetLastRenderedFrame(I420VideoFrame& video_frame) const; - WebRtc_Word32 SetStartImage(const VideoFrame& video_frame); + WebRtc_Word32 SetStartImage(const I420VideoFrame& video_frame); - WebRtc_Word32 SetTimeoutImage(const VideoFrame& video_frame, + WebRtc_Word32 SetTimeoutImage(const I420VideoFrame& video_frame, const WebRtc_UWord32 timeout); WebRtc_Word32 EnableMirroring(const bool enable, @@ -100,15 +100,15 @@ class IncomingVideoStream : public VideoRenderCallback { WebRtc_UWord32 incoming_rate_; WebRtc_Word64 last_rate_calculation_time_ms_; WebRtc_UWord16 num_frames_since_last_calculation_; - VideoFrame last_rendered_frame_; - VideoFrame temp_frame_; - VideoFrame start_image_; - VideoFrame timeout_image_; + I420VideoFrame last_rendered_frame_; + I420VideoFrame temp_frame_; + I420VideoFrame start_image_; + I420VideoFrame timeout_image_; WebRtc_UWord32 timeout_time_; bool mirror_frames_enabled_; VideoMirroring mirroring_; - VideoFrame transformed_video_frame_; + I420VideoFrame transformed_video_frame_; }; } // namespace webrtc diff --git a/webrtc/modules/video_render/main/source/linux/video_x11_channel.cc b/webrtc/modules/video_render/main/source/linux/video_x11_channel.cc index e14374071..547a813df 100644 --- a/webrtc/modules/video_render/main/source/linux/video_x11_channel.cc +++ b/webrtc/modules/video_render/main/source/linux/video_x11_channel.cc @@ -44,11 +44,11 @@ VideoX11Channel::~VideoX11Channel() } WebRtc_Word32 VideoX11Channel::RenderFrame(const WebRtc_UWord32 streamId, - VideoFrame& videoFrame) { + I420VideoFrame& videoFrame) { CriticalSectionScoped cs(&_crit); - if (_width != (WebRtc_Word32) videoFrame.Width() || _height - != (WebRtc_Word32) videoFrame.Height()) { - if (FrameSizeChange(videoFrame.Width(), videoFrame.Height(), 1) == -1) { + if (_width != videoFrame.width() || _height + != videoFrame.height()) { + if (FrameSizeChange(videoFrame.width(), videoFrame.height(), 1) == -1) { return -1; } } @@ -72,7 +72,7 @@ WebRtc_Word32 VideoX11Channel::FrameSizeChange(WebRtc_Word32 width, return 0; } -WebRtc_Word32 VideoX11Channel::DeliverFrame(const VideoFrame& videoFrame) { +WebRtc_Word32 VideoX11Channel::DeliverFrame(const I420VideoFrame& videoFrame) { CriticalSectionScoped cs(&_crit); if (!_prepared) { return 0; @@ -82,8 +82,7 @@ WebRtc_Word32 VideoX11Channel::DeliverFrame(const VideoFrame& videoFrame) { return -1; } - // convert to RGB32, setting stride = width. - ConvertFromI420(videoFrame, _width, kARGB, 0, _buffer); + ConvertFromI420(videoFrame, kARGB, 0, _buffer); // Put image in window. 
XShmPutImage(_display, _window, _gc, _image, 0, 0, _xPos, _yPos, _width, diff --git a/webrtc/modules/video_render/main/source/linux/video_x11_channel.h b/webrtc/modules/video_render/main/source/linux/video_x11_channel.h index a7ed4127b..d22b0fe50 100644 --- a/webrtc/modules/video_render/main/source/linux/video_x11_channel.h +++ b/webrtc/modules/video_render/main/source/linux/video_x11_channel.h @@ -34,11 +34,11 @@ public: virtual ~VideoX11Channel(); virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId, - VideoFrame& videoFrame); + I420VideoFrame& videoFrame); WebRtc_Word32 FrameSizeChange(WebRtc_Word32 width, WebRtc_Word32 height, WebRtc_Word32 numberOfStreams); - WebRtc_Word32 DeliverFrame(const VideoFrame& videoFrame); + WebRtc_Word32 DeliverFrame(const I420VideoFrame& videoFrame); WebRtc_Word32 GetFrameSize(WebRtc_Word32& width, WebRtc_Word32& height); WebRtc_Word32 Init(Window window, float left, float top, float right, float bottom); diff --git a/webrtc/modules/video_render/main/source/mac/video_render_agl.cc b/webrtc/modules/video_render/main/source/mac/video_render_agl.cc index 80d340df0..261b76be1 100644 --- a/webrtc/modules/video_render/main/source/mac/video_render_agl.cc +++ b/webrtc/modules/video_render/main/source/mac/video_render_agl.cc @@ -81,11 +81,11 @@ VideoChannelAGL::~VideoChannelAGL() } WebRtc_Word32 VideoChannelAGL::RenderFrame(const WebRtc_UWord32 streamId, - VideoFrame& videoFrame) { + I420VideoFrame& videoFrame) { _owner->LockAGLCntx(); - if (_width != videoFrame.Width() || - _height != videoFrame.Height()) { - if (FrameSizeChange(videoFrame.Width(), videoFrame.Height(), 1) == -1) { + if (_width != videoFrame.width() || + _height != videoFrame.height()) { + if (FrameSizeChange(videoFrame.width(), videoFrame.height(), 1) == -1) { WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d FrameSize Change returned an error", __FUNCTION__, __LINE__); _owner->UnlockAGLCntx(); @@ -220,7 +220,7 @@ int VideoChannelAGL::FrameSizeChange(int width, int height, int numberOfStreams) } // Called from video engine when a new frame should be rendered. -int VideoChannelAGL::DeliverFrame(const VideoFrame& videoFrame) { +int VideoChannelAGL::DeliverFrame(const I420VideoFrame& videoFrame) { _owner->LockAGLCntx(); if (_texture == 0) { @@ -228,14 +228,14 @@ int VideoChannelAGL::DeliverFrame(const VideoFrame& videoFrame) { return 0; } - if (bufferSize != _incommingBufferSize) { + int length = CalcBufferSize(kI420, videoFrame.width(), videoFrame.height()); + if (length != _incommingBufferSize) { _owner->UnlockAGLCntx(); return -1; } // Setting stride = width. 
- int rgbret = ConvertFromYV12(videoFrame.Buffer(), _width, kBGRA, 0, _width, - _height, _buffer); + int rgbret = ConvertFromYV12(videoFrame, kBGRA, 0, _buffer); if (rgbret < 0) { _owner->UnlockAGLCntx(); return -1; diff --git a/webrtc/modules/video_render/main/source/mac/video_render_agl.h b/webrtc/modules/video_render/main/source/mac/video_render_agl.h index 58302f13c..05bf0b668 100644 --- a/webrtc/modules/video_render/main/source/mac/video_render_agl.h +++ b/webrtc/modules/video_render/main/source/mac/video_render_agl.h @@ -45,14 +45,15 @@ public: VideoChannelAGL(AGLContext& aglContext, int iId, VideoRenderAGL* owner); virtual ~VideoChannelAGL(); virtual int FrameSizeChange(int width, int height, int numberOfStreams); - virtual int DeliverFrame(const VideoFrame& videoFrame); + virtual int DeliverFrame(const I420VideoFrame& videoFrame); virtual int UpdateSize(int width, int height); int SetStreamSettings(int streamId, float startWidth, float startHeight, float stopWidth, float stopHeight); int SetStreamCropSettings(int streamId, float startWidth, float startHeight, float stopWidth, float stopHeight); int RenderOffScreenBuffer(); int IsUpdated(bool& isUpdated); - virtual int UpdateStretchSize(int stretchHeight, int stretchWidth); - virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId, VideoFrame& videoFrame); + virtual int UpdateStretchSize(int stretchHeight, int stretchWidth); + virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId, + I420VideoFrame& videoFrame); private: diff --git a/webrtc/modules/video_render/main/source/mac/video_render_nsopengl.h b/webrtc/modules/video_render/main/source/mac/video_render_nsopengl.h index be1fb7509..63e479ba0 100644 --- a/webrtc/modules/video_render/main/source/mac/video_render_nsopengl.h +++ b/webrtc/modules/video_render/main/source/mac/video_render_nsopengl.h @@ -44,7 +44,7 @@ public: virtual ~VideoChannelNSOpenGL(); // A new frame is delivered - virtual int DeliverFrame(const VideoFrame& videoFrame); + virtual int DeliverFrame(const I420VideoFrame& videoFrame); // Called when the incomming frame size and/or number of streams in mix changes virtual int FrameSizeChange(int width, int height, int numberOfStreams); @@ -63,7 +63,8 @@ public: virtual int UpdateStretchSize(int stretchHeight, int stretchWidth); // ********** new module functions ************ // - virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId, VideoFrame& videoFrame); + virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId, + I420VideoFrame& videoFrame); // ********** new module helper functions ***** // int ChangeContext(NSOpenGLContext *nsglContext); diff --git a/webrtc/modules/video_render/main/source/mac/video_render_nsopengl.mm b/webrtc/modules/video_render/main/source/mac/video_render_nsopengl.mm index 7727784d0..69642288e 100644 --- a/webrtc/modules/video_render/main/source/mac/video_render_nsopengl.mm +++ b/webrtc/modules/video_render/main/source/mac/video_render_nsopengl.mm @@ -92,13 +92,13 @@ WebRtc_Word32 VideoChannelNSOpenGL::GetChannelProperties(float& left, } WebRtc_Word32 VideoChannelNSOpenGL::RenderFrame( - const WebRtc_UWord32 /*streamId*/, VideoFrame& videoFrame) { + const WebRtc_UWord32 /*streamId*/, I420VideoFrame& videoFrame) { _owner->LockAGLCntx(); - if(_width != (int)videoFrame.Width() || - _height != (int)videoFrame.Height()) { - if(FrameSizeChange(videoFrame.Width(), videoFrame.Height(), 1) == -1) { + if(_width != videoFrame.width() || + _height != videoFrame.height()) { + if(FrameSizeChange(videoFrame.width(), 
videoFrame.height(), 1) == -1) { _owner->UnlockAGLCntx(); return -1; } @@ -208,7 +208,7 @@ int VideoChannelNSOpenGL::FrameSizeChange(int width, int height, int numberOfStr return 0; } -int VideoChannelNSOpenGL::DeliverFrame(const VideoFrame& videoFrame) { +int VideoChannelNSOpenGL::DeliverFrame(const I420VideoFrame& videoFrame) { _owner->LockAGLCntx(); @@ -217,13 +217,17 @@ int VideoChannelNSOpenGL::DeliverFrame(const VideoFrame& videoFrame) { return 0; } - if (static_cast<int>(videoFrame.Length()) != _incommingBufferSize) { + int length = CalcBufferSize(kI420, videoFrame.width(), videoFrame.height()); + if (length != _incommingBufferSize) { _owner->UnlockAGLCntx(); return -1; } - int rgbRet = ConvertFromYV12(videoFrame.Buffer(), _width, - kBGRA, 0, _width, _height, _buffer); + // Using the I420VideoFrame for YV12: YV12 is YVU; I420 assumes + // YUV. + // TODO(mikhal) : Use appropriate functionality. + // TODO(wu): See if we are using glTexSubImage2D correctly. + int rgbRet = ConvertFromYV12(videoFrame, kBGRA, 0, _buffer); if (rgbRet < 0) { _owner->UnlockAGLCntx(); return -1; diff --git a/webrtc/modules/video_render/main/source/video_render_frames.cc b/webrtc/modules/video_render/main/source/video_render_frames.cc index 6237e460d..adef341fd 100644 --- a/webrtc/modules/video_render/main/source/video_render_frames.cc +++ b/webrtc/modules/video_render/main/source/video_render_frames.cc @@ -31,26 +31,26 @@ VideoRenderFrames::~VideoRenderFrames() { ReleaseAllFrames(); } -WebRtc_Word32 VideoRenderFrames::AddFrame(VideoFrame* new_frame) { +WebRtc_Word32 VideoRenderFrames::AddFrame(I420VideoFrame* new_frame) { const WebRtc_Word64 time_now = TickTime::MillisecondTimestamp(); - if (new_frame->RenderTimeMs() + KOldRenderTimestampMS < time_now) { + if (new_frame->render_time_ms() + KOldRenderTimestampMS < time_now) { WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1, "%s: too old frame.", __FUNCTION__); return -1; } - if (new_frame->RenderTimeMs() > time_now + KFutureRenderTimestampMS) { + if (new_frame->render_time_ms() > time_now + KFutureRenderTimestampMS) { WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1, "%s: frame too long into the future.", __FUNCTION__); return -1; } // Get an empty frame - VideoFrame* frame_to_add = NULL; + I420VideoFrame* frame_to_add = NULL; if (!empty_frames_.Empty()) { ListItem* item = empty_frames_.First(); if (item) { - frame_to_add = static_cast<VideoFrame*>(item->GetItem()); + frame_to_add = static_cast<I420VideoFrame*>(item->GetItem()); empty_frames_.Erase(item); } } @@ -69,7 +69,7 @@ WebRtc_Word32 VideoRenderFrames::AddFrame(VideoFrame* new_frame) { "%s: allocating buffer %d", __FUNCTION__, empty_frames_.GetSize() + incoming_frames_.GetSize()); - frame_to_add = new VideoFrame(); + frame_to_add = new I420VideoFrame(); if (!frame_to_add) { WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1, "%s: could not create new frame for", __FUNCTION__); @@ -77,32 +77,33 @@ WebRtc_Word32 VideoRenderFrames::AddFrame(VideoFrame* new_frame) { } } - frame_to_add->VerifyAndAllocate(new_frame->Length()); + frame_to_add->CreateEmptyFrame(new_frame->width(), new_frame->height(), + new_frame->stride(kYPlane), + new_frame->stride(kUPlane), + new_frame->stride(kVPlane)); // TODO(mflodman) Change this! // Remove const ness. Copying will be costly.
- frame_to_add->SwapFrame(const_cast<VideoFrame&>(*new_frame)); + frame_to_add->SwapFrame(new_frame); incoming_frames_.PushBack(frame_to_add); return incoming_frames_.GetSize(); } -VideoFrame* VideoRenderFrames::FrameToRender() { - VideoFrame* render_frame = NULL; +I420VideoFrame* VideoRenderFrames::FrameToRender() { + I420VideoFrame* render_frame = NULL; while (!incoming_frames_.Empty()) { ListItem* item = incoming_frames_.First(); if (item) { - VideoFrame* oldest_frame_in_list = - static_cast<VideoFrame*>(item->GetItem()); - if (oldest_frame_in_list->RenderTimeMs() <= + I420VideoFrame* oldest_frame_in_list = + static_cast<I420VideoFrame*>(item->GetItem()); + if (oldest_frame_in_list->render_time_ms() <= TickTime::MillisecondTimestamp() + render_delay_ms_) { // This is the oldest one so far and it's OK to render. if (render_frame) { // This one is older than the newly found frame, remove this one. - render_frame->SetWidth(0); - render_frame->SetHeight(0); - render_frame->SetLength(0); - render_frame->SetRenderTime(0); - render_frame->SetTimeStamp(0); + render_frame->ResetSize(); + render_frame->set_timestamp(0); + render_frame->set_render_time_ms(0); empty_frames_.PushFront(render_frame); } render_frame = oldest_frame_in_list; @@ -118,11 +119,10 @@ VideoFrame* VideoRenderFrames::FrameToRender() { return render_frame; } -WebRtc_Word32 VideoRenderFrames::ReturnFrame(VideoFrame* old_frame) { - old_frame->SetWidth(0); - old_frame->SetHeight(0); - old_frame->SetRenderTime(0); - old_frame->SetLength(0); +WebRtc_Word32 VideoRenderFrames::ReturnFrame(I420VideoFrame* old_frame) { + old_frame->ResetSize(); + old_frame->set_timestamp(0); + old_frame->set_render_time_ms(0); empty_frames_.PushBack(old_frame); return 0; } @@ -131,9 +131,8 @@ WebRtc_Word32 VideoRenderFrames::ReleaseAllFrames() { while (!incoming_frames_.Empty()) { ListItem* item = incoming_frames_.First(); if (item) { - VideoFrame* frame = static_cast<VideoFrame*>(item->GetItem()); + I420VideoFrame* frame = static_cast<I420VideoFrame*>(item->GetItem()); assert(frame != NULL); - frame->Free(); delete frame; } incoming_frames_.Erase(item); @@ -141,9 +140,8 @@ WebRtc_Word32 VideoRenderFrames::ReleaseAllFrames() { while (!empty_frames_.Empty()) { ListItem* item = empty_frames_.First(); if (item) { - VideoFrame* frame = static_cast<VideoFrame*>(item->GetItem()); + I420VideoFrame* frame = static_cast<I420VideoFrame*>(item->GetItem()); assert(frame != NULL); - frame->Free(); delete frame; } empty_frames_.Erase(item); @@ -155,8 +153,9 @@ WebRtc_UWord32 VideoRenderFrames::TimeToNextFrameRelease() { WebRtc_Word64 time_to_release = 0; ListItem* item = incoming_frames_.First(); if (item) { - VideoFrame* oldest_frame = static_cast<VideoFrame*>(item->GetItem()); - time_to_release = oldest_frame->RenderTimeMs() - render_delay_ms_ + I420VideoFrame* oldest_frame = + static_cast<I420VideoFrame*>(item->GetItem()); + time_to_release = oldest_frame->render_time_ms() - render_delay_ms_ - TickTime::MillisecondTimestamp(); if (time_to_release < 0) { time_to_release = 0; diff --git a/webrtc/modules/video_render/main/source/video_render_frames.h b/webrtc/modules/video_render/main/source/video_render_frames.h index bdacc3672..44c1d1e87 100644 --- a/webrtc/modules/video_render/main/source/video_render_frames.h +++ b/webrtc/modules/video_render/main/source/video_render_frames.h @@ -23,13 +23,13 @@ class VideoRenderFrames { ~VideoRenderFrames(); // Add a frame to the render queue - WebRtc_Word32 AddFrame(VideoFrame* new_frame); + WebRtc_Word32 AddFrame(I420VideoFrame* new_frame); // Get a frame for rendering, if it's time to render.
- VideoFrame* FrameToRender(); + I420VideoFrame* FrameToRender(); // Return an old frame - WebRtc_Word32 ReturnFrame(VideoFrame* old_frame); + WebRtc_Word32 ReturnFrame(I420VideoFrame* old_frame); // Releases all frames WebRtc_Word32 ReleaseAllFrames(); diff --git a/webrtc/modules/video_render/main/source/video_render_impl.cc b/webrtc/modules/video_render/main/source/video_render_impl.cc index a162ef62a..6daf3086e 100644 --- a/webrtc/modules/video_render/main/source/video_render_impl.cc +++ b/webrtc/modules/video_render/main/source/video_render_impl.cc @@ -813,8 +813,8 @@ WebRtc_Word32 ModuleVideoRenderImpl::SetBitmap(const void* bitMap, } WebRtc_Word32 ModuleVideoRenderImpl::GetLastRenderedFrame( - const WebRtc_UWord32 streamId, - VideoFrame &frame) const + const WebRtc_UWord32 streamId, + I420VideoFrame &frame) const { CriticalSectionScoped cs(&_moduleCrit); @@ -897,8 +897,8 @@ WebRtc_Word32 ModuleVideoRenderImpl::ConfigureRenderer( } WebRtc_Word32 ModuleVideoRenderImpl::SetStartImage( - const WebRtc_UWord32 streamId, - const VideoFrame& videoFrame) + const WebRtc_UWord32 streamId, + const I420VideoFrame& videoFrame) { CriticalSectionScoped cs(&_moduleCrit); @@ -931,9 +931,9 @@ WebRtc_Word32 ModuleVideoRenderImpl::SetStartImage( } WebRtc_Word32 ModuleVideoRenderImpl::SetTimeoutImage( - const WebRtc_UWord32 streamId, - const VideoFrame& videoFrame, - const WebRtc_UWord32 timeout) + const WebRtc_UWord32 streamId, + const I420VideoFrame& videoFrame, + const WebRtc_UWord32 timeout) { CriticalSectionScoped cs(&_moduleCrit); diff --git a/webrtc/modules/video_render/main/source/video_render_impl.h b/webrtc/modules/video_render/main/source/video_render_impl.h index 2665cfb1b..25a55e928 100644 --- a/webrtc/modules/video_render/main/source/video_render_impl.h +++ b/webrtc/modules/video_render/main/source/video_render_impl.h @@ -117,7 +117,7 @@ public: VideoRenderCallback* callbackObj); virtual WebRtc_Word32 GetLastRenderedFrame(const WebRtc_UWord32 streamId, - VideoFrame &frame) const; + I420VideoFrame &frame) const; virtual WebRtc_Word32 SetExpectedRenderDelay(WebRtc_UWord32 stream_id, WebRtc_Word32 delay_ms); @@ -205,10 +205,10 @@ public: const float right, const float bottom); virtual WebRtc_Word32 SetStartImage(const WebRtc_UWord32 streamId, - const VideoFrame& videoFrame); + const I420VideoFrame& videoFrame); virtual WebRtc_Word32 SetTimeoutImage(const WebRtc_UWord32 streamId, - const VideoFrame& videoFrame, + const I420VideoFrame& videoFrame, const WebRtc_UWord32 timeout); virtual WebRtc_Word32 MirrorRenderStream(const int renderId, diff --git a/webrtc/modules/video_render/main/source/windows/video_render_direct3d9.cc b/webrtc/modules/video_render/main/source/windows/video_render_direct3d9.cc index a83a1be8a..60d9902c5 100644 --- a/webrtc/modules/video_render/main/source/windows/video_render_direct3d9.cc +++ b/webrtc/modules/video_render/main/source/windows/video_render_direct3d9.cc @@ -143,12 +143,12 @@ int D3D9Channel::FrameSizeChange(int width, int height, int numberOfStreams) } WebRtc_Word32 D3D9Channel::RenderFrame(const WebRtc_UWord32 streamId, - VideoFrame& videoFrame) + I420VideoFrame& videoFrame) { CriticalSectionScoped cs(_critSect); - if (_width != videoFrame.Width() || _height != videoFrame.Height()) + if (_width != videoFrame.width() || _height != videoFrame.height()) { - if (FrameSizeChange(videoFrame.Width(), videoFrame.Height(), 1) == -1) + if (FrameSizeChange(videoFrame.width(), videoFrame.height(), 1) == -1) { return -1; } @@ -157,7 +157,7 @@ WebRtc_Word32 
D3D9Channel::RenderFrame(const WebRtc_UWord32 streamId, } // Called from video engine when a new frame should be rendered. -int D3D9Channel::DeliverFrame(const VideoFrame& videoFrame) { +int D3D9Channel::DeliverFrame(const I420VideoFrame& videoFrame) { WEBRTC_TRACE(kTraceStream, kTraceVideo, -1, "DeliverFrame to D3D9Channel"); @@ -192,7 +192,7 @@ int D3D9Channel::DeliverFrame(const VideoFrame& videoFrame) { } UCHAR* pRect = (UCHAR*) lr.pBits; - ConvertFromI420(videoFrame, _width, kARGB, 0, pRect); + ConvertFromI420(videoFrame, kARGB, 0, pRect); if (FAILED(_pTexture->UnlockRect(0))) { WEBRTC_TRACE(kTraceError, kTraceVideo, -1, diff --git a/webrtc/modules/video_render/main/source/windows/video_render_direct3d9.h b/webrtc/modules/video_render/main/source/windows/video_render_direct3d9.h index 84303939f..7f88d0a52 100644 --- a/webrtc/modules/video_render/main/source/windows/video_render_direct3d9.h +++ b/webrtc/modules/video_render/main/source/windows/video_render_direct3d9.h @@ -44,9 +44,9 @@ public: virtual int FrameSizeChange(int width, int height, int numberOfStreams); // A new frame is delivered. - virtual int DeliverFrame(const VideoFrame& videoFrame); + virtual int DeliverFrame(const I420VideoFrame& videoFrame); virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId, - VideoFrame& videoFrame); + I420VideoFrame& videoFrame); // Called to check if the video frame is updated. int IsUpdated(bool& isUpdated); diff --git a/webrtc/modules/video_render/main/test/testAPI/testAPI.cc b/webrtc/modules/video_render/main/test/testAPI/testAPI.cc index 5d7a67d12..975453b02 100644 --- a/webrtc/modules/video_render/main/test/testAPI/testAPI.cc +++ b/webrtc/modules/video_render/main/test/testAPI/testAPI.cc @@ -42,9 +42,7 @@ using namespace webrtc; -void GetTestVideoFrame(WebRtc_UWord8* frame, - WebRtc_Word32 width, - WebRtc_Word32 height, +void GetTestVideoFrame(I420VideoFrame* frame, WebRtc_UWord8 startColor); int TestSingleStream(VideoRender* renderModule); int TestFullscreenStream(VideoRender* &renderModule, @@ -264,7 +262,7 @@ public: } ; virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId, - VideoFrame& videoFrame) + I420VideoFrame& videoFrame) { _cnt++; if (_cnt % 100 == 0) @@ -276,33 +274,16 @@ public: WebRtc_Word32 _cnt; }; -void GetTestVideoFrame(WebRtc_UWord8* frame, - WebRtc_Word32 width, - WebRtc_Word32 height, +void GetTestVideoFrame(I420VideoFrame* frame, WebRtc_UWord8 startColor) { // changing color static WebRtc_UWord8 color = startColor; - WebRtc_UWord8* destY = frame; - WebRtc_UWord8* destU = &frame[width*height]; - WebRtc_UWord8* destV = &frame[width*height*5/4]; - //Y - for (WebRtc_Word32 y=0; y<(width*height); y++) - { - destY[y] = color; - } - //U - for (WebRtc_Word32 u=0; u<(width*height/4); u++) - { - destU[u] = color; - } - //V - for (WebRtc_Word32 v=0; v<(width*height/4); v++) - { - destV[v] = color; - } + memset(frame->buffer(kYPlane), color, frame->allocated_size(kYPlane)); + memset(frame->buffer(kUPlane), color, frame->allocated_size(kUPlane)); + memset(frame->buffer(kVPlane), color, frame->allocated_size(kVPlane)); - color++; + ++color; } int TestSingleStream(VideoRender* renderModule) { @@ -328,26 +309,24 @@ int TestSingleStream(VideoRender* renderModule) { } // Loop through an I420 file and render each frame - const WebRtc_UWord32 width = 352; - const WebRtc_UWord32 height = 288; - const WebRtc_UWord32 numBytes = (WebRtc_UWord32)(1.5 * width * height); + const int width = 352; + const int half_width = (width + 1) / 2; + const int height = 288; - 
VideoFrame videoFrame0; - videoFrame0.VerifyAndAllocate(numBytes); + I420VideoFrame videoFrame0; + videoFrame0.CreateEmptyFrame(width, height, width, half_width, half_width); const WebRtc_UWord32 renderDelayMs = 500; for (int i=0; iRenderFrame(streamId0, videoFrame0); SleepMs(1000/TEST_FRAME_RATE); } - videoFrame0.Free(); // Shut down printf("Closing...\n"); @@ -403,26 +382,24 @@ int TestBitmapText(VideoRender* renderModule) { error = renderModule->StartRender(streamId0); assert(error == 0); - // Loop through an I420 file and render each frame - const WebRtc_UWord32 width = 352; - const WebRtc_UWord32 height = 288; - const WebRtc_UWord32 numBytes = (WebRtc_UWord32)(1.5 * width * height); + // Loop through an I420 file and render each frame + const int width = 352; + const int half_width = (width + 1) / 2; + const int height = 288; - VideoFrame videoFrame0; - videoFrame0.VerifyAndAllocate(numBytes); + I420VideoFrame videoFrame0; + videoFrame0.CreateEmptyFrame(width, height, width, half_width, half_width); const WebRtc_UWord32 renderDelayMs = 500; for (int i=0; iRenderFrame(streamId0, videoFrame0); SleepMs(1000/TEST_FRAME_RATE); } - videoFrame0.Free(); // Sleep and let all frames be rendered before closing SleepMs(renderDelayMs*2); @@ -473,58 +450,47 @@ int TestMultipleStreams(VideoRender* renderModule) { assert(renderModule->StartRender(streamId3) == 0); // Loop through an I420 file and render each frame - const WebRtc_UWord32 width = 352; - const WebRtc_UWord32 height = 288; - const WebRtc_UWord32 numBytes = (WebRtc_UWord32)(1.5 * width * height); + const int width = 352; + const int half_width = (width + 1) / 2; + const int height = 288; - VideoFrame videoFrame0; - videoFrame0.VerifyAndAllocate(numBytes); - VideoFrame videoFrame1; - videoFrame1.VerifyAndAllocate(numBytes); - VideoFrame videoFrame2; - videoFrame2.VerifyAndAllocate(numBytes); - VideoFrame videoFrame3; - videoFrame3.VerifyAndAllocate(numBytes); + I420VideoFrame videoFrame0; + videoFrame0.CreateEmptyFrame(width, height, width, half_width, half_width); + I420VideoFrame videoFrame1; + videoFrame1.CreateEmptyFrame(width, height, width, half_width, half_width); + I420VideoFrame videoFrame2; + videoFrame2.CreateEmptyFrame(width, height, width, half_width, half_width); + I420VideoFrame videoFrame3; + videoFrame3.CreateEmptyFrame(width, height, width, half_width, half_width); const WebRtc_UWord32 renderDelayMs = 500; + // Render frames with the specified delay. 
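// ---------------------------------------------------------------------------
// [Editor's sketch] The tests above size their frames with
// CreateEmptyFrame(width, height, width, half_width, half_width), where
// half_width = (width + 1) / 2. The arithmetic behind those arguments as a
// small self-checking program; I420Sizes is an illustrative helper, not a
// WebRTC type.
// ---------------------------------------------------------------------------
#include <cassert>
#include <cstring>
#include <vector>

struct I420Sizes {
  int size_y;   // Luma plane: width * height bytes.
  int size_uv;  // Each chroma plane: ceil(w/2) * ceil(h/2) bytes.
};

I420Sizes CalcI420Sizes(int width, int height) {
  const int half_width = (width + 1) / 2;
  const int half_height = (height + 1) / 2;
  return {width * height, half_width * half_height};
}

int main() {
  const int width = 352, height = 288;  // CIF, as in the tests.
  const I420Sizes s = CalcI420Sizes(width, height);
  assert(s.size_y == 101376);  // 352 * 288.
  assert(s.size_uv == 25344);  // 176 * 144, per chroma plane.

  // Equivalent of the new GetTestVideoFrame(): memset each plane to a single
  // value, using allocated_size-style plane lengths.
  std::vector<unsigned char> y(s.size_y), u(s.size_uv), v(s.size_uv);
  const unsigned char color = 66;
  std::memset(y.data(), color, y.size());
  std::memset(u.data(), color, u.size());
  std::memset(v.data(), color, v.size());
  return 0;
}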
for (int i=0; iRenderFrame(streamId0, videoFrame0); + GetTestVideoFrame(&videoFrame0, TEST_STREAM0_START_COLOR); - GetTestVideoFrame(videoFrame1.Buffer(), width, height, TEST_STREAM1_START_COLOR); - videoFrame1.SetRenderTime(TickTime::MillisecondTimestamp() + renderDelayMs); // Render this frame with the specified delay - videoFrame1.SetWidth(width); - videoFrame1.SetHeight(height); - videoFrame1.SetLength(numBytes); - renderCallback1->RenderFrame(streamId1, videoFrame1); + videoFrame0.set_render_time_ms(TickTime::MillisecondTimestamp() + + renderDelayMs); + renderCallback0->RenderFrame(streamId0, videoFrame0); - GetTestVideoFrame(videoFrame2.Buffer(), width, height, TEST_STREAM2_START_COLOR); - videoFrame2.SetRenderTime(TickTime::MillisecondTimestamp() + renderDelayMs); // Render this frame with the specified delay - videoFrame2.SetWidth(width); - videoFrame2.SetHeight(height); - videoFrame2.SetLength(numBytes); - renderCallback2->RenderFrame(streamId2, videoFrame2); + GetTestVideoFrame(&videoFrame1, TEST_STREAM1_START_COLOR); + videoFrame1.set_render_time_ms(TickTime::MillisecondTimestamp() + + renderDelayMs); + renderCallback1->RenderFrame(streamId1, videoFrame1); - GetTestVideoFrame(videoFrame3.Buffer(), width, height, TEST_STREAM3_START_COLOR); - videoFrame3.SetRenderTime(TickTime::MillisecondTimestamp() + renderDelayMs); // Render this frame with the specified delay - videoFrame3.SetWidth(width); - videoFrame3.SetHeight(height); - videoFrame3.SetLength(numBytes); - renderCallback3->RenderFrame(streamId3, videoFrame3); + GetTestVideoFrame(&videoFrame2, TEST_STREAM2_START_COLOR); + videoFrame2.set_render_time_ms(TickTime::MillisecondTimestamp() + + renderDelayMs); + renderCallback2->RenderFrame(streamId2, videoFrame2); - SleepMs(1000/TEST_FRAME_RATE); + GetTestVideoFrame(&videoFrame3, TEST_STREAM3_START_COLOR); + videoFrame3.set_render_time_ms(TickTime::MillisecondTimestamp() + + renderDelayMs); + renderCallback3->RenderFrame(streamId3, videoFrame3); + + SleepMs(1000/TEST_FRAME_RATE); } - videoFrame0.Free(); - videoFrame1.Free(); - videoFrame2.Free(); - videoFrame3.Free(); - // Shut down printf("Closing...\n"); assert(renderModule->StopRender(streamId0) == 0); @@ -552,25 +518,23 @@ int TestExternalRender(VideoRender* renderModule) { assert(renderModule->StartRender(streamId0) == 0); - const WebRtc_UWord32 width = 352; - const WebRtc_UWord32 height = 288; - const WebRtc_UWord32 numBytes = (WebRtc_UWord32) (1.5 * width * height); - VideoFrame videoFrame0; - videoFrame0.VerifyAndAllocate(numBytes); + const int width = 352; + const int half_width = (width + 1) / 2; + const int height = 288; + I420VideoFrame videoFrame0; + videoFrame0.CreateEmptyFrame(width, height, width, half_width, half_width); const WebRtc_UWord32 renderDelayMs = 500; int frameCount = TEST_FRAME_NUM; for (int i=0; iRenderFrame(streamId0, videoFrame0); SleepMs(33); } // Sleep and let all frames be rendered before closing SleepMs(2*renderDelayMs); - videoFrame0.Free(); assert(renderModule->StopRender(streamId0) == 0); assert(renderModule->DeleteIncomingRenderStream(streamId0) == 0); diff --git a/webrtc/test/metrics.gyp b/webrtc/test/metrics.gyp index f4c3e2d95..75567d071 100644 --- a/webrtc/test/metrics.gyp +++ b/webrtc/test/metrics.gyp @@ -21,6 +21,7 @@ 'type': '<(library)', 'dependencies': [ '<(webrtc_root)/common_video/common_video.gyp:common_video', + '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers', ], 'include_dirs': [ '.', diff --git a/webrtc/video_engine/include/vie_image_process.h 
b/webrtc/video_engine/include/vie_image_process.h index dfad08d8a..e993b9740 100644 --- a/webrtc/video_engine/include/vie_image_process.h +++ b/webrtc/video_engine/include/vie_image_process.h @@ -18,6 +18,7 @@ #define WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_IMAGE_PROCESS_H_ #include "common_types.h" +#include "common_video/interface/i420_video_frame.h" namespace webrtc { @@ -31,10 +32,8 @@ class WEBRTC_DLLEXPORT ViEEffectFilter { public: // This method is called with an I420 video frame allowing the user to // modify the video frame. - virtual int Transform(int size, - unsigned char* frame_buffer, - unsigned int time_stamp90KHz, - unsigned int width, + virtual int Transform(int size, unsigned char* frameBuffer, + unsigned int timeStamp90KHz, unsigned int width, unsigned int height) = 0; protected: ViEEffectFilter() {} diff --git a/webrtc/video_engine/test/android/jni/Android.mk b/webrtc/video_engine/test/android/jni/Android.mk index 1d3586eb8..406660925 100644 --- a/webrtc/video_engine/test/android/jni/Android.mk +++ b/webrtc/video_engine/test/android/jni/Android.mk @@ -297,6 +297,7 @@ LOCAL_STATIC_LIBRARIES := \ libresampler \ libsignal_processing \ libsignal_processing_neon \ + libcommon_video \ libsystem_wrappers \ libcpu_features_android \ libaudio_device \ @@ -306,7 +307,6 @@ LOCAL_STATIC_LIBRARIES := \ libudp_transport \ libwebrtc_utility \ libaudio_conference_mixer \ - libcommon_video \ libyuv \ libwebrtc_i420 \ libwebrtc_vp8 \ diff --git a/webrtc/video_engine/test/auto_test/source/vie_autotest_capture.cc b/webrtc/video_engine/test/auto_test/source/vie_autotest_capture.cc index f232d89eb..580a521ed 100644 --- a/webrtc/video_engine/test/auto_test/source/vie_autotest_capture.cc +++ b/webrtc/video_engine/test/auto_test/source/vie_autotest_capture.cc @@ -84,7 +84,7 @@ class CaptureEffectFilter : public webrtc::ViEEffectFilter { // Implements video_engineEffectFilter. 
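// ---------------------------------------------------------------------------
// [Editor's sketch] A user-supplied effect filter against the Transform()
// signature shown in vie_image_process.h above. The interface is re-declared
// locally so the example stands alone; real code would include
// video_engine/include/vie_image_process.h instead. DarkenFilter is a
// hypothetical example, not part of the patch.
// ---------------------------------------------------------------------------
class ViEEffectFilter {
 public:
  virtual int Transform(int size, unsigned char* frame_buffer,
                        unsigned int time_stamp90KHz, unsigned int width,
                        unsigned int height) = 0;
 protected:
  ViEEffectFilter() {}
  virtual ~ViEEffectFilter() {}
};

// Halves the luma plane of the packed I420 buffer, i.e. darkens the image.
// The Y-then-U-then-V layout matches what ExtractBuffer() hands to the
// filter in the engine hunks later in this patch.
class DarkenFilter : public ViEEffectFilter {
 public:
  virtual int Transform(int size, unsigned char* frame_buffer,
                        unsigned int /*time_stamp90KHz*/, unsigned int width,
                        unsigned int height) {
    const int luma_size = static_cast<int>(width * height);
    if (frame_buffer == 0 || size < luma_size)
      return -1;
    for (int i = 0; i < luma_size; ++i)
      frame_buffer[i] = static_cast<unsigned char>(frame_buffer[i] / 2);
    return 0;
  }
};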
virtual int Transform(int size, unsigned char* frame_buffer, - unsigned int time_stamp90KHz, unsigned int width, + unsigned int timeStamp90KHz, unsigned int width, unsigned int height) { EXPECT_TRUE(frame_buffer != NULL); EXPECT_EQ(expected_width_, width); diff --git a/webrtc/video_engine/test/auto_test/source/vie_autotest_codec.cc b/webrtc/video_engine/test/auto_test/source/vie_autotest_codec.cc index 4cb9c04e5..972225c26 100644 --- a/webrtc/video_engine/test/auto_test/source/vie_autotest_codec.cc +++ b/webrtc/video_engine/test/auto_test/source/vie_autotest_codec.cc @@ -100,12 +100,11 @@ class RenderFilter : public webrtc::ViEEffectFilter { virtual ~RenderFilter() { } - virtual int Transform(int size, unsigned char* frame_buffer, unsigned int time_stamp90KHz, unsigned int width, - unsigned int height) { + unsigned int height) { num_frames_++; last_render_width_ = width; last_render_height_ = height; diff --git a/webrtc/video_engine/test/libvietest/include/tb_I420_codec.h b/webrtc/video_engine/test/libvietest/include/tb_I420_codec.h index 721a5ecbb..2125def4b 100644 --- a/webrtc/video_engine/test/libvietest/include/tb_I420_codec.h +++ b/webrtc/video_engine/test/libvietest/include/tb_I420_codec.h @@ -34,7 +34,7 @@ public: WebRtc_UWord32 maxPayloadSize); virtual WebRtc_Word32 Encode( - const webrtc::VideoFrame& inputImage, + const webrtc::I420VideoFrame& inputImage, const webrtc::CodecSpecificInfo* codecSpecificInfo, const std::vector<webrtc::VideoFrameType>* frameTypes); @@ -117,7 +117,7 @@ public: private: - webrtc::VideoFrame _decodedImage; + webrtc::I420VideoFrame _decodedImage; WebRtc_Word32 _width; WebRtc_Word32 _height; bool _inited; diff --git a/webrtc/video_engine/test/libvietest/testbed/tb_I420_codec.cc b/webrtc/video_engine/test/libvietest/testbed/tb_I420_codec.cc index cb9e2dcb8..c9f4e93c8 100644 --- a/webrtc/video_engine/test/libvietest/testbed/tb_I420_codec.cc +++ b/webrtc/video_engine/test/libvietest/testbed/tb_I420_codec.cc @@ -119,7 +119,7 @@ WebRtc_Word32 TbI420Encoder::InitEncode(const webrtc::VideoCodec* inst, } WebRtc_Word32 TbI420Encoder::Encode( - const webrtc::VideoFrame& inputImage, + const webrtc::I420VideoFrame& inputImage, const webrtc::CodecSpecificInfo* /*codecSpecificInfo*/, const std::vector<webrtc::VideoFrameType>* /*frameTypes*/) { @@ -134,10 +134,13 @@ WebRtc_Word32 TbI420Encoder::Encode( } _encodedImage._frameType = webrtc::kKeyFrame; // no coding - _encodedImage._timeStamp = inputImage.TimeStamp(); - _encodedImage._encodedHeight = inputImage.Height(); - _encodedImage._encodedWidth = inputImage.Width(); - if (inputImage.Length() > _encodedImage._size) + _encodedImage._timeStamp = inputImage.timestamp(); + _encodedImage._encodedHeight = inputImage.height(); + _encodedImage._encodedWidth = inputImage.width(); + unsigned int reqSize = webrtc::CalcBufferSize(webrtc::kI420, + _encodedImage._encodedWidth, + _encodedImage._encodedHeight); + if (reqSize > _encodedImage._size) { // allocating encoded memory @@ -147,19 +150,20 @@ WebRtc_Word32 TbI420Encoder::Encode( _encodedImage._buffer = NULL; _encodedImage._size = 0; } - const WebRtc_UWord32 newSize = (3 * _encodedImage._encodedWidth - * _encodedImage._encodedHeight) >> 1; - WebRtc_UWord8* newBuffer = new WebRtc_UWord8[newSize]; + WebRtc_UWord8* newBuffer = new WebRtc_UWord8[reqSize]; if (newBuffer == NULL) { return WEBRTC_VIDEO_CODEC_MEMORY; } - _encodedImage._size = newSize; + _encodedImage._size = reqSize; _encodedImage._buffer = newBuffer; } - assert(_encodedImage._size >= inputImage.Length()); - memcpy(_encodedImage._buffer, inputImage.Buffer(),
inputImage.Length()); - _encodedImage._length = inputImage.Length(); + if (ExtractBuffer(inputImage, _encodedImage._size, + _encodedImage._buffer) < 0) { + return -1; + } + + _encodedImage._length = reqSize; _encodedCompleteCallback->Encoded(_encodedImage); return WEBRTC_VIDEO_CODEC_OK; } @@ -257,16 +261,18 @@ WebRtc_Word32 TbI420Decoder::Decode( return WEBRTC_VIDEO_CODEC_UNINITIALIZED; } - // Allocate memory for decoded image. - const WebRtc_UWord32 newSize = webrtc::CalcBufferSize(webrtc::kI420, - _width, _height); - _decodedImage.VerifyAndAllocate(newSize); - - // Set decoded image parameters. - _decodedImage.SetHeight(_height); - _decodedImage.SetWidth(_width); - _decodedImage.SetTimeStamp(inputImage._timeStamp); - _decodedImage.CopyFrame(inputImage._length, inputImage._buffer); + int size_y = _width * _height; + int size_uv = ((_width + 1 ) / 2) * ((_height + 1) / 2); + int ret = _decodedImage.CreateFrame(size_y, inputImage._buffer, + size_uv, inputImage._buffer + size_y, + size_uv, inputImage._buffer + size_y + + size_uv, + _width, _height, + _width, (_width + 1 ) / 2, + (_width + 1 ) / 2); + if (ret < 0) + return WEBRTC_VIDEO_CODEC_ERROR; + _decodedImage.set_timestamp(inputImage._timeStamp); _decodeCompleteCallback->Decoded(_decodedImage); return WEBRTC_VIDEO_CODEC_OK; @@ -283,7 +289,6 @@ WebRtc_Word32 TbI420Decoder::RegisterDecodeCompleteCallback( WebRtc_Word32 TbI420Decoder::Release() { _functionCalls.Release++; - _decodedImage.Free(); _inited = false; return WEBRTC_VIDEO_CODEC_OK; } diff --git a/webrtc/video_engine/vie_capturer.cc b/webrtc/video_engine/vie_capturer.cc index ad9f39d22..225660e40 100644 --- a/webrtc/video_engine/vie_capturer.cc +++ b/webrtc/video_engine/vie_capturer.cc @@ -10,6 +10,7 @@ #include "video_engine/vie_capturer.h" +#include "common_video/libyuv/include/webrtc_libyuv.h" #include "modules/interface/module_common_types.h" #include "modules/utility/interface/process_thread.h" #include "modules/video_capture/main/interface/video_capture_factory.h" @@ -344,7 +345,7 @@ int ViECapturer::IncomingFrameI420(const ViEVideoFrameI420& video_frame, } void ViECapturer::OnIncomingCapturedFrame(const WebRtc_Word32 capture_id, - VideoFrame& video_frame, + I420VideoFrame& video_frame, VideoCodecType codec_type) { WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_), "%s(capture_id: %d)", __FUNCTION__, capture_id); @@ -352,6 +353,21 @@ void ViECapturer::OnIncomingCapturedFrame(const WebRtc_Word32 capture_id, // Make sure we render this frame earlier since we know the render time set // is slightly off since it's being set when the frame has been received from // the camera, and not when the camera actually captured the frame. + video_frame.set_render_time_ms(video_frame.render_time_ms() - FrameDelay()); + captured_frame_.SwapFrame(&video_frame); + capture_event_.Set(); + return; +} + +void ViECapturer::OnIncomingCapturedEncodedFrame(const WebRtc_Word32 capture_id, + VideoFrame& video_frame, + VideoCodecType codec_type) { + WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_), + "%s(capture_id: %d)", __FUNCTION__, capture_id); + CriticalSectionScoped cs(capture_cs_.get()); + // Make sure we render this frame earlier since we know the render time set + // is slightly off since it's being set when the frame has been received from + // the camera, and not when the camera actually captured the frame. 
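// ---------------------------------------------------------------------------
// [Editor's sketch] The TbI420Decoder::Decode() hunk above rebuilds an
// I420VideoFrame by pointing CreateFrame() at three offsets inside one
// contiguous buffer. The same pointer arithmetic in isolation; PlaneView and
// I420View are illustrative stand-ins, not WebRTC types.
// ---------------------------------------------------------------------------
#include <cstdint>

struct PlaneView {
  const uint8_t* data;
  int size;
  int stride;
};

struct I420View {
  PlaneView y, u, v;
};

I420View SplitI420(const uint8_t* buffer, int width, int height) {
  const int half_width = (width + 1) / 2;
  const int size_y = width * height;
  const int size_uv = half_width * ((height + 1) / 2);
  I420View view;
  view.y = {buffer, size_y, width};                            // Y first.
  view.u = {buffer + size_y, size_uv, half_width};             // Then U.
  view.v = {buffer + size_y + size_uv, size_uv, half_width};   // Then V.
  return view;
}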
video_frame.SetRenderTime(video_frame.RenderTimeMs() - FrameDelay()); if (codec_type != kVideoCodecUnknown) { if (encoded_frame_.Length() != 0) { @@ -365,10 +381,10 @@ void ViECapturer::OnIncomingCapturedFrame(const WebRtc_Word32 capture_id, deliver_event_.Wait(kMaxDeliverWaitTime); assert(encoded_frame_.Length() == 0); capture_cs_->Enter(); + } else { + assert(false); } encoded_frame_.SwapFrame(video_frame); - } else { - captured_frame_.SwapFrame(video_frame); } capture_event_.Set(); return; @@ -535,21 +551,21 @@ bool ViECapturer::ViECaptureThreadFunction(void* obj) { bool ViECapturer::ViECaptureProcess() { if (capture_event_.Wait(kThreadWaitTimeMs) == kEventSignaled) { deliver_cs_->Enter(); - if (captured_frame_.Length() > 0) { + if (!captured_frame_.IsZeroSize()) { // New I420 frame. capture_cs_->Enter(); - deliver_frame_.SwapFrame(captured_frame_); - captured_frame_.SetLength(0); + deliver_frame_.SwapFrame(&captured_frame_); + captured_frame_.ResetSize(); capture_cs_->Leave(); DeliverI420Frame(&deliver_frame_); } if (encoded_frame_.Length() > 0) { capture_cs_->Enter(); - deliver_frame_.SwapFrame(encoded_frame_); + deliver_encoded_frame_.SwapFrame(encoded_frame_); encoded_frame_.SetLength(0); deliver_event_.Set(); capture_cs_->Leave(); - DeliverCodedFrame(&deliver_frame_); + DeliverCodedFrame(&deliver_encoded_frame_); } deliver_cs_->Leave(); if (current_brightness_level_ != reported_brightness_level_) { @@ -564,7 +580,7 @@ bool ViECapturer::ViECaptureProcess() { return true; } -void ViECapturer::DeliverI420Frame(VideoFrame* video_frame) { +void ViECapturer::DeliverI420Frame(I420VideoFrame* video_frame) { // Apply image enhancement and effect filter. if (deflicker_frame_stats_) { if (image_proc_module_->GetFrameStats(deflicker_frame_stats_, @@ -602,9 +618,14 @@ void ViECapturer::DeliverI420Frame(VideoFrame* video_frame) { } } if (effect_filter_) { - effect_filter_->Transform(video_frame->Length(), video_frame->Buffer(), - video_frame->TimeStamp(), video_frame->Width(), - video_frame->Height()); + unsigned int length = CalcBufferSize(kI420, + video_frame->width(), + video_frame->height()); + scoped_array video_buffer(new uint8_t[length]); + ExtractBuffer(*video_frame, length, video_buffer.get()); + effect_filter_->Transform(length, video_buffer.get(), + video_frame->timestamp(), video_frame->width(), + video_frame->height()); } // Deliver the captured frame to all observers (channels, renderer or file). ViEFrameProviderBase::DeliverFrame(video_frame); @@ -750,7 +771,7 @@ WebRtc_Word32 ViECapturer::InitEncode(const VideoCodec* codec_settings, } WebRtc_Word32 ViECapturer::Encode( - const VideoFrame& input_image, + const I420VideoFrame& input_image, const CodecSpecificInfo* codec_specific_info, const std::vector* frame_types) { CriticalSectionScoped cs(encoding_cs_.get()); @@ -838,7 +859,8 @@ WebRtc_Word32 ViECapturer::SetRates(WebRtc_UWord32 new_bit_rate, return capture_encoder_->SetRates(new_bit_rate, frame_rate); } -WebRtc_Word32 ViECapturer::FrameToRender(VideoFrame& video_frame) { // NOLINT +WebRtc_Word32 ViECapturer::FrameToRender( + I420VideoFrame& video_frame) { //NOLINT deliver_cs_->Enter(); DeliverI420Frame(&video_frame); deliver_cs_->Leave(); diff --git a/webrtc/video_engine/vie_capturer.h b/webrtc/video_engine/vie_capturer.h index 1102898e3..6f4ec8b5b 100644 --- a/webrtc/video_engine/vie_capturer.h +++ b/webrtc/video_engine/vie_capturer.h @@ -116,8 +116,11 @@ class ViECapturer // Implements VideoCaptureDataCallback. 
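// ---------------------------------------------------------------------------
// [Editor's sketch] OnIncomingCapturedFrame() above hands the frame to the
// process thread by swapping buffers under a lock rather than copying pixels,
// and ViECaptureProcess() swaps it back out and resets the slot. The same
// handoff with std::mutex and std::vector standing in for the WebRTC
// critical section and I420VideoFrame.
// ---------------------------------------------------------------------------
#include <cstdint>
#include <mutex>
#include <vector>

class CaptureHandoff {
 public:
  // Capture thread (cf. OnIncomingCapturedFrame + SwapFrame).
  void OnCapturedFrame(std::vector<uint8_t>* frame) {
    std::lock_guard<std::mutex> lock(mutex_);
    captured_.swap(*frame);
    pending_ = true;
  }

  // Process thread (cf. ViECaptureProcess); true if a frame was pending.
  bool TakeFrame(std::vector<uint8_t>* out) {
    std::lock_guard<std::mutex> lock(mutex_);
    if (!pending_)
      return false;
    out->swap(captured_);
    captured_.clear();  // Cf. captured_frame_.ResetSize().
    pending_ = false;
    return true;
  }

 private:
  std::mutex mutex_;
  std::vector<uint8_t> captured_;
  bool pending_ = false;
};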
virtual void OnIncomingCapturedFrame(const WebRtc_Word32 id, - VideoFrame& video_frame, + I420VideoFrame& video_frame, VideoCodecType codec_type); + virtual void OnIncomingCapturedEncodedFrame(const WebRtc_Word32 capture_id, + VideoFrame& video_frame, + VideoCodecType codec_type); virtual void OnCaptureDelayChanged(const WebRtc_Word32 id, const WebRtc_Word32 delay); @@ -139,7 +142,7 @@ class ViECapturer virtual WebRtc_Word32 InitEncode(const VideoCodec* codec_settings, WebRtc_Word32 number_of_cores, WebRtc_UWord32 max_payload_size); - virtual WebRtc_Word32 Encode(const VideoFrame& input_image, + virtual WebRtc_Word32 Encode(const I420VideoFrame& input_image, const CodecSpecificInfo* codec_specific_info, const std::vector* frame_types); virtual WebRtc_Word32 RegisterEncodeCompleteCallback( @@ -153,7 +156,7 @@ class ViECapturer // Implements VCMReceiveCallback. // TODO(mflodman) Change input argument to pointer. - virtual WebRtc_Word32 FrameToRender(VideoFrame& video_frame); // NOLINT + virtual WebRtc_Word32 FrameToRender(I420VideoFrame& video_frame); // NOLINT // Implements VideoCaptureFeedBack virtual void OnCaptureFrameRate(const WebRtc_Word32 id, @@ -165,7 +168,7 @@ class ViECapturer static bool ViECaptureThreadFunction(void* obj); bool ViECaptureProcess(); - void DeliverI420Frame(VideoFrame* video_frame); + void DeliverI420Frame(I420VideoFrame* video_frame); void DeliverCodedFrame(VideoFrame* video_frame); private: @@ -182,8 +185,9 @@ class ViECapturer EventWrapper& capture_event_; EventWrapper& deliver_event_; - VideoFrame captured_frame_; - VideoFrame deliver_frame_; + I420VideoFrame captured_frame_; + I420VideoFrame deliver_frame_; + VideoFrame deliver_encoded_frame_; VideoFrame encoded_frame_; // Image processing. @@ -215,7 +219,7 @@ class ViECapturer bool decoder_initialized_; CaptureCapability requested_capability_; - VideoFrame capture_device_image_; + I420VideoFrame capture_device_image_; }; } // namespace webrtc diff --git a/webrtc/video_engine/vie_channel.cc b/webrtc/video_engine/vie_channel.cc index ac1096839..bd8c5ae63 100644 --- a/webrtc/video_engine/vie_channel.cc +++ b/webrtc/video_engine/vie_channel.cc @@ -13,6 +13,7 @@ #include #include +#include "common_video/libyuv/include/webrtc_libyuv.h" #include "modules/rtp_rtcp/interface/rtp_rtcp.h" #include "modules/udp_transport/interface/udp_transport.h" #include "modules/utility/interface/process_thread.h" @@ -2067,7 +2068,8 @@ RtpRtcp* ViEChannel::rtp_rtcp() { return rtp_rtcp_.get(); } -WebRtc_Word32 ViEChannel::FrameToRender(VideoFrame& video_frame) { // NOLINT +WebRtc_Word32 ViEChannel::FrameToRender( + I420VideoFrame& video_frame) { // NOLINT CriticalSectionScoped cs(callback_cs_.get()); if (decoder_reset_) { @@ -2079,8 +2081,8 @@ WebRtc_Word32 ViEChannel::FrameToRender(VideoFrame& video_frame) { // NOLINT // VCM::ReceiveCodec returns the codec set by // RegisterReceiveCodec, which might not be the size we're // actually decoding. 
- decoder.width = static_cast<unsigned short>(video_frame.Width()); - decoder.height = static_cast<unsigned short>(video_frame.Height()); + decoder.width = static_cast<unsigned short>(video_frame.width()); + decoder.height = static_cast<unsigned short>(video_frame.height()); codec_observer_->IncomingCodecChanged(channel_id_, decoder); } else { assert(false); @@ -2091,9 +2093,14 @@ WebRtc_Word32 ViEChannel::FrameToRender(VideoFrame& video_frame) { // NOLINT decoder_reset_ = false; } if (effect_filter_) { - effect_filter_->Transform(video_frame.Length(), video_frame.Buffer(), - video_frame.TimeStamp(), video_frame.Width(), - video_frame.Height()); + unsigned int length = CalcBufferSize(kI420, + video_frame.width(), + video_frame.height()); + scoped_array<uint8_t> video_buffer(new uint8_t[length]); + ExtractBuffer(video_frame, length, video_buffer.get()); + effect_filter_->Transform(length, video_buffer.get(), + video_frame.timestamp(), video_frame.width(), + video_frame.height()); } if (color_enhancement_) { VideoProcessingModule::ColorEnhancement(&video_frame); @@ -2109,7 +2116,7 @@ WebRtc_Word32 ViEChannel::FrameToRender(VideoFrame& video_frame) { // NOLINT no_of_csrcs = 1; } WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, channel_id_), - "%s(timestamp:%u)", __FUNCTION__, video_frame.TimeStamp()); + "%s(timestamp:%u)", __FUNCTION__, video_frame.timestamp()); DeliverFrame(&video_frame, no_of_csrcs, arr_ofCSRC); return 0; } diff --git a/webrtc/video_engine/vie_channel.h b/webrtc/video_engine/vie_channel.h index bab5a8280..107433fee 100644 --- a/webrtc/video_engine/vie_channel.h +++ b/webrtc/video_engine/vie_channel.h @@ -296,7 +296,7 @@ class ViEChannel RtpRtcp* rtp_rtcp(); // Implements VCMReceiveCallback. - virtual WebRtc_Word32 FrameToRender(VideoFrame& video_frame); // NOLINT + virtual WebRtc_Word32 FrameToRender(I420VideoFrame& video_frame); // NOLINT // Implements VCMReceiveCallback.
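// ---------------------------------------------------------------------------
// [Editor's sketch] Every effect-filter call site in this patch follows the
// same recipe: CalcBufferSize(kI420, w, h) for the packed size,
// ExtractBuffer() to flatten the three planes, then the legacy Transform()
// on the flat buffer. The packing step under the simplifying assumption
// stride == width (the real ExtractBuffer() copies row by row to honor
// strides); PackI420 is illustrative, not WebRTC API.
// ---------------------------------------------------------------------------
#include <cstdint>
#include <cstring>

size_t PackI420(const uint8_t* y, const uint8_t* u, const uint8_t* v,
                int width, int height, uint8_t* out) {
  const size_t size_y = static_cast<size_t>(width) * height;
  const size_t size_uv =
      static_cast<size_t>((width + 1) / 2) * ((height + 1) / 2);
  std::memcpy(out, y, size_y);                      // Y plane first...
  std::memcpy(out + size_y, u, size_uv);            // ...then U...
  std::memcpy(out + size_y + size_uv, v, size_uv);  // ...then V.
  return size_y + 2 * size_uv;  // Same count CalcBufferSize(kI420,w,h) gives.
}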
virtual WebRtc_Word32 ReceivedDecodedReferenceFrame( diff --git a/webrtc/video_engine/vie_encoder.cc b/webrtc/video_engine/vie_encoder.cc index ed3b42352..928ae5400 100644 --- a/webrtc/video_engine/vie_encoder.cc +++ b/webrtc/video_engine/vie_encoder.cc @@ -12,6 +12,7 @@ #include +#include "common_video/libyuv/include/webrtc_libyuv.h" #include "modules/rtp_rtcp/interface/rtp_rtcp.h" #include "modules/utility/interface/process_thread.h" #include "modules/video_coding/codecs/interface/video_codec_interface.h" @@ -422,12 +423,12 @@ RtpRtcp* ViEEncoder::SendRtpRtcpModule() { } void ViEEncoder::DeliverFrame(int id, - VideoFrame* video_frame, + I420VideoFrame* video_frame, int num_csrcs, const WebRtc_UWord32 CSRC[kRtpCsrcSize]) { WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo, ViEId(engine_id_, channel_id_), "%s: %llu", __FUNCTION__, - video_frame->TimeStamp()); + video_frame->timestamp()); { CriticalSectionScoped cs(data_cs_.get()); @@ -440,7 +441,7 @@ void ViEEncoder::DeliverFrame(int id, WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo, ViEId(engine_id_, channel_id_), "%s: Dropping frame %llu after a key fame", __FUNCTION__, - video_frame->TimeStamp()); + video_frame->timestamp()); drop_next_frame_ = false; return; } @@ -450,14 +451,19 @@ void ViEEncoder::DeliverFrame(int id, const int kMsToRtpTimestamp = 90; const WebRtc_UWord32 time_stamp = kMsToRtpTimestamp * - static_cast(video_frame->RenderTimeMs()); - video_frame->SetTimeStamp(time_stamp); + static_cast(video_frame->render_time_ms()); + video_frame->set_timestamp(time_stamp); { CriticalSectionScoped cs(callback_cs_.get()); if (effect_filter_) { - effect_filter_->Transform(video_frame->Length(), video_frame->Buffer(), - video_frame->TimeStamp(), - video_frame->Width(), video_frame->Height()); + unsigned int length = CalcBufferSize(kI420, + video_frame->width(), + video_frame->height()); + scoped_array video_buffer(new uint8_t[length]); + ExtractBuffer(*video_frame, length, video_buffer.get()); + effect_filter_->Transform(length, video_buffer.get(), + video_frame->timestamp(), video_frame->width(), + video_frame->height()); } } // Record raw frame. @@ -494,7 +500,7 @@ void ViEEncoder::DeliverFrame(int id, has_received_sli_ = false; has_received_rpsi_ = false; } - VideoFrame* decimated_frame = NULL; + I420VideoFrame* decimated_frame = NULL; const int ret = vpm_.PreprocessFrame(*video_frame, &decimated_frame); if (ret == 1) { // Drop this frame. @@ -503,7 +509,7 @@ void ViEEncoder::DeliverFrame(int id, WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, channel_id_), "%s: Error preprocessing frame %u", __FUNCTION__, - video_frame->TimeStamp()); + video_frame->timestamp()); return; } @@ -520,14 +526,14 @@ void ViEEncoder::DeliverFrame(int id, WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, channel_id_), "%s: Error encoding frame %u", __FUNCTION__, - video_frame->TimeStamp()); + video_frame->timestamp()); } return; } #endif // TODO(mflodman) Rewrite this to use code common to VP8 case. // Pass frame via preprocessor. - VideoFrame* decimated_frame = NULL; + I420VideoFrame* decimated_frame = NULL; const int ret = vpm_.PreprocessFrame(*video_frame, &decimated_frame); if (ret == 1) { // Drop this frame. 
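// ---------------------------------------------------------------------------
// [Editor's sketch] ViEEncoder::DeliverFrame() above stamps outgoing frames
// with kMsToRtpTimestamp * render_time_ms: RTP video clocks run at 90 kHz,
// so one millisecond equals 90 ticks, truncated into the 32-bit RTP
// timestamp space. A self-checking sketch of that conversion:
// ---------------------------------------------------------------------------
#include <cassert>
#include <cstdint>

uint32_t MsToRtpTimestamp(int64_t render_time_ms) {
  const uint32_t kMsToRtpTimestamp = 90;  // 90 kHz / 1000 ms.
  return kMsToRtpTimestamp * static_cast<uint32_t>(render_time_ms);
}

int main() {
  // Frames 33 ms apart land 2970 RTP ticks apart (~30 fps spacing).
  assert(MsToRtpTimestamp(1033) - MsToRtpTimestamp(1000) == 2970u);
  return 0;
}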
@@ -536,7 +542,7 @@ void ViEEncoder::DeliverFrame(int id, WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, channel_id_), "%s: Error preprocessing frame %u", __FUNCTION__, - video_frame->TimeStamp()); + video_frame->timestamp()); return; } @@ -547,7 +553,7 @@ void ViEEncoder::DeliverFrame(int id, if (vcm_.AddVideoFrame(*decimated_frame) != VCM_OK) { WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_, channel_id_), "%s: Error encoding frame %u", - __FUNCTION__, video_frame->TimeStamp()); + __FUNCTION__, video_frame->timestamp()); } } diff --git a/webrtc/video_engine/vie_encoder.h b/webrtc/video_engine/vie_encoder.h index dc74f3339..339390f23 100644 --- a/webrtc/video_engine/vie_encoder.h +++ b/webrtc/video_engine/vie_encoder.h @@ -84,7 +84,7 @@ class ViEEncoder // Implementing ViEFrameCallback. virtual void DeliverFrame(int id, - VideoFrame* video_frame, + I420VideoFrame* video_frame, int num_csrcs = 0, const WebRtc_UWord32 CSRC[kRtpCsrcSize] = NULL); virtual void DelayChanged(int id, int frame_delay); diff --git a/webrtc/video_engine/vie_file_image.cc b/webrtc/video_engine/vie_file_image.cc index 1f224fe13..d81f54397 100644 --- a/webrtc/video_engine/vie_file_image.cc +++ b/webrtc/video_engine/vie_file_image.cc @@ -25,7 +25,7 @@ namespace webrtc { int ViEFileImage::ConvertJPEGToVideoFrame(int engine_id, const char* file_nameUTF8, - VideoFrame* video_frame) { + I420VideoFrame* video_frame) { // Read jpeg file into temporary buffer. EncodedImage image_buffer; @@ -87,15 +87,19 @@ int ViEFileImage::ConvertJPEGToVideoFrame(int engine_id, return 0; } -int ViEFileImage::ConvertPictureToVideoFrame(int engine_id, - const ViEPicture& picture, - VideoFrame* video_frame) { - WebRtc_UWord32 picture_length = (WebRtc_UWord32)(picture.width * - picture.height * 1.5); - video_frame->CopyFrame(picture_length, picture.data); - video_frame->SetWidth(picture.width); - video_frame->SetHeight(picture.height); - video_frame->SetLength(picture_length); +int ViEFileImage::ConvertPictureToI420VideoFrame(int engine_id, + const ViEPicture& picture, + I420VideoFrame* video_frame) { + int half_width = (picture.width + 1) / 2; + int half_height = (picture.height + 1) / 2; + int size_uv = half_width * half_height; + int size_y = picture.width * picture.height; + return video_frame->CreateFrame(size_y, picture.data, + size_uv, picture.data + size_y, + size_uv, picture.data + size_y + + size_uv, + picture.width, picture.height, + picture.width, half_width, half_width); return 0; } diff --git a/webrtc/video_engine/vie_file_image.h b/webrtc/video_engine/vie_file_image.h index e3f229a10..3bca244dc 100644 --- a/webrtc/video_engine/vie_file_image.h +++ b/webrtc/video_engine/vie_file_image.h @@ -11,7 +11,7 @@ #ifndef WEBRTC_VIDEO_ENGINE_VIE_FILE_IMAGE_H_ #define WEBRTC_VIDEO_ENGINE_VIE_FILE_IMAGE_H_ -#include "modules/interface/module_common_types.h" +#include "common_video/interface/i420_video_frame.h" #include "typedefs.h" // NOLINT #include "video_engine/include/vie_file.h" @@ -21,10 +21,10 @@ class ViEFileImage { public: static int ConvertJPEGToVideoFrame(int engine_id, const char* file_nameUTF8, - VideoFrame* video_frame); - static int ConvertPictureToVideoFrame(int engine_id, - const ViEPicture& picture, - VideoFrame* video_frame); + I420VideoFrame* video_frame); + static int ConvertPictureToI420VideoFrame(int engine_id, + const ViEPicture& picture, + I420VideoFrame* video_frame); }; } // namespace webrtc diff --git a/webrtc/video_engine/vie_file_impl.cc 
b/webrtc/video_engine/vie_file_impl.cc index 147b76612..bea4ebd7f 100644 --- a/webrtc/video_engine/vie_file_impl.cc +++ b/webrtc/video_engine/vie_file_impl.cc @@ -14,6 +14,7 @@ #ifdef WEBRTC_VIDEO_ENGINE_FILE_API #include "common_video/jpeg/include/jpeg.h" +#include "common_video/libyuv/include/webrtc_libyuv.h" #include "system_wrappers/interface/condition_variable_wrapper.h" #include "system_wrappers/interface/critical_section_wrapper.h" #include "system_wrappers/interface/trace.h" @@ -557,7 +558,7 @@ int ViEFileImpl::GetRenderSnapshot(const int video_channel, return -1; } - VideoFrame video_frame; + I420VideoFrame video_frame; if (renderer->GetLastRenderedFrame(video_channel, video_frame) == -1) { return -1; } @@ -592,20 +593,23 @@ int ViEFileImpl::GetRenderSnapshot(const int video_channel, return -1; } - VideoFrame video_frame; + I420VideoFrame video_frame; if (renderer->GetLastRenderedFrame(video_channel, video_frame) == -1) { return -1; } // Copy from VideoFrame class to ViEPicture struct. - int buffer_length = - static_cast(video_frame.Width() * video_frame.Height() * 1.5); - picture.data = static_cast(malloc( + int buffer_length = CalcBufferSize(kI420, video_frame.width(), + video_frame.height()); + picture.data = static_cast(malloc( buffer_length * sizeof(WebRtc_UWord8))); - memcpy(picture.data, video_frame.Buffer(), buffer_length); + if (ExtractBuffer(video_frame, buffer_length, picture.data) < 0) { + return -1; + } + picture.size = buffer_length; - picture.width = video_frame.Width(); - picture.height = video_frame.Height(); + picture.width = video_frame.width(); + picture.height = video_frame.height(); picture.type = kVideoI420; return 0; } @@ -618,7 +622,7 @@ int ViEFileImpl::GetCaptureDeviceSnapshot(const int capture_id, return -1; } - VideoFrame video_frame; + I420VideoFrame video_frame; if (GetNextCapturedFrame(capture_id, &video_frame) == -1) { WEBRTC_TRACE(kTraceError, kTraceVideo, shared_data_->instance_id(), "Could not gain acces to capture device %d video frame " @@ -649,7 +653,7 @@ int ViEFileImpl::GetCaptureDeviceSnapshot(const int capture_id, int ViEFileImpl::GetCaptureDeviceSnapshot(const int capture_id, ViEPicture& picture) { - VideoFrame video_frame; + I420VideoFrame video_frame; ViEInputManagerScoped is(*(shared_data_->input_manager())); ViECapturer* capturer = is.Capture(capture_id); if (!capturer) { @@ -663,14 +667,16 @@ int ViEFileImpl::GetCaptureDeviceSnapshot(const int capture_id, } // Copy from VideoFrame class to ViEPicture struct. 
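// ---------------------------------------------------------------------------
// [Editor's sketch] The snapshot code around this point copies a rendered
// frame into a ViEPicture: size the buffer from the frame dimensions,
// malloc() it (the ViEPicture contract frees with free()), and pack the
// planes into it. A standalone version of that copy; Picture and the
// packed_i420 argument are illustrative stand-ins for ViEPicture and
// ExtractBuffer().
// ---------------------------------------------------------------------------
#include <cstdint>
#include <cstdlib>
#include <cstring>

struct Picture {
  uint8_t* data;
  int size;
  int width;
  int height;
};

bool SnapshotToPicture(const uint8_t* packed_i420, int width, int height,
                       Picture* picture) {
  const int size_uv = ((width + 1) / 2) * ((height + 1) / 2);
  const int buffer_length = width * height + 2 * size_uv;  // CalcBufferSize.
  picture->data = static_cast<uint8_t*>(std::malloc(buffer_length));
  if (picture->data == 0)
    return false;
  std::memcpy(picture->data, packed_i420, buffer_length);
  picture->size = buffer_length;
  picture->width = width;
  picture->height = height;
  return true;
}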
- int buffer_length = - static_cast(video_frame.Width() * video_frame.Height() * 1.5); + int buffer_length = CalcBufferSize(kI420, video_frame.width(), + video_frame.height()); picture.data = static_cast(malloc( buffer_length * sizeof(WebRtc_UWord8))); - memcpy(picture.data, video_frame.Buffer(), buffer_length); + if (ExtractBuffer(video_frame, buffer_length, picture.data) < 0) { + return -1; + } picture.size = buffer_length; - picture.width = video_frame.Width(); - picture.height = video_frame.Height(); + picture.width = video_frame.width(); + picture.height = video_frame.height(); picture.type = kVideoI420; return 0; } @@ -701,7 +707,7 @@ int ViEFileImpl::SetRenderStartImage(const int video_channel, return -1; } - VideoFrame start_image; + I420VideoFrame start_image; if (ViEFileImage::ConvertJPEGToVideoFrame( ViEId(shared_data_->instance_id(), video_channel), file_nameUTF8, &start_image) != 0) { @@ -740,8 +746,8 @@ int ViEFileImpl::SetRenderStartImage(const int video_channel, return -1; } - VideoFrame start_image; - if (ViEFileImage::ConvertPictureToVideoFrame( + I420VideoFrame start_image; + if (ViEFileImage::ConvertPictureToI420VideoFrame( ViEId(shared_data_->instance_id(), video_channel), picture, &start_image) != 0) { WEBRTC_TRACE(kTraceError, kTraceVideo, @@ -770,7 +776,7 @@ int ViEFileImpl::SetRenderTimeoutImage(const int video_channel, shared_data_->SetLastError(kViEFileInvalidRenderId); return -1; } - VideoFrame timeout_image; + I420VideoFrame timeout_image; if (ViEFileImage::ConvertJPEGToVideoFrame( ViEId(shared_data_->instance_id(), video_channel), file_nameUTF8, &timeout_image) != 0) { @@ -825,8 +831,8 @@ const unsigned int timeout_ms) { shared_data_->SetLastError(kViEFileSetRenderTimeoutError); return -1; } - VideoFrame timeout_image; - if (ViEFileImage::ConvertPictureToVideoFrame( + I420VideoFrame timeout_image; + if (ViEFileImage::ConvertPictureToI420VideoFrame( ViEId(shared_data_->instance_id(), video_channel), picture, &timeout_image) != 0) { WEBRTC_TRACE(kTraceError, kTraceVideo, @@ -859,7 +865,7 @@ const unsigned int timeout_ms) { } WebRtc_Word32 ViEFileImpl::GetNextCapturedFrame(WebRtc_Word32 capture_id, - VideoFrame* video_frame) { + I420VideoFrame* video_frame) { ViEInputManagerScoped is(*(shared_data_->input_manager())); ViECapturer* capturer = is.Capture(capture_id); if (!capturer) { @@ -921,12 +927,12 @@ ViECaptureSnapshot::~ViECaptureSnapshot() { } bool ViECaptureSnapshot::GetSnapshot(unsigned int max_wait_time, - VideoFrame* video_frame) { + I420VideoFrame* video_frame) { crit_->Enter(); - video_frame_ = new VideoFrame(); + video_frame_ = new I420VideoFrame(); if (condition_varaible_->SleepCS(*(crit_.get()), max_wait_time)) { // Snapshot taken. 
- video_frame->SwapFrame(*video_frame_); + video_frame->SwapFrame(video_frame_); delete video_frame_; video_frame_ = NULL; crit_->Leave(); @@ -937,14 +943,14 @@ bool ViECaptureSnapshot::GetSnapshot(unsigned int max_wait_time, } void ViECaptureSnapshot::DeliverFrame(int id, - VideoFrame* video_frame, + I420VideoFrame* video_frame, int num_csrcs, const WebRtc_UWord32 CSRC[kRtpCsrcSize]) { CriticalSectionScoped cs(crit_.get()); if (!video_frame_) { return; } - video_frame_->SwapFrame(*video_frame); + video_frame_->SwapFrame(video_frame); condition_varaible_->WakeAll(); return; } diff --git a/webrtc/video_engine/vie_file_impl.h b/webrtc/video_engine/vie_file_impl.h index d19cc4d3c..b0b283021 100644 --- a/webrtc/video_engine/vie_file_impl.h +++ b/webrtc/video_engine/vie_file_impl.h @@ -30,11 +30,11 @@ class ViECaptureSnapshot : public ViEFrameCallback { ViECaptureSnapshot(); ~ViECaptureSnapshot(); - bool GetSnapshot(unsigned int max_wait_time, VideoFrame* video_frame); + bool GetSnapshot(unsigned int max_wait_time, I420VideoFrame* video_frame); // Implements ViEFrameCallback. virtual void DeliverFrame(int id, - VideoFrame* video_frame, + I420VideoFrame* video_frame, int num_csrcs = 0, const WebRtc_UWord32 CSRC[kRtpCsrcSize] = NULL); virtual void DelayChanged(int id, int frame_delay) {} @@ -48,7 +48,7 @@ class ViECaptureSnapshot : public ViEFrameCallback { private: scoped_ptr<CriticalSectionWrapper> crit_; scoped_ptr<ConditionVariableWrapper> condition_varaible_; - VideoFrame* video_frame_; + I420VideoFrame* video_frame_; }; class ViEFileImpl @@ -126,7 +126,7 @@ class ViEFileImpl private: WebRtc_Word32 GetNextCapturedFrame(WebRtc_Word32 capture_id, - VideoFrame* video_frame); + I420VideoFrame* video_frame); ViESharedData* shared_data_; }; diff --git a/webrtc/video_engine/vie_file_player.cc b/webrtc/video_engine/vie_file_player.cc index e7e45b9ae..3e304163d 100644 --- a/webrtc/video_engine/vie_file_player.cc +++ b/webrtc/video_engine/vie_file_player.cc @@ -215,17 +215,17 @@ bool ViEFilePlayer::FilePlayDecodeProcess() { if (file_player_->GetVideoFromFile(decoded_video_) != 0) { } } - if (decoded_video_.Length() > 0) { + if (!decoded_video_.IsZeroSize()) { if (local_audio_channel_ != -1 && voe_video_sync_) { // We are playing audio locally. int audio_delay = 0; if (voe_video_sync_->GetPlayoutBufferSize(audio_delay) == 0) { - decoded_video_.SetRenderTime(decoded_video_.RenderTimeMs() + - audio_delay); + decoded_video_.set_render_time_ms(decoded_video_.render_time_ms() + + audio_delay); } } DeliverFrame(&decoded_video_); - decoded_video_.SetLength(0); + decoded_video_.ResetSize(); } } return true; } diff --git a/webrtc/video_engine/vie_file_player.h b/webrtc/video_engine/vie_file_player.h index 15ac35a4e..1f9025825 100644 --- a/webrtc/video_engine/vie_file_player.h +++ b/webrtc/video_engine/vie_file_player.h @@ -15,6 +15,7 @@ #include <set> #include "common_types.h" // NOLINT +#include "common_video/interface/i420_video_frame.h" #include "modules/media_file/interface/media_file_defines.h" #include "system_wrappers/interface/file_wrapper.h" #include "typedefs.h" // NOLINT @@ -130,7 +131,7 @@ class ViEFilePlayer std::set<int> audio_channels_sending_; // Frame receiving decoded video from file.
- VideoFrame decoded_video_; + I420VideoFrame decoded_video_; }; } // namespace webrtc diff --git a/webrtc/video_engine/vie_file_recorder.cc b/webrtc/video_engine/vie_file_recorder.cc index cdb0d50ea..54610420f 100644 --- a/webrtc/video_engine/vie_file_recorder.cc +++ b/webrtc/video_engine/vie_file_recorder.cc @@ -179,7 +179,7 @@ bool ViEFileRecorder::IsRecordingFileFormat(const FileFormats file_format) { return (file_recorder_->RecordingFileFormat() == file_format) ? true : false; } -void ViEFileRecorder::RecordVideoFrame(const VideoFrame& video_frame) { +void ViEFileRecorder::RecordVideoFrame(const I420VideoFrame& video_frame) { CriticalSectionScoped lock(recorder_cs_); if (file_recorder_ && file_recorder_->IsRecording()) { @@ -188,16 +188,17 @@ void ViEFileRecorder::RecordVideoFrame(const VideoFrame& video_frame) { // Compensate for frame delay in order to get audio/video sync when // recording local video. - const WebRtc_UWord32 time_stamp = video_frame.TimeStamp(); - const WebRtc_Word64 render_time_stamp = video_frame.RenderTimeMs(); - VideoFrame& unconst_video_frame = const_cast(video_frame); - unconst_video_frame.SetTimeStamp(time_stamp - 90 * frame_delay_); - unconst_video_frame.SetRenderTime(render_time_stamp - frame_delay_); + const WebRtc_UWord32 time_stamp = video_frame.timestamp(); + const WebRtc_Word64 render_time_stamp = video_frame.render_time_ms(); + I420VideoFrame& unconst_video_frame = + const_cast(video_frame); + unconst_video_frame.set_timestamp(time_stamp - 90 * frame_delay_); + unconst_video_frame.set_render_time_ms(render_time_stamp - frame_delay_); file_recorder_->RecordVideoToFile(unconst_video_frame); - unconst_video_frame.SetRenderTime(render_time_stamp); - unconst_video_frame.SetTimeStamp(time_stamp); + unconst_video_frame.set_render_time_ms(render_time_stamp); + unconst_video_frame.set_timestamp(time_stamp); } } diff --git a/webrtc/video_engine/vie_file_recorder.h b/webrtc/video_engine/vie_file_recorder.h index cc964faf2..5560a0421 100644 --- a/webrtc/video_engine/vie_file_recorder.h +++ b/webrtc/video_engine/vie_file_recorder.h @@ -37,7 +37,7 @@ class ViEFileRecorder : protected OutStream { bool RecordingStarted(); // Records incoming decoded video frame to file. 
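// ---------------------------------------------------------------------------
// [Editor's sketch] RecordVideoFrame() above shifts both clocks back by the
// capture pipeline delay so recorded audio and video stay in sync: 90 RTP
// ticks per millisecond of delay on the 90 kHz timestamp, and the delay
// itself on the wall-clock render time. FrameTimes is an illustrative
// stand-in for the frame's timestamp fields.
// ---------------------------------------------------------------------------
#include <cstdint>

struct FrameTimes {
  uint32_t rtp_timestamp;  // 90 kHz units.
  int64_t render_time_ms;  // Wall-clock milliseconds.
};

FrameTimes CompensateForDelay(FrameTimes t, int frame_delay_ms) {
  t.rtp_timestamp -= 90u * static_cast<uint32_t>(frame_delay_ms);
  t.render_time_ms -= frame_delay_ms;
  return t;
}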
- void RecordVideoFrame(const VideoFrame& video_frame); + void RecordVideoFrame(const I420VideoFrame& video_frame); protected: bool FirstFrameRecorded(); diff --git a/webrtc/video_engine/vie_frame_provider_base.cc b/webrtc/video_engine/vie_frame_provider_base.cc index 134f5c340..60f29379f 100644 --- a/webrtc/video_engine/vie_frame_provider_base.cc +++ b/webrtc/video_engine/vie_frame_provider_base.cc @@ -12,7 +12,7 @@ #include -#include "modules/interface/module_common_types.h" +#include "common_video/interface/i420_video_frame.h" #include "system_wrappers/interface/critical_section_wrapper.h" #include "system_wrappers/interface/tick_util.h" #include "system_wrappers/interface/trace.h" @@ -46,7 +46,7 @@ int ViEFrameProviderBase::Id() { } void ViEFrameProviderBase::DeliverFrame( - VideoFrame* video_frame, + I420VideoFrame* video_frame, int num_csrcs, const WebRtc_UWord32 CSRC[kRtpCsrcSize]) { #ifdef DEBUG_ @@ -64,7 +64,7 @@ void ViEFrameProviderBase::DeliverFrame( for (FrameCallbacks::iterator it = frame_callbacks_.begin(); it != frame_callbacks_.end(); ++it) { if (!extra_frame_.get()) { - extra_frame_.reset(new VideoFrame()); + extra_frame_.reset(new I420VideoFrame()); } extra_frame_->CopyFrame(*video_frame); (*it)->DeliverFrame(id_, extra_frame_.get(), num_csrcs, CSRC); diff --git a/webrtc/video_engine/vie_frame_provider_base.h b/webrtc/video_engine/vie_frame_provider_base.h index 2f75adb79..d9a50088d 100644 --- a/webrtc/video_engine/vie_frame_provider_base.h +++ b/webrtc/video_engine/vie_frame_provider_base.h @@ -21,14 +21,14 @@ namespace webrtc { class CriticalSectionWrapper; class VideoEncoder; -class VideoFrame; +class I420VideoFrame; // ViEFrameCallback shall be implemented by all classes receiving frames from a // frame provider. class ViEFrameCallback { public: virtual void DeliverFrame(int id, - VideoFrame* video_frame, + I420VideoFrame* video_frame, int num_csrcs = 0, const WebRtc_UWord32 CSRC[kRtpCsrcSize] = NULL) = 0; @@ -75,7 +75,7 @@ class ViEFrameProviderBase { virtual int FrameCallbackChanged() = 0; protected: - void DeliverFrame(VideoFrame* video_frame, + void DeliverFrame(I420VideoFrame* video_frame, int num_csrcs = 0, const WebRtc_UWord32 CSRC[kRtpCsrcSize] = NULL); void SetFrameDelay(int frame_delay); @@ -93,7 +93,7 @@ class ViEFrameProviderBase { scoped_ptr provider_cs_; private: - scoped_ptr extra_frame_; + scoped_ptr extra_frame_; int frame_delay_; }; diff --git a/webrtc/video_engine/vie_renderer.cc b/webrtc/video_engine/vie_renderer.cc index 588f8f60a..4efb39d9f 100644 --- a/webrtc/video_engine/vie_renderer.cc +++ b/webrtc/video_engine/vie_renderer.cc @@ -51,7 +51,7 @@ WebRtc_Word32 ViERenderer::StopRender() { } WebRtc_Word32 ViERenderer::GetLastRenderedFrame(const WebRtc_Word32 renderID, - VideoFrame& video_frame) { + I420VideoFrame& video_frame) { return render_module_.GetLastRenderedFrame(renderID, video_frame); } @@ -80,13 +80,14 @@ WebRtc_Word32 ViERenderer::EnableMirroring(const WebRtc_Word32 render_id, mirror_yaxis); } -WebRtc_Word32 ViERenderer::SetTimeoutImage(const VideoFrame& timeout_image, +WebRtc_Word32 ViERenderer::SetTimeoutImage(const I420VideoFrame& timeout_image, const WebRtc_Word32 timeout_value) { return render_module_.SetTimeoutImage(render_id_, timeout_image, timeout_value); } -WebRtc_Word32 ViERenderer::SetRenderStartImage(const VideoFrame& start_image) { +WebRtc_Word32 ViERenderer::SetRenderStartImage( + const I420VideoFrame& start_image) { return render_module_.SetStartImage(render_id_, start_image); } @@ -130,7 +131,7 @@ WebRtc_Word32 
ViERenderer::Init(const WebRtc_UWord32 z_order, } void ViERenderer::DeliverFrame(int id, - VideoFrame* video_frame, + I420VideoFrame* video_frame, int num_csrcs, const WebRtc_UWord32 CSRC[kRtpCsrcSize]) { render_callback_->RenderFrame(render_id_, *video_frame); @@ -167,14 +168,14 @@ int ViEExternalRendererImpl::SetViEExternalRenderer( WebRtc_Word32 ViEExternalRendererImpl::RenderFrame( const WebRtc_UWord32 stream_id, - VideoFrame& video_frame) { + I420VideoFrame& video_frame) { VideoFrame* out_frame = converted_frame_.get(); // Convert to requested format. VideoType type = RawVideoTypeToCommonVideoVideoType(external_renderer_format_); - int buffer_size = CalcBufferSize(type, video_frame.Width(), - video_frame.Height()); + int buffer_size = CalcBufferSize(type, video_frame.width(), + video_frame.height()); if (buffer_size <= 0) { // Unsupported video format. assert(false); @@ -183,9 +184,16 @@ WebRtc_Word32 ViEExternalRendererImpl::RenderFrame( converted_frame_->VerifyAndAllocate(buffer_size); switch (external_renderer_format_) { - case kVideoI420: - out_frame = &video_frame; + case kVideoI420: { + // TODO(mikhal): need to copy the buffer as is. + // can the output here be a I420 frame? + int length = ExtractBuffer(video_frame, out_frame->Size(), + out_frame->Buffer()); + if (length < 0) + return -1; + out_frame->SetLength(length); break; + } case kVideoYV12: case kVideoYUY2: case kVideoUYVY: @@ -195,8 +203,7 @@ WebRtc_Word32 ViEExternalRendererImpl::RenderFrame( case kVideoARGB4444: case kVideoARGB1555 : { - ConvertFromI420(video_frame, video_frame.Width(), type, 0, - converted_frame_->Buffer()); + ConvertFromI420(video_frame, type, 0, converted_frame_->Buffer()); } break; case kVideoIYUV: @@ -208,10 +215,10 @@ WebRtc_Word32 ViEExternalRendererImpl::RenderFrame( break; } - if (external_renderer_width_ != video_frame.Width() || - external_renderer_height_ != video_frame.Height()) { - external_renderer_width_ = video_frame.Width(); - external_renderer_height_ = video_frame.Height(); + if (external_renderer_width_ != video_frame.width() || + external_renderer_height_ != video_frame.height()) { + external_renderer_width_ = video_frame.width(); + external_renderer_height_ = video_frame.height(); external_renderer_->FrameSizeChange(external_renderer_width_, external_renderer_height_, stream_id); } @@ -219,8 +226,8 @@ WebRtc_Word32 ViEExternalRendererImpl::RenderFrame( if (out_frame) { external_renderer_->DeliverFrame(out_frame->Buffer(), out_frame->Length(), - video_frame.TimeStamp(), - video_frame.RenderTimeMs()); + video_frame.timestamp(), + video_frame.render_time_ms()); } return 0; } diff --git a/webrtc/video_engine/vie_renderer.h b/webrtc/video_engine/vie_renderer.h index 85380f745..2afd78971 100644 --- a/webrtc/video_engine/vie_renderer.h +++ b/webrtc/video_engine/vie_renderer.h @@ -33,13 +33,14 @@ class ViEExternalRendererImpl : public VideoRenderCallback { // Implements VideoRenderCallback. virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 stream_id, - VideoFrame& video_frame); + I420VideoFrame& video_frame); private: ExternalRenderer* external_renderer_; RawVideoType external_renderer_format_; - WebRtc_UWord32 external_renderer_width_; - WebRtc_UWord32 external_renderer_height_; + int external_renderer_width_; + int external_renderer_height_; + // Converted_frame_ in color format specified by render_format_. 
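// ---------------------------------------------------------------------------
// [Editor's sketch] ViEExternalRendererImpl::RenderFrame() above dispatches
// on the renderer's requested format: I420 is delivered as the packed planes,
// anything else goes through ConvertFromI420() into a buffer sized for the
// destination format. The sizing/dispatch skeleton only; the enum and helpers
// are illustrative stand-ins for RawVideoType and the libyuv wrappers, and
// the non-I420 branch is left as a placeholder.
// ---------------------------------------------------------------------------
#include <cstdint>
#include <vector>

enum RenderFormat { kFmtI420, kFmtARGB };

int BufferSize(RenderFormat fmt, int width, int height) {
  switch (fmt) {
    case kFmtI420:  // Y plane + two quarter-size chroma planes.
      return width * height + 2 * (((width + 1) / 2) * ((height + 1) / 2));
    case kFmtARGB:  // 4 bytes per pixel.
      return width * height * 4;
  }
  return -1;  // Unsupported format, cf. the assert(false) branch above.
}

bool RenderTo(RenderFormat fmt, const std::vector<uint8_t>& packed_i420,
              int width, int height, std::vector<uint8_t>* out) {
  const int size = BufferSize(fmt, width, height);
  if (size <= 0)
    return false;
  if (fmt == kFmtI420) {
    *out = packed_i420;  // No conversion: pass the packed planes through.
    return true;
  }
  out->resize(size);
  // Placeholder for ConvertFromI420(frame, fmt, 0, out->data()); a real
  // implementation would expand Y/U/V into the destination format here.
  return false;
}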
scoped_ptr<VideoFrame> converted_frame_; }; @@ -60,7 +61,7 @@ class ViERenderer: public ViEFrameCallback { WebRtc_Word32 StopRender(); WebRtc_Word32 GetLastRenderedFrame(const WebRtc_Word32 renderID, - VideoFrame& video_frame); + I420VideoFrame& video_frame); int SetExpectedRenderDelay(int render_delay); @@ -77,9 +78,9 @@ class ViERenderer: public ViEFrameCallback { const bool mirror_xaxis, const bool mirror_yaxis); - WebRtc_Word32 SetTimeoutImage(const VideoFrame& timeout_image, + WebRtc_Word32 SetTimeoutImage(const I420VideoFrame& timeout_image, const WebRtc_Word32 timeout_value); - WebRtc_Word32 SetRenderStartImage(const VideoFrame& start_image); + WebRtc_Word32 SetRenderStartImage(const I420VideoFrame& start_image); WebRtc_Word32 SetExternalRenderer(const WebRtc_Word32 render_id, RawVideoType video_input_format, ExternalRenderer* external_renderer); @@ -97,7 +98,7 @@ class ViERenderer: public ViEFrameCallback { // Implement ViEFrameCallback virtual void DeliverFrame(int id, - VideoFrame* video_frame, + I420VideoFrame* video_frame, int num_csrcs = 0, const WebRtc_UWord32 CSRC[kRtpCsrcSize] = NULL); virtual void DelayChanged(int id, int frame_delay);