Replacing RawImage with VideoFrame in video_coding and related engine code.

This is the first step in replacing RawImage with VideoFrame across all WebRTC modules.

BUG=
TEST=

Review URL: https://webrtc-codereview.appspot.com/672010

git-svn-id: http://webrtc.googlecode.com/svn/trunk@2540 4adac7df-926f-26a2-2b94-8c16560cd09d
Author: mikhal@webrtc.org
Date:   2012-07-31 15:53:44 +00:00
Parent: 5fe91a89ac
Commit: a2031d58f6
28 changed files with 218 additions and 277 deletions
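
Before the per-file hunks, the shape of the change: RawImage was a plain struct whose callers read and wrote _buffer, _length, _size, _width, _height, and _timeStamp directly and managed the backing memory with new[]/delete[]; VideoFrame puts those fields behind accessors and owns its allocation. The stand-alone sketch below is illustrative only and is not code from this commit: the stub mirrors just the accessor names that appear in the hunks (Buffer, Length, Size, Width, Height, TimeStamp, the matching setters, VerifyAndAllocate, CopyFrame, Free), and its semantics are a simplified guess at the real common_video class.

    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    // Minimal stand-in for webrtc::VideoFrame, modeling only the accessors
    // used in the hunks below. Illustrative stub, not the real class.
    class VideoFrame {
     public:
      ~VideoFrame() { Free(); }

      // Grow the owned buffer if needed; unlike RawImage, the frame manages
      // its own backing memory.
      void VerifyAndAllocate(uint32_t min_size) {
        if (min_size > size_) {
          uint8_t* new_buffer = new uint8_t[min_size];
          if (buffer_ != nullptr) memcpy(new_buffer, buffer_, length_);
          delete[] buffer_;
          buffer_ = new_buffer;
          size_ = min_size;
        }
      }
      // Copy external data in, replacing the old pattern of aiming
      // RawImage::_buffer at caller-owned memory.
      int CopyFrame(uint32_t length, const uint8_t* source) {
        VerifyAndAllocate(length);
        memcpy(buffer_, source, length);
        length_ = length;
        return 0;
      }
      void Free() {
        delete[] buffer_;
        buffer_ = nullptr;
        size_ = 0;
        length_ = 0;
      }

      // Field-to-accessor mapping applied throughout this commit:
      uint8_t* Buffer() const { return buffer_; }       // was image._buffer
      uint32_t Length() const { return length_; }       // was image._length
      uint32_t Size() const { return size_; }           // was image._size
      uint32_t Width() const { return width_; }         // was image._width
      uint32_t Height() const { return height_; }       // was image._height
      uint32_t TimeStamp() const { return timestamp_; } // was image._timeStamp
      void SetLength(uint32_t v) { length_ = v; }
      void SetWidth(uint32_t v) { width_ = v; }
      void SetHeight(uint32_t v) { height_ = v; }
      void SetTimeStamp(uint32_t v) { timestamp_ = v; }

     private:
      uint8_t* buffer_ = nullptr;
      uint32_t size_ = 0;    // allocated bytes
      uint32_t length_ = 0;  // valid payload bytes
      uint32_t width_ = 0;
      uint32_t height_ = 0;
      uint32_t timestamp_ = 0;
    };

    int main() {
      const uint8_t pixels[12] = {0};  // 4x2 I420: 8 luma + 4 chroma bytes
      VideoFrame frame;
      frame.CopyFrame(sizeof(pixels), pixels);  // frame copies and owns data
      frame.SetWidth(4);
      frame.SetHeight(2);
      frame.SetTimeStamp(90000);
      std::printf("%ux%u, %u of %u bytes\n",
                  static_cast<unsigned>(frame.Width()),
                  static_cast<unsigned>(frame.Height()),
                  static_cast<unsigned>(frame.Length()),
                  static_cast<unsigned>(frame.Size()));
      return 0;  // ~VideoFrame() runs Free(); no manual delete[] as before
    }

The recurring edit in the files below is exactly this mapping, plus replacing manual buffer management (delete [] image._buffer; image._buffer = new uint8_t[size]) with VerifyAndAllocate()/CopyFrame()/Free() on the frame itself.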

View File

@ -47,7 +47,7 @@ public:
//
// Return value : WEBRTC_VIDEO_CODEC_OK if OK.
// <0 - Error
virtual int Encode(const RawImage& inputImage,
virtual int Encode(const VideoFrame& inputImage,
const CodecSpecificInfo* /*codecSpecificInfo*/,
const VideoFrameType /*frameTypes*/);
@ -138,7 +138,7 @@ public:
private:
RawImage _decodedImage;
VideoFrame _decodedImage;
int _width;
int _height;
bool _inited;

View File

@ -44,8 +44,8 @@ int I420Encoder::Release() {
}
int I420Encoder::InitEncode(const VideoCodec* codecSettings,
int /*numberOfCores*/,
uint32_t /*maxPayloadSize */) {
int /*numberOfCores*/,
uint32_t /*maxPayloadSize */) {
if (codecSettings == NULL) {
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
@ -76,7 +76,7 @@ int I420Encoder::InitEncode(const VideoCodec* codecSettings,
int I420Encoder::Encode(const RawImage& inputImage,
int I420Encoder::Encode(const VideoFrame& inputImage,
const CodecSpecificInfo* /*codecSpecificInfo*/,
const VideoFrameType /*frameType*/) {
if (!_inited) {
@ -87,10 +87,10 @@ int I420Encoder::Encode(const RawImage& inputImage,
}
_encodedImage._frameType = kKeyFrame; // No coding.
_encodedImage._timeStamp = inputImage._timeStamp;
_encodedImage._encodedHeight = inputImage._height;
_encodedImage._encodedWidth = inputImage._width;
if (inputImage._length > _encodedImage._size) {
_encodedImage._timeStamp = inputImage.TimeStamp();
_encodedImage._encodedHeight = inputImage.Height();
_encodedImage._encodedWidth = inputImage.Width();
if (inputImage.Length() > _encodedImage._size) {
// Allocating encoded memory.
if (_encodedImage._buffer != NULL) {
@ -108,8 +108,8 @@ int I420Encoder::Encode(const RawImage& inputImage,
_encodedImage._size = newSize;
_encodedImage._buffer = newBuffer;
}
memcpy(_encodedImage._buffer, inputImage._buffer, inputImage._length);
_encodedImage._length = inputImage._length;
memcpy(_encodedImage._buffer, inputImage.Buffer(), inputImage.Length());
_encodedImage._length = inputImage.Length();
_encodedCompleteCallback->Encoded(_encodedImage);
return WEBRTC_VIDEO_CODEC_OK;
}
@ -173,28 +173,13 @@ I420Decoder::Decode(const EncodedImage& inputImage,
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}
// Allocate memory for decoded image.
if (_decodedImage._buffer != NULL) {
delete [] _decodedImage._buffer;
_decodedImage._buffer = NULL;
_decodedImage._size = 0;
}
if (_decodedImage._buffer == NULL) {
const uint32_t newSize = CalcBufferSize(kI420, _width, _height);
uint8_t* newBuffer = new uint8_t[newSize];
if (newBuffer == NULL) {
return WEBRTC_VIDEO_CODEC_MEMORY;
}
_decodedImage._size = newSize;
_decodedImage._buffer = newBuffer;
}
// Set decoded image parameters.
_decodedImage._height = _height;
_decodedImage._width = _width;
_decodedImage._timeStamp = inputImage._timeStamp;
memcpy(_decodedImage._buffer, inputImage._buffer, inputImage._length);
_decodedImage._length = inputImage._length;
if (_decodedImage.CopyFrame(inputImage._length, inputImage._buffer) < 0) {
return WEBRTC_VIDEO_CODEC_MEMORY;
}
_decodedImage.SetHeight(_height);
_decodedImage.SetWidth(_width);
_decodedImage.SetTimeStamp(inputImage._timeStamp);
_decodeCompleteCallback->Decoded(_decodedImage);
return WEBRTC_VIDEO_CODEC_OK;
@ -208,10 +193,7 @@ I420Decoder::RegisterDecodeCompleteCallback(DecodedImageCallback* callback) {
int
I420Decoder::Release() {
if (_decodedImage._buffer != NULL) {
delete [] _decodedImage._buffer;
_decodedImage._buffer = NULL;
}
_decodedImage.Free();
_inited = false;
return WEBRTC_VIDEO_CODEC_OK;
}

View File

@ -37,7 +37,7 @@ class MockVideoEncoder : public VideoEncoder {
WebRtc_Word32 numberOfCores,
WebRtc_UWord32 maxPayloadSize));
MOCK_METHOD3(Encode,
WebRtc_Word32(const RawImage& inputImage,
WebRtc_Word32(const VideoFrame& inputImage,
const CodecSpecificInfo* codecSpecificInfo,
const VideoFrameType frameType));
MOCK_METHOD1(RegisterEncodeCompleteCallback,
@ -57,7 +57,7 @@ class MockVideoEncoder : public VideoEncoder {
class MockDecodedImageCallback : public DecodedImageCallback {
public:
MOCK_METHOD1(Decoded,
WebRtc_Word32(RawImage& decodedImage));
WebRtc_Word32(VideoFrame& decodedImage));
MOCK_METHOD1(ReceivedDecodedReferenceFrame,
WebRtc_Word32(const WebRtc_UWord64 pictureId));
MOCK_METHOD1(ReceivedDecodedFrame,

View File

@ -12,8 +12,9 @@
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_CODEC_INTERFACE_H
#include "common_types.h"
#include "common_video/interface/video_image.h"
#include "modules/interface/module_common_types.h"
#include "modules/video_coding/codecs/interface/video_error_codes.h"
#include "common_video/interface/video_image.h"
#include "typedefs.h"
namespace webrtc
@ -97,7 +98,7 @@ public:
// - frameType : The frame type to encode
//
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
virtual WebRtc_Word32 Encode(const RawImage& inputImage,
virtual WebRtc_Word32 Encode(const VideoFrame& inputImage,
const CodecSpecificInfo* codecSpecificInfo,
const VideoFrameType frameType) = 0;
@ -159,10 +160,10 @@ public:
// Callback function which is called when an image has been decoded.
//
// Input:
// - decodedImage : The decoded image
// - decodedImage : The decoded image.
//
// Return value : 0 if OK, < 0 otherwise.
virtual WebRtc_Word32 Decoded(RawImage& decodedImage) = 0;
virtual WebRtc_Word32 Decoded(VideoFrame& decodedImage) = 0;
virtual WebRtc_Word32 ReceivedDecodedReferenceFrame(const WebRtc_UWord64 pictureId) {return -1;}

View File

@ -65,10 +65,10 @@ bool VideoProcessorImpl::Init() {
last_successful_frame_buffer_ = new WebRtc_UWord8[frame_length_in_bytes];
// Set fixed properties common for all frames:
source_frame_._width = config_.codec_settings->width;
source_frame_._height = config_.codec_settings->height;
source_frame_._length = frame_length_in_bytes;
source_frame_._size = frame_length_in_bytes;
source_frame_.SetWidth(config_.codec_settings->width);
source_frame_.SetHeight(config_.codec_settings->height);
source_frame_.VerifyAndAllocate(frame_length_in_bytes);
source_frame_.SetLength(frame_length_in_bytes);
// To keep track of spatial resize actions by encoder.
last_encoder_frame_width_ = config_.codec_settings->width;
@ -168,15 +168,16 @@ bool VideoProcessorImpl::ProcessFrame(int frame_number) {
prev_time_stamp_ = -1;
}
if (frame_reader_->ReadFrame(source_buffer_)) {
// point the source frame buffer to the newly read frame data:
source_frame_._buffer = source_buffer_;
// Copy the source frame to the newly read frame data.
// Length is common for all frames.
source_frame_.CopyFrame(source_frame_.Length(), source_buffer_);
// Ensure we have a new statistics data object we can fill:
FrameStatistic& stat = stats_->NewFrame(frame_number);
encode_start_ = TickTime::Now();
// Use the frame number as "timestamp" to identify frames
source_frame_._timeStamp = frame_number;
source_frame_.SetTimeStamp(frame_number);
// Decide if we're going to force a keyframe:
VideoFrameType frame_type = kDeltaFrame;
@ -272,9 +273,9 @@ void VideoProcessorImpl::FrameEncoded(EncodedImage* encoded_image) {
last_frame_missing_ = encoded_image->_length == 0;
}
void VideoProcessorImpl::FrameDecoded(const RawImage& image) {
void VideoProcessorImpl::FrameDecoded(const VideoFrame& image) {
TickTime decode_stop = TickTime::Now();
int frame_number = image._timeStamp;
int frame_number = image.TimeStamp();
// Report stats
FrameStatistic& stat = stats_->stats_[frame_number];
stat.decode_time_in_us = GetElapsedTimeMicroseconds(decode_start_,
@ -282,25 +283,26 @@ void VideoProcessorImpl::FrameDecoded(const RawImage& image) {
stat.decoding_successful = true;
// Check for resize action (either down or up):
if (static_cast<int>(image._width) != last_encoder_frame_width_ ||
static_cast<int>(image._height) != last_encoder_frame_height_ ) {
if (static_cast<int>(image.Width()) != last_encoder_frame_width_ ||
static_cast<int>(image.Height()) != last_encoder_frame_height_ ) {
++num_spatial_resizes_;
last_encoder_frame_width_ = image._width;
last_encoder_frame_height_ = image._height;
last_encoder_frame_width_ = image.Width();
last_encoder_frame_height_ = image.Height();
}
// Check if codec size is different from native/original size, and if so,
// upsample back to original size: needed for PSNR and SSIM computations.
if (image._width != config_.codec_settings->width ||
image._height != config_.codec_settings->height) {
int required_size = static_cast<WebRtc_UWord32>
(config_.codec_settings->width * config_.codec_settings->height * 3 / 2);
RawImage up_image = image;
up_image._buffer = new uint8_t[required_size];
up_image._length = required_size;
up_image._width = config_.codec_settings->width;
up_image._height = config_.codec_settings->height;
if (image.Width() != config_.codec_settings->width ||
image.Height() != config_.codec_settings->height) {
int required_size = CalcBufferSize(kI420,
config_.codec_settings->width,
config_.codec_settings->height);
VideoFrame up_image;
up_image.VerifyAndAllocate(required_size);
up_image.SetLength(required_size);
up_image.SetWidth(config_.codec_settings->width);
up_image.SetHeight(config_.codec_settings->height);
int ret_val = scaler_.Set(image._width, image._height,
int ret_val = scaler_.Set(image.Width(), image.Height(),
config_.codec_settings->width,
config_.codec_settings->height,
kI420, kI420, kScaleBilinear);
@ -309,7 +311,7 @@ void VideoProcessorImpl::FrameDecoded(const RawImage& image) {
fprintf(stderr, "Failed to set scalar for frame: %d, return code: %d\n",
frame_number, ret_val);
}
ret_val = scaler_.Scale(image._buffer, up_image._buffer,
ret_val = scaler_.Scale(image.Buffer(), up_image.Buffer(),
required_size);
assert(ret_val >= 0);
if (ret_val < 0) {
@ -317,19 +319,19 @@ void VideoProcessorImpl::FrameDecoded(const RawImage& image) {
frame_number, ret_val);
}
// Update our copy of the last successful frame:
memcpy(last_successful_frame_buffer_, up_image._buffer, up_image._length);
memcpy(last_successful_frame_buffer_, up_image.Buffer(), up_image.Length());
bool write_success = frame_writer_->WriteFrame(up_image._buffer);
bool write_success = frame_writer_->WriteFrame(up_image.Buffer());
assert(write_success);
if (!write_success) {
fprintf(stderr, "Failed to write frame %d to disk!", frame_number);
}
delete [] up_image._buffer;
up_image.Free();
} else { // No resize.
// Update our copy of the last successful frame:
memcpy(last_successful_frame_buffer_, image._buffer, image._length);
memcpy(last_successful_frame_buffer_, image.Buffer(), image.Length());
bool write_success = frame_writer_->WriteFrame(image._buffer);
bool write_success = frame_writer_->WriteFrame(image.Buffer());
assert(write_success);
if (!write_success) {
fprintf(stderr, "Failed to write frame %d to disk!", frame_number);
@ -386,7 +388,7 @@ VideoProcessorImpl::VideoProcessorEncodeCompleteCallback::Encoded(
}
WebRtc_Word32
VideoProcessorImpl::VideoProcessorDecodeCompleteCallback::Decoded(
RawImage& image) {
VideoFrame& image) {
video_processor_->FrameDecoded(image); // forward to parent class
return 0;
}

View File

@ -173,9 +173,9 @@ class VideoProcessorImpl : public VideoProcessor {
private:
// Invoked by the callback when a frame has completed encoding.
void FrameEncoded(EncodedImage* encodedImage);
void FrameEncoded(webrtc::EncodedImage* encodedImage);
// Invoked by the callback when a frame has completed decoding.
void FrameDecoded(const RawImage& image);
void FrameDecoded(const webrtc::VideoFrame& image);
// Used for getting a 32-bit integer representing time
// (checks the size is within signed 32-bit bounds before casting it)
int GetElapsedTimeMicroseconds(const webrtc::TickTime& start,
@ -204,7 +204,7 @@ class VideoProcessorImpl : public VideoProcessor {
// Keep track of the last successful frame, since we need to write that
// when decoding fails:
WebRtc_UWord8* last_successful_frame_buffer_;
webrtc::RawImage source_frame_;
webrtc::VideoFrame source_frame_;
// To keep track of if we have excluded the first key frame from packet loss:
bool first_key_frame_has_been_excluded_;
// To tell the decoder previous frame have been dropped due to packet loss:
@ -247,7 +247,7 @@ class VideoProcessorImpl : public VideoProcessor {
explicit VideoProcessorDecodeCompleteCallback(VideoProcessorImpl* vp)
: video_processor_(vp) {
}
WebRtc_Word32 Decoded(webrtc::RawImage& image);
WebRtc_Word32 Decoded(webrtc::VideoFrame& image);
private:
VideoProcessorImpl* video_processor_;

View File

@ -261,14 +261,14 @@ WebRtc_UWord32 VideoDecodeCompleteCallback::DecodedBytes()
}
WebRtc_Word32
VideoDecodeCompleteCallback::Decoded(RawImage& image)
VideoDecodeCompleteCallback::Decoded(VideoFrame& image)
{
_test.Decoded(image);
_decodedBytes += image._length;
_decodedBytes += image.Length();
if (_decodedFile != NULL)
{
if (fwrite(image._buffer, 1, image._length,
_decodedFile) != image._length) {
if (fwrite(image.Buffer(), 1, image.Length(),
_decodedFile) != image.Length()) {
return -1;
}
}
@ -299,14 +299,14 @@ NormalAsyncTest::Encoded(const EncodedImage& encodedImage)
}
void
NormalAsyncTest::Decoded(const RawImage& decodedImage)
NormalAsyncTest::Decoded(const VideoFrame& decodedImage)
{
_decodeCompleteTime = tGetTime();
_decFrameCnt++;
_totalDecodePipeTime += _decodeCompleteTime -
_decodeTimes[decodedImage._timeStamp];
_decodedWidth = decodedImage._width;
_decodedHeight = decodedImage._height;
_decodeTimes[decodedImage.TimeStamp()];
_decodedWidth = decodedImage.Width();
_decodedHeight = decodedImage.Height();
}
void
@ -414,14 +414,14 @@ NormalAsyncTest::Encode()
(_encFrameCnt * 9e4 / _inst.maxFramerate));
_inputVideoBuffer.SetWidth(_inst.width);
_inputVideoBuffer.SetHeight(_inst.height);
RawImage rawImage;
VideoFrame rawImage;
VideoBufferToRawImage(_inputVideoBuffer, rawImage);
if (feof(_sourceFile) != 0)
{
return true;
}
_encodeCompleteTime = 0;
_encodeTimes[rawImage._timeStamp] = tGetTime();
_encodeTimes[rawImage.TimeStamp()] = tGetTime();
VideoFrameType frameType = kDeltaFrame;
// check SLI queue
@ -474,11 +474,11 @@ NormalAsyncTest::Encode()
if (_encodeCompleteTime > 0)
{
_totalEncodeTime += _encodeCompleteTime -
_encodeTimes[rawImage._timeStamp];
_encodeTimes[rawImage.TimeStamp()];
}
else
{
_totalEncodeTime += tGetTime() - _encodeTimes[rawImage._timeStamp];
_totalEncodeTime += tGetTime() - _encodeTimes[rawImage.TimeStamp()];
}
assert(ret >= 0);
return false;

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@ -82,7 +82,7 @@ public:
virtual ~NormalAsyncTest() {};
virtual void Perform();
virtual void Encoded(const webrtc::EncodedImage& encodedImage);
virtual void Decoded(const webrtc::RawImage& decodedImage);
virtual void Decoded(const webrtc::VideoFrame& decodedImage);
virtual webrtc::CodecSpecificInfo*
CopyCodecSpecificInfo(
const webrtc::CodecSpecificInfo* codecSpecificInfo) const;
@ -174,7 +174,7 @@ public:
_decodedBytes(0)
{}
virtual WebRtc_Word32 Decoded(webrtc::RawImage& decodedImage);
virtual WebRtc_Word32 Decoded(webrtc::VideoFrame& decodedImage);
virtual WebRtc_Word32
ReceivedDecodedReferenceFrame(const WebRtc_UWord64 pictureId);
virtual WebRtc_Word32 ReceivedDecodedFrame(const WebRtc_UWord64 pictureId);

View File

@ -64,11 +64,11 @@ PacketLossTest::Encoded(const EncodedImage& encodedImage)
}
void
PacketLossTest::Decoded(const RawImage& decodedImage)
PacketLossTest::Decoded(const VideoFrame& decodedImage)
{
// check the frame queue if any frames have gone missing
assert(!_frameQueue.empty()); // decoded frame is not in the queue
while(_frameQueue.front() < decodedImage._timeStamp)
while(_frameQueue.front() < decodedImage.TimeStamp())
{
// this frame is missing
// write previous decoded frame again (frame freeze)
@ -83,20 +83,21 @@ PacketLossTest::Decoded(const RawImage& decodedImage)
// remove frame from queue
_frameQueue.pop_front();
}
assert(_frameQueue.front() == decodedImage._timeStamp); // decoded frame is not in the queue
// Decoded frame is not in the queue.
assert(_frameQueue.front() == decodedImage.TimeStamp());
// pop the current frame
_frameQueue.pop_front();
// save image for future freeze-frame
if (_lastFrameLength < decodedImage._length)
if (_lastFrameLength < decodedImage.Length())
{
if (_lastFrame) delete [] _lastFrame;
_lastFrame = new WebRtc_UWord8[decodedImage._length];
_lastFrame = new WebRtc_UWord8[decodedImage.Length()];
}
memcpy(_lastFrame, decodedImage._buffer, decodedImage._length);
_lastFrameLength = decodedImage._length;
memcpy(_lastFrame, decodedImage.Buffer(), decodedImage.Length());
_lastFrameLength = decodedImage.Length();
NormalAsyncTest::Decoded(decodedImage);
}

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@ -21,7 +21,7 @@ public:
PacketLossTest();
virtual ~PacketLossTest() {if(_lastFrame) {delete [] _lastFrame; _lastFrame = NULL;}}
virtual void Encoded(const webrtc::EncodedImage& encodedImage);
virtual void Decoded(const webrtc::RawImage& decodedImage);
virtual void Decoded(const webrtc::VideoFrame& decodedImage);
protected:
PacketLossTest(std::string name, std::string description);
PacketLossTest(std::string name,

View File

@ -265,7 +265,7 @@ PerformanceTest::PerformSingleTest()
bool PerformanceTest::Encode()
{
RawImage rawImage;
VideoFrame rawImage;
VideoBufferToRawImage(_inputVideoBuffer, rawImage);
VideoFrameType frameType = kDeltaFrame;
if (_requestKeyFrame && !(_encFrameCnt%50))

View File

@ -135,14 +135,14 @@ bool CodecTest::PacketLoss(double lossRate, int /*thrown*/)
}
void
CodecTest::VideoBufferToRawImage(TestVideoBuffer& videoBuffer, RawImage &image)
CodecTest::VideoBufferToRawImage(TestVideoBuffer& videoBuffer,
VideoFrame &image)
{
image._buffer = videoBuffer.GetBuffer();
image._size = videoBuffer.GetSize();
image._length = videoBuffer.GetLength();
image._width = videoBuffer.GetWidth();
image._height = videoBuffer.GetHeight();
image._timeStamp = videoBuffer.GetTimeStamp();
// TODO(mikhal): Use videoBuffer in lieu of TestVideoBuffer.
image.CopyFrame(videoBuffer.GetLength(), videoBuffer.GetBuffer());
image.SetWidth(videoBuffer.GetWidth());
image.SetHeight(videoBuffer.GetHeight());
image.SetTimeStamp(videoBuffer.GetTimeStamp());
}
void
CodecTest::VideoEncodedBufferToEncodedImage(TestVideoEncodedBuffer& videoBuffer,

View File

@ -41,7 +41,7 @@ protected:
virtual bool PacketLoss(double lossRate, int /*thrown*/);
static double RandUniform() { return (std::rand() + 1.0)/(RAND_MAX + 1.0); }
static void VideoBufferToRawImage(TestVideoBuffer& videoBuffer,
webrtc::RawImage &image);
webrtc::VideoFrame &image);
static void VideoEncodedBufferToEncodedImage(
TestVideoEncodedBuffer& videoBuffer,
webrtc::EncodedImage &image);

View File

@ -109,13 +109,12 @@ UnitTestEncodeCompleteCallback::Encoded(EncodedImage& encodedImage,
return 0;
}
WebRtc_Word32 UnitTestDecodeCompleteCallback::Decoded(RawImage& image)
WebRtc_Word32 UnitTestDecodeCompleteCallback::Decoded(VideoFrame& image)
{
_decodedVideoBuffer->VerifyAndAllocate(image._length);
_decodedVideoBuffer->CopyBuffer(image._length, image._buffer);
_decodedVideoBuffer->SetWidth(image._width);
_decodedVideoBuffer->SetHeight(image._height);
_decodedVideoBuffer->SetTimeStamp(image._timeStamp);
_decodedVideoBuffer->CopyBuffer(image.Length(), image.Buffer());
_decodedVideoBuffer->SetWidth(image.Width());
_decodedVideoBuffer->SetHeight(image.Height());
_decodedVideoBuffer->SetTimeStamp(image.TimeStamp());
_decodeComplete = true;
return 0;
}
@ -236,7 +235,7 @@ UnitTest::Setup()
// Get a reference encoded frame.
_encodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
RawImage image;
VideoFrame image;
VideoBufferToRawImage(_inputVideoBuffer, image);
// Ensures our initial parameters are valid.
@ -351,7 +350,7 @@ UnitTest::Perform()
{
UnitTest::Setup();
int frameLength;
RawImage inputImage;
VideoFrame inputImage;
EncodedImage encodedImage;
VideoFrameType videoFrameType = kDeltaFrame;
@ -423,7 +422,7 @@ UnitTest::Perform()
// inputVideoBuffer unallocated.
_inputVideoBuffer.Free();
VideoBufferToRawImage(_inputVideoBuffer, inputImage);
inputImage.Free();
EXPECT_TRUE(_encoder->Encode(inputImage, NULL, videoFrameType) ==
WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
_inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
@ -571,8 +570,8 @@ UnitTest::Perform()
// Decode with other size, reset, then decode with original size again
// to verify that decoder is reset to a "fresh" state upon Reset().
{
// assert that input frame size is a factor of two, so that we can use
// quarter size below
// Assert that input frame size is a factor of two, so that we can use
// quarter size below.
EXPECT_TRUE((_inst.width % 2 == 0) && (_inst.height % 2 == 0));
VideoCodec tempInst;
@ -580,19 +579,23 @@ UnitTest::Perform()
tempInst.width /= 2;
tempInst.height /= 2;
// Encode reduced (quarter) frame size
// Encode reduced (quarter) frame size.
EXPECT_TRUE(_encoder->Release() == WEBRTC_VIDEO_CODEC_OK);
EXPECT_TRUE(_encoder->InitEncode(&tempInst, 1, 1440) ==
WEBRTC_VIDEO_CODEC_OK);
RawImage tempInput(inputImage._buffer, inputImage._length/4,
inputImage._size/4);
tempInput._width = tempInst.width;
tempInput._height = tempInst.height;
VideoFrame tempInput;
unsigned int tmpLength = inputImage.Length() / 4;
unsigned int tmpSize = inputImage.Length() / 4;
tempInput.Swap(inputImage.Buffer(),
tmpLength,
tmpSize);
tempInput.SetWidth(tempInst.width);
tempInput.SetHeight(tempInst.height);
VideoFrameType videoFrameType = kDeltaFrame;
_encoder->Encode(tempInput, NULL, videoFrameType);
frameLength = WaitForEncodedFrame();
EXPECT_TRUE(frameLength > 0);
tempInput.Free();
// Reset then decode.
EXPECT_TRUE(_decoder->Reset() == WEBRTC_VIDEO_CODEC_OK);
frameLength = 0;
@ -703,7 +706,7 @@ void
UnitTest::RateControlTests()
{
int frames = 0;
RawImage inputImage;
VideoFrame inputImage;
WebRtc_UWord32 frameLength;
// Do not specify maxBitRate (as in ViE).

View File

@ -100,7 +100,7 @@ class UnitTestDecodeCompleteCallback : public webrtc::DecodedImageCallback
public:
UnitTestDecodeCompleteCallback(TestVideoBuffer* buffer) :
_decodedVideoBuffer(buffer), _decodeComplete(false) {}
WebRtc_Word32 Decoded(webrtc::RawImage& image);
WebRtc_Word32 Decoded(webrtc::VideoFrame& image);
bool DecodeComplete();
private:
TestVideoBuffer* _decodedVideoBuffer;

View File

@ -72,7 +72,7 @@ class VP8Encoder : public VideoEncoder {
// WEBRTC_VIDEO_CODEC_ERROR
// WEBRTC_VIDEO_CODEC_TIMEOUT
virtual int Encode(const RawImage& input_image,
virtual int Encode(const VideoFrame& input_image,
const CodecSpecificInfo* codec_specific_info,
const VideoFrameType frame_type);
@ -116,9 +116,9 @@ class VP8Encoder : public VideoEncoder {
void PopulateCodecSpecific(CodecSpecificInfo* codec_specific,
const vpx_codec_cx_pkt& pkt);
int GetEncodedFrame(const RawImage& input_image);
int GetEncodedFrame(const VideoFrame& input_image);
int GetEncodedPartitions(const RawImage& input_image);
int GetEncodedPartitions(const VideoFrame& input_image);
// Determine maximum target for Intra frames
//
@ -222,7 +222,7 @@ class VP8Decoder : public VideoDecoder {
int ReturnFrame(const vpx_image_t* img, uint32_t timeStamp);
RawImage decoded_image_;
VideoFrame decoded_image_;
DecodedImageCallback* decode_complete_callback_;
bool inited_;
bool feedback_mode_;

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@ -198,13 +198,13 @@ VP8DualDecoderTest::CheckIfBitExact(const void* ptrA, unsigned int aLengthBytes,
return memcmp(ptrA, ptrB, aLengthBytes) == 0;
}
WebRtc_Word32 DualDecoderCompleteCallback::Decoded(webrtc::RawImage& image)
WebRtc_Word32 DualDecoderCompleteCallback::Decoded(webrtc::VideoFrame& image)
{
_decodedVideoBuffer->VerifyAndAllocate(image._length);
_decodedVideoBuffer->CopyBuffer(image._length, image._buffer);
_decodedVideoBuffer->SetWidth(image._width);
_decodedVideoBuffer->SetHeight(image._height);
_decodedVideoBuffer->SetTimeStamp(image._timeStamp);
_decodedVideoBuffer->VerifyAndAllocate(image.Length());
_decodedVideoBuffer->CopyBuffer(image.Length(), image.Buffer());
_decodedVideoBuffer->SetWidth(image.Width());
_decodedVideoBuffer->SetHeight(image.Height());
_decodedVideoBuffer->SetTimeStamp(image.TimeStamp());
_decodeComplete = true;
return 0;
}

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@ -41,7 +41,7 @@ class DualDecoderCompleteCallback : public webrtc::DecodedImageCallback
public:
DualDecoderCompleteCallback(TestVideoBuffer* buffer)
: _decodedVideoBuffer(buffer), _decodeComplete(false) {}
WebRtc_Word32 Decoded(webrtc::RawImage& decodedImage);
WebRtc_Word32 Decoded(webrtc::VideoFrame& decodedImage);
bool DecodeComplete();
private:
TestVideoBuffer* _decodedVideoBuffer;

View File

@ -142,13 +142,13 @@ bool VP8RpsTest::EncodeRps(RpsDecodeCompleteCallback* decodeCallback) {
(_encFrameCnt * 9e4 / _inst.maxFramerate));
_inputVideoBuffer.SetWidth(_inst.width);
_inputVideoBuffer.SetHeight(_inst.height);
webrtc::RawImage rawImage;
webrtc::VideoFrame rawImage;
VideoBufferToRawImage(_inputVideoBuffer, rawImage);
if (feof(_sourceFile) != 0) {
return true;
}
_encodeCompleteTime = 0;
_encodeTimes[rawImage._timeStamp] = tGetTime();
_encodeTimes[rawImage.TimeStamp()] = tGetTime();
webrtc::VideoFrameType frameType = webrtc::kDeltaFrame;
webrtc::CodecSpecificInfo* codecSpecificInfo = CreateEncoderSpecificInfo();
@ -172,10 +172,10 @@ bool VP8RpsTest::EncodeRps(RpsDecodeCompleteCallback* decodeCallback) {
}
if (_encodeCompleteTime > 0) {
_totalEncodeTime += _encodeCompleteTime -
_encodeTimes[rawImage._timeStamp];
_encodeTimes[rawImage.TimeStamp()];
}
else {
_totalEncodeTime += tGetTime() - _encodeTimes[rawImage._timeStamp];
_totalEncodeTime += tGetTime() - _encodeTimes[rawImage.TimeStamp()];
}
return false;
}
@ -266,12 +266,12 @@ RpsDecodeCompleteCallback::RpsDecodeCompleteCallback(TestVideoBuffer* buffer)
updated_ref_picture_id_(false) {
}
WebRtc_Word32 RpsDecodeCompleteCallback::Decoded(webrtc::RawImage& image) {
decoded_frame_->VerifyAndAllocate(image._length);
decoded_frame_->CopyBuffer(image._length, image._buffer);
decoded_frame_->SetWidth(image._width);
decoded_frame_->SetHeight(image._height);
decoded_frame_->SetTimeStamp(image._timeStamp);
WebRtc_Word32 RpsDecodeCompleteCallback::Decoded(webrtc::VideoFrame& image) {
decoded_frame_->VerifyAndAllocate(image.Length());
decoded_frame_->CopyBuffer(image.Length(), image.Buffer());
decoded_frame_->SetWidth(image.Width());
decoded_frame_->SetHeight(image.Height());
decoded_frame_->SetTimeStamp(image.TimeStamp());
decode_complete_ = true;
return 0;
}

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@ -39,7 +39,7 @@ class VP8RpsTest : public VP8NormalAsyncTest {
class RpsDecodeCompleteCallback : public webrtc::DecodedImageCallback {
public:
RpsDecodeCompleteCallback(TestVideoBuffer* buffer);
WebRtc_Word32 Decoded(webrtc::RawImage& decodedImage);
WebRtc_Word32 Decoded(webrtc::VideoFrame& decodedImage);
bool DecodeComplete();
WebRtc_Word32 ReceivedDecodedReferenceFrame(const WebRtc_UWord64 picture_id);
WebRtc_Word32 ReceivedDecodedFrame(const WebRtc_UWord64 picture_id);

View File

@ -320,13 +320,13 @@ uint32_t VP8Encoder::MaxIntraTarget(uint32_t optimalBuffersize) {
return (targetPct < minIntraTh) ? minIntraTh: targetPct;
}
int VP8Encoder::Encode(const RawImage& input_image,
int VP8Encoder::Encode(const VideoFrame& input_image,
const CodecSpecificInfo* codec_specific_info,
const VideoFrameType frame_type) {
if (!inited_) {
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}
if (input_image._buffer == NULL) {
if (input_image.Buffer() == NULL) {
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
if (encoded_complete_callback_ == NULL) {
@ -334,18 +334,20 @@ int VP8Encoder::Encode(const RawImage& input_image,
}
// Check for change in frame size.
if (input_image._width != codec_.width ||
input_image._height != codec_.height) {
int ret = UpdateCodecFrameSize(input_image._width, input_image._height);
if (input_image.Width() != codec_.width ||
input_image.Height() != codec_.height) {
int ret = UpdateCodecFrameSize(input_image.Width(), input_image.Height());
if (ret < 0) {
return ret;
}
}
// image in vpx_image_t format
raw_->planes[PLANE_Y] = input_image._buffer;
raw_->planes[PLANE_U] = &input_image._buffer[codec_.height * codec_.width];
raw_->planes[PLANE_V] =
&input_image._buffer[codec_.height * codec_.width * 5 >> 2];
// Image in vpx_image_t format.
uint8_t* buffer = input_image.Buffer();
uint32_t v_plane_loc = codec_.height * codec_.width +
((codec_.width + 1) >> 1) * ((codec_.height + 1) >> 1);
raw_->planes[PLANE_Y] = buffer;
raw_->planes[PLANE_U] = &buffer[codec_.width * codec_.height];
raw_->planes[PLANE_V] = &buffer[v_plane_loc];
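// Aside (added for this writeup, not part of the commit): in I420 the planes
// are laid out as Y (width*height bytes) followed by U and V at quarter
// resolution, with odd dimensions rounded up. Hence
//   v_plane_loc = w*h + ((w + 1) >> 1) * ((h + 1) >> 1),
// which equals the old offset (w*h*5 >> 2) only when w and h are both even.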
int flags = 0;
#if WEBRTC_LIBVPX_VERSION >= 971
@ -367,10 +369,11 @@ int VP8Encoder::Encode(const RawImage& input_image,
codec_specific_info->codecSpecific.VP8.pictureIdRPSI);
}
if (codec_specific_info->codecSpecific.VP8.hasReceivedSLI) {
sendRefresh = rps_->ReceivedSLI(input_image._timeStamp);
sendRefresh = rps_->ReceivedSLI(input_image.TimeStamp());
}
}
flags = rps_->EncodeFlags(picture_id_, sendRefresh, input_image._timeStamp);
flags = rps_->EncodeFlags(picture_id_, sendRefresh,
input_image.TimeStamp());
}
// TODO(holmer): Ideally the duration should be the timestamp diff of this
@ -441,7 +444,7 @@ void VP8Encoder::PopulateCodecSpecific(CodecSpecificInfo* codec_specific,
picture_id_ = (picture_id_ + 1) & 0x7FFF; // prepare next
}
int VP8Encoder::GetEncodedFrame(const RawImage& input_image) {
int VP8Encoder::GetEncodedFrame(const VideoFrame& input_image) {
vpx_codec_iter_t iter = NULL;
encoded_image_._frameType = kDeltaFrame;
const vpx_codec_cx_pkt_t *pkt= vpx_codec_get_cx_data(encoder_, &iter);
@ -462,15 +465,16 @@ int VP8Encoder::GetEncodedFrame(const RawImage& input_image) {
encoded_image_._encodedHeight = raw_->h;
encoded_image_._encodedWidth = raw_->w;
// check if encoded frame is a key frame
// Check if encoded frame is a key frame.
if (pkt->data.frame.flags & VPX_FRAME_IS_KEY) {
encoded_image_._frameType = kKeyFrame;
rps_->EncodedKeyFrame(picture_id_);
}
if (encoded_image_._length > 0) {
encoded_image_._timeStamp = input_image._timeStamp;
encoded_image_.capture_time_ms_ = input_image.capture_time_ms_;
encoded_image_._timeStamp = input_image.TimeStamp();
// TODO(mikhal): Resolve confusion in terms.
encoded_image_.capture_time_ms_ = input_image.RenderTimeMs();
// Figure out where partition boundaries are located.
RTPFragmentationHeader fragInfo;
@ -502,7 +506,7 @@ int VP8Encoder::GetEncodedFrame(const RawImage& input_image) {
}
#if WEBRTC_LIBVPX_VERSION >= 971
int VP8Encoder::GetEncodedPartitions(const RawImage& input_image) {
int VP8Encoder::GetEncodedPartitions(const VideoFrame& input_image) {
vpx_codec_iter_t iter = NULL;
int part_idx = 0;
encoded_image_._length = 0;
@ -543,8 +547,8 @@ int VP8Encoder::GetEncodedPartitions(const RawImage& input_image) {
}
}
if (encoded_image_._length > 0) {
encoded_image_._timeStamp = input_image._timeStamp;
encoded_image_.capture_time_ms_ = input_image.capture_time_ms_;
encoded_image_._timeStamp = input_image.TimeStamp();
encoded_image_.capture_time_ms_ = input_image.RenderTimeMs();
encoded_image_._encodedHeight = raw_->h;
encoded_image_._encodedWidth = raw_->w;
encoded_complete_callback_->Encoded(encoded_image_, &codec_specific,
@ -858,36 +862,29 @@ int VP8Decoder::ReturnFrame(const vpx_image_t* img, uint32_t timestamp) {
return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
}
// Allocate memory for decoded image.
uint32_t required_size = CalcBufferSize(kI420, img->d_w, img->d_h);
if (required_size > decoded_image_._size) {
delete [] decoded_image_._buffer;
decoded_image_._buffer = NULL;
}
if (decoded_image_._buffer == NULL) {
decoded_image_._size = required_size;
decoded_image_._buffer = new uint8_t[decoded_image_._size];
}
decoded_image_.VerifyAndAllocate(required_size);
uint8_t* buf;
uint32_t pos = 0;
uint32_t plane, y;
uint8_t* buffer = decoded_image_.Buffer();
for (plane = 0; plane < 3; plane++) {
unsigned int width = (plane ? (img->d_w + 1) >> 1 : img->d_w);
unsigned int height = (plane ? (img->d_h + 1) >> 1 : img->d_h);
buf = img->planes[plane];
for(y = 0; y < height; y++) {
memcpy(&decoded_image_._buffer[pos], buf, width);
memcpy(&buffer[pos], buf, width);
pos += width;
buf += img->stride[plane];
}
}
// Set image parameters
decoded_image_._height = img->d_h;
decoded_image_._width = img->d_w;
decoded_image_._length = CalcBufferSize(kI420, img->d_w, img->d_h);
decoded_image_._timeStamp = timestamp;
// Set decoded image parameters.
decoded_image_.SetHeight(img->d_h);
decoded_image_.SetWidth(img->d_w);
decoded_image_.SetLength(CalcBufferSize(kI420, img->d_w, img->d_h));
decoded_image_.SetTimeStamp(timestamp);
int ret = decode_complete_callback_->Decoded(decoded_image_);
if (ret != 0)
return ret;
@ -904,10 +901,7 @@ int VP8Decoder::RegisterDecodeCompleteCallback(
}
int VP8Decoder::Release() {
if (decoded_image_._buffer != NULL) {
delete [] decoded_image_._buffer;
decoded_image_._buffer = NULL;
}
decoded_image_.Free();
if (last_keyframe_._buffer != NULL) {
delete [] last_keyframe_._buffer;
last_keyframe_._buffer = NULL;
@ -935,7 +929,7 @@ VideoDecoder* VP8Decoder::Copy() {
assert(false);
return NULL;
}
if (decoded_image_._buffer == NULL) {
if (decoded_image_.Buffer() == NULL) {
// Nothing has been decoded before; cannot clone.
return NULL;
}
@ -958,13 +952,13 @@ VideoDecoder* VP8Decoder::Copy() {
return NULL;
}
// Allocate memory for reference image copy
assert(decoded_image_._width > 0);
assert(decoded_image_._height > 0);
assert(decoded_image_.Width() > 0);
assert(decoded_image_.Height() > 0);
assert(image_format_ > VPX_IMG_FMT_NONE);
// Check if frame format has changed.
if (ref_frame_ &&
(decoded_image_._width != ref_frame_->img.d_w ||
decoded_image_._height != ref_frame_->img.d_h ||
(decoded_image_.Width() != ref_frame_->img.d_w ||
decoded_image_.Height() != ref_frame_->img.d_h ||
image_format_ != ref_frame_->img.fmt)) {
vpx_img_free(&ref_frame_->img);
delete ref_frame_;
@ -976,12 +970,13 @@ VideoDecoder* VP8Decoder::Copy() {
ref_frame_ = new vpx_ref_frame_t;
unsigned int align = 1;
if (decoded_image_._width % 32 == 0) {
if (decoded_image_.Width() % 32 == 0) {
align = 32;
}
if (!vpx_img_alloc(&ref_frame_->img,
static_cast<vpx_img_fmt_t>(image_format_),
decoded_image_._width, decoded_image_._height, align)) {
decoded_image_.Width(), decoded_image_.Height(),
align)) {
assert(false);
delete copy;
return NULL;
@ -1006,12 +1001,6 @@ VideoDecoder* VP8Decoder::Copy() {
memcpy(copy->last_keyframe_._buffer, last_keyframe_._buffer,
last_keyframe_._length);
// Initialize decoded_image_.
copy->decoded_image_ = decoded_image_; // Shallow copy
copy->decoded_image_._buffer = NULL;
if (decoded_image_._size) {
copy->decoded_image_._buffer = new uint8_t[decoded_image_._size];
}
return static_cast<VideoDecoder*>(copy);
}

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@ -40,13 +40,13 @@ void VCMDecodedFrameCallback::SetUserReceiveCallback(
_receiveCallback = receiveCallback;
}
WebRtc_Word32 VCMDecodedFrameCallback::Decoded(RawImage& decodedImage)
WebRtc_Word32 VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage)
{
// TODO(holmer): We should improve this so that we can handle multiple
// callbacks from one call to Decode().
CriticalSectionScoped cs(_critSect);
VCMFrameInformation* frameInfo = static_cast<VCMFrameInformation*>(
_timestampMap.Pop(decodedImage._timeStamp));
_timestampMap.Pop(decodedImage.TimeStamp()));
if (frameInfo == NULL)
{
// The map should never be empty or full if this callback is called.
@ -54,20 +54,13 @@ WebRtc_Word32 VCMDecodedFrameCallback::Decoded(RawImage& decodedImage)
}
_timing.StopDecodeTimer(
decodedImage._timeStamp,
decodedImage.TimeStamp(),
frameInfo->decodeStartTimeMs,
_clock->MillisecondTimestamp());
if (_receiveCallback != NULL)
{
_frame.Swap(decodedImage._buffer,
decodedImage._length,
decodedImage._size);
_frame.SetWidth(decodedImage._width);
_frame.SetHeight(decodedImage._height);
_frame.SetTimeStamp(decodedImage._timeStamp);
_frame.SetRenderTime(frameInfo->renderTimeMs);
// Convert raw image to video frame
_frame.SwapFrame(decodedImage);
WebRtc_Word32 callbackReturn = _receiveCallback->FrameToRender(_frame);
if (callbackReturn < 0)
{

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@ -38,7 +38,7 @@ public:
virtual ~VCMDecodedFrameCallback();
void SetUserReceiveCallback(VCMReceiveCallback* receiveCallback);
virtual WebRtc_Word32 Decoded(RawImage& decodedImage);
virtual WebRtc_Word32 Decoded(VideoFrame& decodedImage);
virtual WebRtc_Word32 ReceivedDecodedReferenceFrame(const WebRtc_UWord64 pictureId);
virtual WebRtc_Word32 ReceivedDecodedFrame(const WebRtc_UWord64 pictureId);

View File

@ -59,19 +59,9 @@ VCMGenericEncoder::InitEncode(const VideoCodec* settings,
WebRtc_Word32
VCMGenericEncoder::Encode(const VideoFrame& inputFrame,
const CodecSpecificInfo* codecSpecificInfo,
const FrameType frameType)
{
RawImage rawImage(inputFrame.Buffer(),
inputFrame.Length(),
inputFrame.Size());
rawImage._width = inputFrame.Width();
rawImage._height = inputFrame.Height();
rawImage._timeStamp = inputFrame.TimeStamp();
rawImage.capture_time_ms_ = inputFrame.RenderTimeMs();
VideoFrameType videoFrameType =
VCMEncodedFrame::ConvertFrameType(frameType);
return _encoder.Encode(rawImage, codecSpecificInfo, videoFrameType);
const FrameType frameType) {
VideoFrameType videoFrameType = VCMEncodedFrame::ConvertFrameType(frameType);
return _encoder.Encode(inputFrame, codecSpecificInfo, videoFrameType);
}
WebRtc_Word32
@ -121,7 +111,7 @@ VCMGenericEncoder::SetPeriodicKeyFrames(bool enable)
}
WebRtc_Word32 VCMGenericEncoder::RequestFrame(const FrameType frameType) {
RawImage image;
VideoFrame image;
VideoFrameType videoFrameType = VCMEncodedFrame::ConvertFrameType(frameType);
return _encoder.Encode(image, NULL, videoFrameType);
}

View File

@ -34,7 +34,7 @@ public:
WebRtc_UWord32 maxPayloadSize);
virtual WebRtc_Word32 Encode(
const webrtc::RawImage& inputImage,
const webrtc::VideoFrame& inputImage,
const webrtc::CodecSpecificInfo* codecSpecificInfo,
const webrtc::VideoFrameType frameType);
@ -119,7 +119,7 @@ public:
private:
webrtc::RawImage _decodedImage;
webrtc::VideoFrame _decodedImage;
WebRtc_Word32 _width;
WebRtc_Word32 _height;
bool _inited;

View File

@ -14,6 +14,8 @@
#include <stdio.h>
#include <assert.h>
#include "common_video/libyuv/include/webrtc_libyuv.h"
TbI420Encoder::TbI420Encoder() :
_inited(false), _encodedImage(), _encodedCompleteCallback(NULL)
{
@ -116,7 +118,7 @@ WebRtc_Word32 TbI420Encoder::InitEncode(const webrtc::VideoCodec* inst,
}
WebRtc_Word32 TbI420Encoder::Encode(
const webrtc::RawImage& inputImage,
const webrtc::VideoFrame& inputImage,
const webrtc::CodecSpecificInfo* /*codecSpecificInfo*/,
const webrtc::VideoFrameType /*frameType*/)
{
@ -131,10 +133,10 @@ WebRtc_Word32 TbI420Encoder::Encode(
}
_encodedImage._frameType = webrtc::kKeyFrame; // no coding
_encodedImage._timeStamp = inputImage._timeStamp;
_encodedImage._encodedHeight = inputImage._height;
_encodedImage._encodedWidth = inputImage._width;
if (inputImage._length > _encodedImage._size)
_encodedImage._timeStamp = inputImage.TimeStamp();
_encodedImage._encodedHeight = inputImage.Height();
_encodedImage._encodedWidth = inputImage.Width();
if (inputImage.Length() > _encodedImage._size)
{
// allocating encoded memory
@ -154,9 +156,9 @@ WebRtc_Word32 TbI420Encoder::Encode(
_encodedImage._size = newSize;
_encodedImage._buffer = newBuffer;
}
assert(_encodedImage._size >= inputImage._length);
memcpy(_encodedImage._buffer, inputImage._buffer, inputImage._length);
_encodedImage._length = inputImage._length;
assert(_encodedImage._size >= inputImage.Length());
memcpy(_encodedImage._buffer, inputImage.Buffer(), inputImage.Length());
_encodedImage._length = inputImage.Length();
_encodedCompleteCallback->Encoded(_encodedImage);
return WEBRTC_VIDEO_CODEC_OK;
}
@ -260,34 +262,16 @@ WebRtc_Word32 TbI420Decoder::Decode(
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}
// Allocate memory for decoded image
// Allocate memory for decoded image.
const WebRtc_UWord32 newSize = webrtc::CalcBufferSize(webrtc::kI420,
_width, _height);
_decodedImage.VerifyAndAllocate(newSize);
if (_decodedImage._buffer != NULL)
{
delete[] _decodedImage._buffer;
_decodedImage._buffer = NULL;
_decodedImage._size = 0;
}
if (_decodedImage._buffer == NULL)
{
const WebRtc_UWord32 newSize = (3 * _width * _height) >> 1;
WebRtc_UWord8* newBuffer = new WebRtc_UWord8[newSize];
if (newBuffer == NULL)
{
return WEBRTC_VIDEO_CODEC_MEMORY;
}
_decodedImage._size = newSize;
_decodedImage._buffer = newBuffer;
}
// Set decoded image parameters
_decodedImage._height = _height;
_decodedImage._width = _width;
_decodedImage._timeStamp = inputImage._timeStamp;
assert(_decodedImage._size >= inputImage._length);
memcpy(_decodedImage._buffer, inputImage._buffer, inputImage._length);
_decodedImage._length = inputImage._length;
//_decodedImage._buffer = inputImage._buffer;
// Set decoded image parameters.
_decodedImage.SetHeight(_height);
_decodedImage.SetWidth(_width);
_decodedImage.SetTimeStamp(inputImage._timeStamp);
_decodedImage.CopyFrame(inputImage._length, inputImage._buffer);
_decodeCompleteCallback->Decoded(_decodedImage);
return WEBRTC_VIDEO_CODEC_OK;
@ -304,11 +288,7 @@ WebRtc_Word32 TbI420Decoder::RegisterDecodeCompleteCallback(
WebRtc_Word32 TbI420Decoder::Release()
{
_functionCalls.Release++;
if (_decodedImage._buffer != NULL)
{
delete[] _decodedImage._buffer;
_decodedImage._buffer = NULL;
}
_decodedImage.Free();
_inited = false;
return WEBRTC_VIDEO_CODEC_OK;
}

View File

@ -746,7 +746,7 @@ WebRtc_Word32 ViECapturer::InitEncode(const VideoCodec* codec_settings,
return capture_encoder_->ConfigureEncoder(*codec_settings, max_payload_size);
}
WebRtc_Word32 ViECapturer::Encode(const RawImage& input_image,
WebRtc_Word32 ViECapturer::Encode(const VideoFrame& input_image,
const CodecSpecificInfo* codec_specific_info,
const VideoFrameType frame_type) {
CriticalSectionScoped cs(encoding_cs_.get());

View File

@ -140,7 +140,7 @@ class ViECapturer
virtual WebRtc_Word32 InitEncode(const VideoCodec* codec_settings,
WebRtc_Word32 number_of_cores,
WebRtc_UWord32 max_payload_size);
virtual WebRtc_Word32 Encode(const RawImage& input_image,
virtual WebRtc_Word32 Encode(const VideoFrame& input_image,
const CodecSpecificInfo* codec_specific_info,
const VideoFrameType frame_type);
virtual WebRtc_Word32 RegisterEncodeCompleteCallback(