Updates to Codecs' tests
1. Removing VideoBuffer and replacing with VideoFrame. 2. Removing legacy rawImage related code. Open TODO: Get the frame type, only used in the packet_loss test - a TODO was added. Review URL: https://webrtc-codereview.appspot.com/896004 git-svn-id: http://webrtc.googlecode.com/svn/trunk@2954 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
parent
c0773fc8b0
commit
f1134e4565
@ -283,7 +283,6 @@ Benchmark::PerformNormalTest()
|
|||||||
}
|
}
|
||||||
|
|
||||||
_inputVideoBuffer.Free();
|
_inputVideoBuffer.Free();
|
||||||
//_encodedVideoBuffer.Reset(); ?
|
|
||||||
_encodedVideoBuffer.Free();
|
_encodedVideoBuffer.Free();
|
||||||
_decodedVideoBuffer.Free();
|
_decodedVideoBuffer.Free();
|
||||||
|
|
||||||
|
@ -197,7 +197,7 @@ FrameQueueTuple::~FrameQueueTuple()
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
void FrameQueue::PushFrame(TestVideoEncodedBuffer *frame,
|
void FrameQueue::PushFrame(VideoFrame *frame,
|
||||||
webrtc::CodecSpecificInfo* codecSpecificInfo)
|
webrtc::CodecSpecificInfo* codecSpecificInfo)
|
||||||
{
|
{
|
||||||
WriteLockScoped cs(_queueRWLock);
|
WriteLockScoped cs(_queueRWLock);
|
||||||
@ -234,7 +234,7 @@ VideoEncodeCompleteCallback::Encoded(EncodedImage& encodedImage,
|
|||||||
fragmentation)
|
fragmentation)
|
||||||
{
|
{
|
||||||
_test.Encoded(encodedImage);
|
_test.Encoded(encodedImage);
|
||||||
TestVideoEncodedBuffer *newBuffer = new TestVideoEncodedBuffer();
|
VideoFrame *newBuffer = new VideoFrame();
|
||||||
//newBuffer->VerifyAndAllocate(encodedImage._length);
|
//newBuffer->VerifyAndAllocate(encodedImage._length);
|
||||||
newBuffer->VerifyAndAllocate(encodedImage._size);
|
newBuffer->VerifyAndAllocate(encodedImage._size);
|
||||||
_encodedBytes += encodedImage._length;
|
_encodedBytes += encodedImage._length;
|
||||||
@ -247,8 +247,8 @@ VideoEncodeCompleteCallback::Encoded(EncodedImage& encodedImage,
|
|||||||
_test.CopyEncodedImage(*newBuffer, encodedImage, codecSpecificInfoCopy);
|
_test.CopyEncodedImage(*newBuffer, encodedImage, codecSpecificInfoCopy);
|
||||||
if (_encodedFile != NULL)
|
if (_encodedFile != NULL)
|
||||||
{
|
{
|
||||||
if (fwrite(newBuffer->GetBuffer(), 1, newBuffer->GetLength(),
|
if (fwrite(newBuffer->Buffer(), 1, newBuffer->Length(),
|
||||||
_encodedFile) != newBuffer->GetLength()) {
|
_encodedFile) != newBuffer->Length()) {
|
||||||
return -1;
|
return -1;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -410,19 +410,17 @@ NormalAsyncTest::Encode()
|
|||||||
{
|
{
|
||||||
_lengthEncFrame = 0;
|
_lengthEncFrame = 0;
|
||||||
EXPECT_GT(fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile), 0u);
|
EXPECT_GT(fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile), 0u);
|
||||||
_inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _sourceBuffer);
|
_inputVideoBuffer.CopyFrame(_lengthSourceFrame, _sourceBuffer);
|
||||||
_inputVideoBuffer.SetTimeStamp((unsigned int)
|
_inputVideoBuffer.SetTimeStamp((unsigned int)
|
||||||
(_encFrameCnt * 9e4 / _inst.maxFramerate));
|
(_encFrameCnt * 9e4 / _inst.maxFramerate));
|
||||||
_inputVideoBuffer.SetWidth(_inst.width);
|
_inputVideoBuffer.SetWidth(_inst.width);
|
||||||
_inputVideoBuffer.SetHeight(_inst.height);
|
_inputVideoBuffer.SetHeight(_inst.height);
|
||||||
VideoFrame rawImage;
|
|
||||||
VideoBufferToRawImage(_inputVideoBuffer, rawImage);
|
|
||||||
if (feof(_sourceFile) != 0)
|
if (feof(_sourceFile) != 0)
|
||||||
{
|
{
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
_encodeCompleteTime = 0;
|
_encodeCompleteTime = 0;
|
||||||
_encodeTimes[rawImage.TimeStamp()] = tGetTime();
|
_encodeTimes[_inputVideoBuffer.TimeStamp()] = tGetTime();
|
||||||
std::vector<VideoFrameType> frame_types(1, kDeltaFrame);
|
std::vector<VideoFrameType> frame_types(1, kDeltaFrame);
|
||||||
|
|
||||||
// check SLI queue
|
// check SLI queue
|
||||||
@ -465,7 +463,8 @@ NormalAsyncTest::Encode()
|
|||||||
}
|
}
|
||||||
|
|
||||||
webrtc::CodecSpecificInfo* codecSpecificInfo = CreateEncoderSpecificInfo();
|
webrtc::CodecSpecificInfo* codecSpecificInfo = CreateEncoderSpecificInfo();
|
||||||
int ret = _encoder->Encode(rawImage, codecSpecificInfo, &frame_types);
|
int ret = _encoder->Encode(_inputVideoBuffer,
|
||||||
|
codecSpecificInfo, &frame_types);
|
||||||
EXPECT_EQ(ret, WEBRTC_VIDEO_CODEC_OK);
|
EXPECT_EQ(ret, WEBRTC_VIDEO_CODEC_OK);
|
||||||
if (codecSpecificInfo != NULL)
|
if (codecSpecificInfo != NULL)
|
||||||
{
|
{
|
||||||
@ -475,11 +474,12 @@ NormalAsyncTest::Encode()
|
|||||||
if (_encodeCompleteTime > 0)
|
if (_encodeCompleteTime > 0)
|
||||||
{
|
{
|
||||||
_totalEncodeTime += _encodeCompleteTime -
|
_totalEncodeTime += _encodeCompleteTime -
|
||||||
_encodeTimes[rawImage.TimeStamp()];
|
_encodeTimes[_inputVideoBuffer.TimeStamp()];
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
_totalEncodeTime += tGetTime() - _encodeTimes[rawImage.TimeStamp()];
|
_totalEncodeTime += tGetTime() -
|
||||||
|
_encodeTimes[_inputVideoBuffer.TimeStamp()];
|
||||||
}
|
}
|
||||||
assert(ret >= 0);
|
assert(ret >= 0);
|
||||||
return false;
|
return false;
|
||||||
@ -488,7 +488,7 @@ NormalAsyncTest::Encode()
|
|||||||
int
|
int
|
||||||
NormalAsyncTest::Decode(int lossValue)
|
NormalAsyncTest::Decode(int lossValue)
|
||||||
{
|
{
|
||||||
_sumEncBytes += _frameToDecode->_frame->GetLength();
|
_sumEncBytes += _frameToDecode->_frame->Length();
|
||||||
EncodedImage encodedImage;
|
EncodedImage encodedImage;
|
||||||
VideoEncodedBufferToEncodedImage(*(_frameToDecode->_frame), encodedImage);
|
VideoEncodedBufferToEncodedImage(*(_frameToDecode->_frame), encodedImage);
|
||||||
encodedImage._completeFrame = !lossValue;
|
encodedImage._completeFrame = !lossValue;
|
||||||
@ -570,14 +570,14 @@ void NormalAsyncTest::CodecSpecific_InitBitrate()
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
void NormalAsyncTest::CopyEncodedImage(TestVideoEncodedBuffer& dest,
|
void NormalAsyncTest::CopyEncodedImage(VideoFrame& dest,
|
||||||
EncodedImage& src,
|
EncodedImage& src,
|
||||||
void* /*codecSpecificInfo*/) const
|
void* /*codecSpecificInfo*/) const
|
||||||
{
|
{
|
||||||
dest.CopyBuffer(src._length, src._buffer);
|
dest.CopyFrame(src._length, src._buffer);
|
||||||
dest.SetFrameType(src._frameType);
|
//dest.SetFrameType(src._frameType);
|
||||||
dest.SetCaptureWidth((WebRtc_UWord16)src._encodedWidth);
|
dest.SetWidth((WebRtc_UWord16)src._encodedWidth);
|
||||||
dest.SetCaptureHeight((WebRtc_UWord16)src._encodedHeight);
|
dest.SetHeight((WebRtc_UWord16)src._encodedHeight);
|
||||||
dest.SetTimeStamp(src._timeStamp);
|
dest.SetTimeStamp(src._timeStamp);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -22,14 +22,14 @@
|
|||||||
class FrameQueueTuple
|
class FrameQueueTuple
|
||||||
{
|
{
|
||||||
public:
|
public:
|
||||||
FrameQueueTuple(TestVideoEncodedBuffer *frame,
|
FrameQueueTuple(webrtc::VideoFrame *frame,
|
||||||
const webrtc::CodecSpecificInfo* codecSpecificInfo = NULL)
|
const webrtc::CodecSpecificInfo* codecSpecificInfo = NULL)
|
||||||
:
|
:
|
||||||
_frame(frame),
|
_frame(frame),
|
||||||
_codecSpecificInfo(codecSpecificInfo)
|
_codecSpecificInfo(codecSpecificInfo)
|
||||||
{};
|
{};
|
||||||
~FrameQueueTuple();
|
~FrameQueueTuple();
|
||||||
TestVideoEncodedBuffer* _frame;
|
webrtc::VideoFrame* _frame;
|
||||||
const webrtc::CodecSpecificInfo* _codecSpecificInfo;
|
const webrtc::CodecSpecificInfo* _codecSpecificInfo;
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -47,7 +47,7 @@ public:
|
|||||||
delete &_queueRWLock;
|
delete &_queueRWLock;
|
||||||
}
|
}
|
||||||
|
|
||||||
void PushFrame(TestVideoEncodedBuffer *frame,
|
void PushFrame(webrtc::VideoFrame *frame,
|
||||||
webrtc::CodecSpecificInfo* codecSpecificInfo = NULL);
|
webrtc::CodecSpecificInfo* codecSpecificInfo = NULL);
|
||||||
FrameQueueTuple* PopFrame();
|
FrameQueueTuple* PopFrame();
|
||||||
bool Empty();
|
bool Empty();
|
||||||
@ -84,7 +84,7 @@ public:
|
|||||||
virtual webrtc::CodecSpecificInfo*
|
virtual webrtc::CodecSpecificInfo*
|
||||||
CopyCodecSpecificInfo(
|
CopyCodecSpecificInfo(
|
||||||
const webrtc::CodecSpecificInfo* codecSpecificInfo) const;
|
const webrtc::CodecSpecificInfo* codecSpecificInfo) const;
|
||||||
virtual void CopyEncodedImage(TestVideoEncodedBuffer& dest,
|
virtual void CopyEncodedImage(webrtc::VideoFrame& dest,
|
||||||
webrtc::EncodedImage& src,
|
webrtc::EncodedImage& src,
|
||||||
void* /*codecSpecificInfo*/) const;
|
void* /*codecSpecificInfo*/) const;
|
||||||
virtual webrtc::CodecSpecificInfo* CreateEncoderSpecificInfo() const
|
virtual webrtc::CodecSpecificInfo* CreateEncoderSpecificInfo() const
|
||||||
|
@ -128,10 +128,10 @@ NormalTest::Perform()
|
|||||||
while (!Encode())
|
while (!Encode())
|
||||||
{
|
{
|
||||||
DoPacketLoss();
|
DoPacketLoss();
|
||||||
_encodedVideoBuffer.UpdateLength(_encodedVideoBuffer.GetLength());
|
_encodedVideoBuffer.SetLength(_encodedVideoBuffer.Length());
|
||||||
if (fwrite(_encodedVideoBuffer.GetBuffer(), 1,
|
if (fwrite(_encodedVideoBuffer.Buffer(), 1,
|
||||||
_encodedVideoBuffer.GetLength(),
|
_encodedVideoBuffer.Length(),
|
||||||
_encodedFile) != _encodedVideoBuffer.GetLength()) {
|
_encodedFile) != _encodedVideoBuffer.Length()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
decodeLength = Decode();
|
decodeLength = Decode();
|
||||||
@ -140,7 +140,7 @@ NormalTest::Perform()
|
|||||||
fprintf(stderr,"\n\nError in decoder: %d\n\n", decodeLength);
|
fprintf(stderr,"\n\nError in decoder: %d\n\n", decodeLength);
|
||||||
exit(EXIT_FAILURE);
|
exit(EXIT_FAILURE);
|
||||||
}
|
}
|
||||||
if (fwrite(_decodedVideoBuffer.GetBuffer(), 1, decodeLength,
|
if (fwrite(_decodedVideoBuffer.Buffer(), 1, decodeLength,
|
||||||
_decodedFile) != static_cast<unsigned int>(decodeLength)) {
|
_decodedFile) != static_cast<unsigned int>(decodeLength)) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@ -157,7 +157,7 @@ NormalTest::Perform()
|
|||||||
fprintf(stderr,"\n\nError in decoder: %d\n\n", decodeLength);
|
fprintf(stderr,"\n\nError in decoder: %d\n\n", decodeLength);
|
||||||
exit(EXIT_FAILURE);
|
exit(EXIT_FAILURE);
|
||||||
}
|
}
|
||||||
if (fwrite(_decodedVideoBuffer.GetBuffer(), 1, decodeLength,
|
if (fwrite(_decodedVideoBuffer.Buffer(), 1, decodeLength,
|
||||||
_decodedFile) != static_cast<unsigned int>(decodeLength)) {
|
_decodedFile) != static_cast<unsigned int>(decodeLength)) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@ -174,8 +174,6 @@ NormalTest::Perform()
|
|||||||
(*_log) << "Average decode time: " << avgDecTime << " s" << std::endl;
|
(*_log) << "Average decode time: " << avgDecTime << " s" << std::endl;
|
||||||
|
|
||||||
_inputVideoBuffer.Free();
|
_inputVideoBuffer.Free();
|
||||||
_encodedVideoBuffer.Reset();
|
|
||||||
_decodedVideoBuffer.Free();
|
|
||||||
|
|
||||||
_encoder->Release();
|
_encoder->Release();
|
||||||
_decoder->Release();
|
_decoder->Release();
|
||||||
@ -192,7 +190,7 @@ NormalTest::Encode()
|
|||||||
{
|
{
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
_inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _sourceBuffer);
|
_inputVideoBuffer.CopyFrame(_lengthSourceFrame, _sourceBuffer);
|
||||||
_inputVideoBuffer.SetTimeStamp(_framecnt);
|
_inputVideoBuffer.SetTimeStamp(_framecnt);
|
||||||
|
|
||||||
// This multiple attempt ridiculousness is to accomodate VP7:
|
// This multiple attempt ridiculousness is to accomodate VP7:
|
||||||
@ -213,8 +211,8 @@ NormalTest::Encode()
|
|||||||
|
|
||||||
endtime = clock()/(double)CLOCKS_PER_SEC;
|
endtime = clock()/(double)CLOCKS_PER_SEC;
|
||||||
|
|
||||||
_encodedVideoBuffer.SetCaptureHeight(_inst.height);
|
_encodedVideoBuffer.SetHeight(_inst.height);
|
||||||
_encodedVideoBuffer.SetCaptureWidth(_inst.width);
|
_encodedVideoBuffer.SetWidth(_inst.width);
|
||||||
if (_lengthEncFrame < 0)
|
if (_lengthEncFrame < 0)
|
||||||
{
|
{
|
||||||
(*_log) << "Error in encoder: " << _lengthEncFrame << std::endl;
|
(*_log) << "Error in encoder: " << _lengthEncFrame << std::endl;
|
||||||
@ -256,7 +254,6 @@ NormalTest::Decode(int lossValue)
|
|||||||
{
|
{
|
||||||
return lengthDecFrame;
|
return lengthDecFrame;
|
||||||
}
|
}
|
||||||
_encodedVideoBuffer.Reset();
|
_encodedVideoBuffer.SetLength(0);
|
||||||
_encodedVideoBuffer.UpdateLength(0);
|
|
||||||
return lengthDecFrame;
|
return lengthDecFrame;
|
||||||
}
|
}
|
||||||
|
@ -172,13 +172,15 @@ PacketLossTest::CodecSpecific_InitBitrate()
|
|||||||
int PacketLossTest::DoPacketLoss()
|
int PacketLossTest::DoPacketLoss()
|
||||||
{
|
{
|
||||||
// Only packet loss for delta frames
|
// Only packet loss for delta frames
|
||||||
if (_frameToDecode->_frame->GetLength() == 0 || _frameToDecode->_frame->GetFrameType() != kDeltaFrame)
|
// TODO(mikhal): Identify delta frames
|
||||||
|
// First frame so never a delta frame.
|
||||||
|
if (_frameToDecode->_frame->Length() == 0 || _sumChannelBytes == 0)
|
||||||
{
|
{
|
||||||
_sumChannelBytes += _frameToDecode->_frame->GetLength();
|
_sumChannelBytes += _frameToDecode->_frame->Length();
|
||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
unsigned char *packet = NULL;
|
unsigned char *packet = NULL;
|
||||||
TestVideoEncodedBuffer newEncBuf;
|
VideoFrame newEncBuf;
|
||||||
newEncBuf.VerifyAndAllocate(_lengthSourceFrame);
|
newEncBuf.VerifyAndAllocate(_lengthSourceFrame);
|
||||||
_inBufIdx = 0;
|
_inBufIdx = 0;
|
||||||
_outBufIdx = 0;
|
_outBufIdx = 0;
|
||||||
@ -210,24 +212,24 @@ int PacketLossTest::DoPacketLoss()
|
|||||||
{
|
{
|
||||||
lossResult += (kept==0); // 2 = all lost = full frame
|
lossResult += (kept==0); // 2 = all lost = full frame
|
||||||
}
|
}
|
||||||
_frameToDecode->_frame->CopyBuffer(newEncBuf.GetLength(), newEncBuf.GetBuffer());
|
_frameToDecode->_frame->CopyFrame(newEncBuf.Length(), newEncBuf.Buffer());
|
||||||
_sumChannelBytes += newEncBuf.GetLength();
|
_sumChannelBytes += newEncBuf.Length();
|
||||||
_totalKept += kept;
|
_totalKept += kept;
|
||||||
_totalThrown += thrown;
|
_totalThrown += thrown;
|
||||||
|
|
||||||
return lossResult;
|
return lossResult;
|
||||||
//printf("Threw away: %d out of %d packets\n", thrown, thrown + kept);
|
//printf("Threw away: %d out of %d packets\n", thrown, thrown + kept);
|
||||||
//printf("Encoded left: %d bytes\n", _encodedVideoBuffer.GetLength());
|
//printf("Encoded left: %d bytes\n", _encodedVideoBuffer.Length());
|
||||||
}
|
}
|
||||||
|
|
||||||
int PacketLossTest::NextPacket(int mtu, unsigned char **pkg)
|
int PacketLossTest::NextPacket(int mtu, unsigned char **pkg)
|
||||||
{
|
{
|
||||||
unsigned char *buf = _frameToDecode->_frame->GetBuffer();
|
unsigned char *buf = _frameToDecode->_frame->Buffer();
|
||||||
*pkg = buf + _inBufIdx;
|
*pkg = buf + _inBufIdx;
|
||||||
if (static_cast<long>(_frameToDecode->_frame->GetLength()) - _inBufIdx <= mtu)
|
if (static_cast<long>(_frameToDecode->_frame->Length()) - _inBufIdx <= mtu)
|
||||||
{
|
{
|
||||||
int size = _frameToDecode->_frame->GetLength() - _inBufIdx;
|
int size = _frameToDecode->_frame->Length() - _inBufIdx;
|
||||||
_inBufIdx = _frameToDecode->_frame->GetLength();
|
_inBufIdx = _frameToDecode->_frame->Length();
|
||||||
return size;
|
return size;
|
||||||
}
|
}
|
||||||
_inBufIdx += mtu;
|
_inBufIdx += mtu;
|
||||||
@ -239,14 +241,14 @@ int PacketLossTest::ByteLoss(int size, unsigned char *pkg, int bytesToLose)
|
|||||||
return size;
|
return size;
|
||||||
}
|
}
|
||||||
|
|
||||||
void PacketLossTest::InsertPacket(TestVideoEncodedBuffer *buf, unsigned char *pkg, int size)
|
void PacketLossTest::InsertPacket(VideoFrame *buf, unsigned char *pkg, int size)
|
||||||
{
|
{
|
||||||
if (static_cast<long>(buf->GetSize()) - _outBufIdx < size)
|
if (static_cast<long>(buf->Size()) - _outBufIdx < size)
|
||||||
{
|
{
|
||||||
printf("InsertPacket error!\n");
|
printf("InsertPacket error!\n");
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
memcpy(buf->GetBuffer() + _outBufIdx, pkg, size);
|
memcpy(buf->Buffer() + _outBufIdx, pkg, size);
|
||||||
buf->UpdateLength(buf->GetLength() + size);
|
buf->SetLength(buf->Length() + size);
|
||||||
_outBufIdx += size;
|
_outBufIdx += size;
|
||||||
}
|
}
|
||||||
|
@ -36,7 +36,8 @@ protected:
|
|||||||
virtual int DoPacketLoss();
|
virtual int DoPacketLoss();
|
||||||
virtual int NextPacket(int size, unsigned char **pkg);
|
virtual int NextPacket(int size, unsigned char **pkg);
|
||||||
virtual int ByteLoss(int size, unsigned char *pkg, int bytesToLose);
|
virtual int ByteLoss(int size, unsigned char *pkg, int bytesToLose);
|
||||||
virtual void InsertPacket(TestVideoEncodedBuffer *buf, unsigned char *pkg, int size);
|
virtual void InsertPacket(webrtc::VideoFrame *buf, unsigned char *pkg,
|
||||||
|
int size);
|
||||||
int _inBufIdx;
|
int _inBufIdx;
|
||||||
int _outBufIdx;
|
int _outBufIdx;
|
||||||
|
|
||||||
|
@ -1,296 +0,0 @@
|
|||||||
/*
|
|
||||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
|
||||||
*
|
|
||||||
* Use of this source code is governed by a BSD-style license
|
|
||||||
* that can be found in the LICENSE file in the root of the source
|
|
||||||
* tree. An additional intellectual property rights grant can be found
|
|
||||||
* in the file PATENTS. All contributing project authors may
|
|
||||||
* be found in the AUTHORS file in the root of the source tree.
|
|
||||||
*/
|
|
||||||
|
|
||||||
#include "performance_test.h"
|
|
||||||
|
|
||||||
#include <assert.h>
|
|
||||||
|
|
||||||
#include "gtest/gtest.h"
|
|
||||||
#include "testsupport/fileutils.h"
|
|
||||||
#include "tick_util.h"
|
|
||||||
|
|
||||||
using namespace webrtc;
|
|
||||||
|
|
||||||
#define NUM_FRAMES 300
|
|
||||||
|
|
||||||
PerformanceTest::PerformanceTest(WebRtc_UWord32 bitRate)
|
|
||||||
:
|
|
||||||
NormalAsyncTest(bitRate),
|
|
||||||
_numCodecs(0),
|
|
||||||
_tests(NULL),
|
|
||||||
_encoders(NULL),
|
|
||||||
_decoders(NULL),
|
|
||||||
_threads(NULL),
|
|
||||||
_rawImageLock(NULL),
|
|
||||||
_encodeEvents(new EventWrapper*[1]),
|
|
||||||
_stopped(true),
|
|
||||||
_encodeCompleteCallback(NULL),
|
|
||||||
_decodeCompleteCallback(NULL)
|
|
||||||
{
|
|
||||||
}
|
|
||||||
|
|
||||||
PerformanceTest::PerformanceTest(WebRtc_UWord32 bitRate, WebRtc_UWord8 numCodecs)
|
|
||||||
:
|
|
||||||
NormalAsyncTest(bitRate),
|
|
||||||
_numCodecs(numCodecs),
|
|
||||||
_tests(new PerformanceTest*[_numCodecs]),
|
|
||||||
_encoders(new VideoEncoder*[_numCodecs]),
|
|
||||||
_decoders(new VideoDecoder*[_numCodecs]),
|
|
||||||
_threads(new ThreadWrapper*[_numCodecs]),
|
|
||||||
_rawImageLock(RWLockWrapper::CreateRWLock()),
|
|
||||||
_encodeEvents(new EventWrapper*[_numCodecs]),
|
|
||||||
_stopped(true),
|
|
||||||
_encodeCompleteCallback(NULL),
|
|
||||||
_decodeCompleteCallback(NULL)
|
|
||||||
{
|
|
||||||
for (int i=0; i < _numCodecs; i++)
|
|
||||||
{
|
|
||||||
_tests[i] = new PerformanceTest(bitRate);
|
|
||||||
_encodeEvents[i] = EventWrapper::Create();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
PerformanceTest::~PerformanceTest()
|
|
||||||
{
|
|
||||||
if (_encoders != NULL)
|
|
||||||
{
|
|
||||||
delete [] _encoders;
|
|
||||||
}
|
|
||||||
if (_decoders != NULL)
|
|
||||||
{
|
|
||||||
delete [] _decoders;
|
|
||||||
}
|
|
||||||
if (_tests != NULL)
|
|
||||||
{
|
|
||||||
delete [] _tests;
|
|
||||||
}
|
|
||||||
if (_threads != NULL)
|
|
||||||
{
|
|
||||||
delete [] _threads;
|
|
||||||
}
|
|
||||||
if (_rawImageLock != NULL)
|
|
||||||
{
|
|
||||||
delete _rawImageLock;
|
|
||||||
}
|
|
||||||
if (_encodeEvents != NULL)
|
|
||||||
{
|
|
||||||
delete [] _encodeEvents;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
void
|
|
||||||
PerformanceTest::Setup()
|
|
||||||
{
|
|
||||||
_inname = webrtc::test::ProjectRootPath() + "resources/foreman_cif.yuv";
|
|
||||||
NormalAsyncTest::Setup(); // Setup input and output files
|
|
||||||
CodecSettings(352, 288, 30, _bitRate); // common to all codecs
|
|
||||||
for (int i=0; i < _numCodecs; i++)
|
|
||||||
{
|
|
||||||
_encoders[i] = CreateEncoder();
|
|
||||||
_decoders[i] = CreateDecoder();
|
|
||||||
if (_encoders[i] == NULL)
|
|
||||||
{
|
|
||||||
printf("Must create a codec specific test!\n");
|
|
||||||
exit(EXIT_FAILURE);
|
|
||||||
}
|
|
||||||
if(_encoders[i]->InitEncode(&_inst, 4, 1440) < 0)
|
|
||||||
{
|
|
||||||
exit(EXIT_FAILURE);
|
|
||||||
}
|
|
||||||
if (_decoders[i]->InitDecode(&_inst, 1))
|
|
||||||
{
|
|
||||||
exit(EXIT_FAILURE);
|
|
||||||
}
|
|
||||||
_tests[i]->SetEncoder(_encoders[i]);
|
|
||||||
_tests[i]->SetDecoder(_decoders[i]);
|
|
||||||
_tests[i]->_rawImageLock = _rawImageLock;
|
|
||||||
_encodeEvents[i]->Reset();
|
|
||||||
_tests[i]->_encodeEvents[0] = _encodeEvents[i];
|
|
||||||
_tests[i]->_inst = _inst;
|
|
||||||
_threads[i] = ThreadWrapper::CreateThread(PerformanceTest::RunThread, _tests[i]);
|
|
||||||
unsigned int id = 0;
|
|
||||||
_tests[i]->_stopped = false;
|
|
||||||
_threads[i]->Start(id);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
void
|
|
||||||
PerformanceTest::Perform()
|
|
||||||
{
|
|
||||||
Setup();
|
|
||||||
EventWrapper& sleepEvent = *EventWrapper::Create();
|
|
||||||
const WebRtc_Word64 startTime = TickTime::MillisecondTimestamp();
|
|
||||||
for (int i=0; i < NUM_FRAMES; i++)
|
|
||||||
{
|
|
||||||
{
|
|
||||||
// Read a new frame from file
|
|
||||||
WriteLockScoped imageLock(*_rawImageLock);
|
|
||||||
_lengthEncFrame = 0;
|
|
||||||
EXPECT_GT(fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile),
|
|
||||||
0u);
|
|
||||||
if (feof(_sourceFile) != 0)
|
|
||||||
{
|
|
||||||
rewind(_sourceFile);
|
|
||||||
}
|
|
||||||
_inputVideoBuffer.VerifyAndAllocate(_inst.width*_inst.height*3/2);
|
|
||||||
_inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _sourceBuffer);
|
|
||||||
_inputVideoBuffer.SetTimeStamp((unsigned int) (_encFrameCnt * 9e4 / static_cast<float>(_inst.maxFramerate)));
|
|
||||||
_inputVideoBuffer.SetWidth(_inst.width);
|
|
||||||
_inputVideoBuffer.SetHeight(_inst.height);
|
|
||||||
for (int i=0; i < _numCodecs; i++)
|
|
||||||
{
|
|
||||||
_tests[i]->_inputVideoBuffer.CopyPointer(_inputVideoBuffer);
|
|
||||||
_encodeEvents[i]->Set();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (i < NUM_FRAMES - 1)
|
|
||||||
{
|
|
||||||
sleepEvent.Wait(33);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for (int i=0; i < _numCodecs; i++)
|
|
||||||
{
|
|
||||||
_tests[i]->_stopped = true;
|
|
||||||
_encodeEvents[i]->Set();
|
|
||||||
_threads[i]->Stop();
|
|
||||||
}
|
|
||||||
const WebRtc_UWord32 totalTime =
|
|
||||||
static_cast<WebRtc_UWord32>(TickTime::MillisecondTimestamp() - startTime);
|
|
||||||
printf("Total time: %u\n", totalTime);
|
|
||||||
delete &sleepEvent;
|
|
||||||
Teardown();
|
|
||||||
}
|
|
||||||
|
|
||||||
void PerformanceTest::Teardown()
|
|
||||||
{
|
|
||||||
if (_encodeCompleteCallback != NULL)
|
|
||||||
{
|
|
||||||
delete _encodeCompleteCallback;
|
|
||||||
}
|
|
||||||
if (_decodeCompleteCallback != NULL)
|
|
||||||
{
|
|
||||||
delete _decodeCompleteCallback;
|
|
||||||
}
|
|
||||||
// main test only, all others have numCodecs = 0:
|
|
||||||
if (_numCodecs > 0)
|
|
||||||
{
|
|
||||||
WriteLockScoped imageLock(*_rawImageLock);
|
|
||||||
_inputVideoBuffer.Free();
|
|
||||||
NormalAsyncTest::Teardown();
|
|
||||||
}
|
|
||||||
for (int i=0; i < _numCodecs; i++)
|
|
||||||
{
|
|
||||||
_encoders[i]->Release();
|
|
||||||
delete _encoders[i];
|
|
||||||
_decoders[i]->Release();
|
|
||||||
delete _decoders[i];
|
|
||||||
_tests[i]->_inputVideoBuffer.ClearPointer();
|
|
||||||
_tests[i]->_rawImageLock = NULL;
|
|
||||||
_tests[i]->Teardown();
|
|
||||||
delete _tests[i];
|
|
||||||
delete _encodeEvents[i];
|
|
||||||
delete _threads[i];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
bool
|
|
||||||
PerformanceTest::RunThread(void* obj)
|
|
||||||
{
|
|
||||||
PerformanceTest& test = *static_cast<PerformanceTest*>(obj);
|
|
||||||
return test.PerformSingleTest();
|
|
||||||
}
|
|
||||||
|
|
||||||
bool
|
|
||||||
PerformanceTest::PerformSingleTest()
|
|
||||||
{
|
|
||||||
if (_encodeCompleteCallback == NULL)
|
|
||||||
{
|
|
||||||
_encodeCompleteCallback = new VideoEncodeCompleteCallback(NULL, &_frameQueue, *this);
|
|
||||||
_encoder->RegisterEncodeCompleteCallback(_encodeCompleteCallback);
|
|
||||||
}
|
|
||||||
if (_decodeCompleteCallback == NULL)
|
|
||||||
{
|
|
||||||
_decodeCompleteCallback = new VideoDecodeCompleteCallback(NULL, *this);
|
|
||||||
_decoder->RegisterDecodeCompleteCallback(_decodeCompleteCallback);
|
|
||||||
}
|
|
||||||
(*_encodeEvents)->Wait(WEBRTC_EVENT_INFINITE); // The first event is used for every single test
|
|
||||||
CodecSpecific_InitBitrate();
|
|
||||||
bool complete = false;
|
|
||||||
{
|
|
||||||
ReadLockScoped imageLock(*_rawImageLock);
|
|
||||||
complete = Encode();
|
|
||||||
}
|
|
||||||
if (!_frameQueue.Empty() || complete)
|
|
||||||
{
|
|
||||||
while (!_frameQueue.Empty())
|
|
||||||
{
|
|
||||||
_frameToDecode = static_cast<FrameQueueTuple *>(_frameQueue.PopFrame());
|
|
||||||
int lost = DoPacketLoss();
|
|
||||||
if (lost == 2)
|
|
||||||
{
|
|
||||||
// Lost the whole frame, continue
|
|
||||||
_missingFrames = true;
|
|
||||||
delete _frameToDecode;
|
|
||||||
_frameToDecode = NULL;
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
int ret = Decode(lost);
|
|
||||||
delete _frameToDecode;
|
|
||||||
_frameToDecode = NULL;
|
|
||||||
if (ret < 0)
|
|
||||||
{
|
|
||||||
fprintf(stderr,"\n\nError in decoder: %d\n\n", ret);
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
else if (ret < 0)
|
|
||||||
{
|
|
||||||
fprintf(stderr, "\n\nPositive return value from decode!\n\n");
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (_stopped)
|
|
||||||
{
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
bool PerformanceTest::Encode()
|
|
||||||
{
|
|
||||||
VideoFrame rawImage;
|
|
||||||
VideoBufferToRawImage(_inputVideoBuffer, rawImage);
|
|
||||||
std::vector<VideoFrameType> frameTypes(1, kDeltaFrame);
|
|
||||||
if (_requestKeyFrame && !(_encFrameCnt%50))
|
|
||||||
{
|
|
||||||
frameTypes[0] = kKeyFrame;
|
|
||||||
}
|
|
||||||
webrtc::CodecSpecificInfo* codecSpecificInfo = CreateEncoderSpecificInfo();
|
|
||||||
int ret = _encoder->Encode(rawImage, codecSpecificInfo, &frameTypes);
|
|
||||||
EXPECT_EQ(ret, WEBRTC_VIDEO_CODEC_OK);
|
|
||||||
if (codecSpecificInfo != NULL)
|
|
||||||
{
|
|
||||||
delete codecSpecificInfo;
|
|
||||||
codecSpecificInfo = NULL;
|
|
||||||
}
|
|
||||||
assert(ret >= 0);
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
int PerformanceTest::Decode(int lossValue)
|
|
||||||
{
|
|
||||||
EncodedImage encodedImage;
|
|
||||||
VideoEncodedBufferToEncodedImage(*(_frameToDecode->_frame), encodedImage);
|
|
||||||
encodedImage._completeFrame = !lossValue;
|
|
||||||
int ret = _decoder->Decode(encodedImage, _missingFrames, NULL,
|
|
||||||
_frameToDecode->_codecSpecificInfo);
|
|
||||||
_missingFrames = false;
|
|
||||||
return ret;
|
|
||||||
}
|
|
@ -1,54 +0,0 @@
|
|||||||
/*
|
|
||||||
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
|
||||||
*
|
|
||||||
* Use of this source code is governed by a BSD-style license
|
|
||||||
* that can be found in the LICENSE file in the root of the source
|
|
||||||
* tree. An additional intellectual property rights grant can be found
|
|
||||||
* in the file PATENTS. All contributing project authors may
|
|
||||||
* be found in the AUTHORS file in the root of the source tree.
|
|
||||||
*/
|
|
||||||
|
|
||||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_PERFORMANCE_TEST_H_
|
|
||||||
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_PERFORMANCE_TEST_H_
|
|
||||||
|
|
||||||
#include "normal_async_test.h"
|
|
||||||
#include "thread_wrapper.h"
|
|
||||||
#include "rw_lock_wrapper.h"
|
|
||||||
#include "event_wrapper.h"
|
|
||||||
|
|
||||||
class PerformanceTest : public NormalAsyncTest
|
|
||||||
{
|
|
||||||
public:
|
|
||||||
PerformanceTest(WebRtc_UWord32 bitRate, WebRtc_UWord8 numCodecs);
|
|
||||||
virtual ~PerformanceTest();
|
|
||||||
|
|
||||||
virtual void Perform();
|
|
||||||
virtual void Print() {};
|
|
||||||
|
|
||||||
protected:
|
|
||||||
PerformanceTest(WebRtc_UWord32 bitRate);
|
|
||||||
virtual void Setup();
|
|
||||||
virtual bool Encode();
|
|
||||||
virtual int Decode(int lossValue = 0);
|
|
||||||
virtual void Teardown();
|
|
||||||
static bool RunThread(void* obj);
|
|
||||||
bool PerformSingleTest();
|
|
||||||
|
|
||||||
virtual webrtc::VideoEncoder* CreateEncoder() const { return NULL; };
|
|
||||||
virtual webrtc::VideoDecoder* CreateDecoder() const { return NULL; };
|
|
||||||
|
|
||||||
WebRtc_UWord8 _numCodecs;
|
|
||||||
PerformanceTest** _tests;
|
|
||||||
webrtc::VideoEncoder** _encoders;
|
|
||||||
webrtc::VideoDecoder** _decoders;
|
|
||||||
webrtc::ThreadWrapper** _threads;
|
|
||||||
webrtc::RWLockWrapper* _rawImageLock;
|
|
||||||
webrtc::EventWrapper** _encodeEvents;
|
|
||||||
FrameQueue _frameQueue;
|
|
||||||
bool _stopped;
|
|
||||||
webrtc::EncodedImageCallback* _encodeCompleteCallback;
|
|
||||||
webrtc::DecodedImageCallback* _decodeCompleteCallback;
|
|
||||||
FILE* _outFile;
|
|
||||||
};
|
|
||||||
|
|
||||||
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_PERFORMANCE_TEST_H_
|
|
@ -135,26 +135,17 @@ bool CodecTest::PacketLoss(double lossRate, int /*thrown*/)
|
|||||||
}
|
}
|
||||||
|
|
||||||
void
|
void
|
||||||
CodecTest::VideoBufferToRawImage(TestVideoBuffer& videoBuffer,
|
CodecTest::VideoEncodedBufferToEncodedImage(VideoFrame& videoBuffer,
|
||||||
VideoFrame &image)
|
|
||||||
{
|
|
||||||
// TODO(mikhal): Use videoBuffer in lieu of TestVideoBuffer.
|
|
||||||
image.CopyFrame(videoBuffer.GetLength(), videoBuffer.GetBuffer());
|
|
||||||
image.SetWidth(videoBuffer.GetWidth());
|
|
||||||
image.SetHeight(videoBuffer.GetHeight());
|
|
||||||
image.SetTimeStamp(videoBuffer.GetTimeStamp());
|
|
||||||
}
|
|
||||||
void
|
|
||||||
CodecTest::VideoEncodedBufferToEncodedImage(TestVideoEncodedBuffer& videoBuffer,
|
|
||||||
EncodedImage &image)
|
EncodedImage &image)
|
||||||
{
|
{
|
||||||
image._buffer = videoBuffer.GetBuffer();
|
image._buffer = videoBuffer.Buffer();
|
||||||
image._length = videoBuffer.GetLength();
|
image._length = videoBuffer.Length();
|
||||||
image._size = videoBuffer.GetSize();
|
image._size = videoBuffer.Size();
|
||||||
image._frameType = static_cast<VideoFrameType>(videoBuffer.GetFrameType());
|
//image._frameType = static_cast<VideoFrameType>
|
||||||
image._timeStamp = videoBuffer.GetTimeStamp();
|
// (videoBuffer.GetFrameType());
|
||||||
image._encodedWidth = videoBuffer.GetCaptureWidth();
|
image._timeStamp = videoBuffer.TimeStamp();
|
||||||
image._encodedHeight = videoBuffer.GetCaptureHeight();
|
image._encodedWidth = videoBuffer.Width();
|
||||||
|
image._encodedHeight = videoBuffer.Height();
|
||||||
image._completeFrame = true;
|
image._completeFrame = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -11,8 +11,8 @@
|
|||||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAWEWORK_TEST_H_
|
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAWEWORK_TEST_H_
|
||||||
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAWEWORK_TEST_H_
|
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAWEWORK_TEST_H_
|
||||||
|
|
||||||
|
#include "modules/interface/module_common_types.h"
|
||||||
#include "video_codec_interface.h"
|
#include "video_codec_interface.h"
|
||||||
#include "video_buffer.h"
|
|
||||||
#include <string>
|
#include <string>
|
||||||
#include <fstream>
|
#include <fstream>
|
||||||
#include <cstdlib>
|
#include <cstdlib>
|
||||||
@ -40,10 +40,8 @@ protected:
|
|||||||
double ActualBitRate(int nFrames);
|
double ActualBitRate(int nFrames);
|
||||||
virtual bool PacketLoss(double lossRate, int /*thrown*/);
|
virtual bool PacketLoss(double lossRate, int /*thrown*/);
|
||||||
static double RandUniform() { return (std::rand() + 1.0)/(RAND_MAX + 1.0); }
|
static double RandUniform() { return (std::rand() + 1.0)/(RAND_MAX + 1.0); }
|
||||||
static void VideoBufferToRawImage(TestVideoBuffer& videoBuffer,
|
|
||||||
webrtc::VideoFrame &image);
|
|
||||||
static void VideoEncodedBufferToEncodedImage(
|
static void VideoEncodedBufferToEncodedImage(
|
||||||
TestVideoEncodedBuffer& videoBuffer,
|
webrtc::VideoFrame& videoBuffer,
|
||||||
webrtc::EncodedImage &image);
|
webrtc::EncodedImage &image);
|
||||||
|
|
||||||
webrtc::VideoEncoder* _encoder;
|
webrtc::VideoEncoder* _encoder;
|
||||||
@ -51,9 +49,11 @@ protected:
|
|||||||
WebRtc_UWord32 _bitRate;
|
WebRtc_UWord32 _bitRate;
|
||||||
unsigned int _lengthSourceFrame;
|
unsigned int _lengthSourceFrame;
|
||||||
unsigned char* _sourceBuffer;
|
unsigned char* _sourceBuffer;
|
||||||
TestVideoBuffer _inputVideoBuffer;
|
webrtc::VideoFrame _inputVideoBuffer;
|
||||||
TestVideoEncodedBuffer _encodedVideoBuffer;
|
// TODO(mikhal): For now using VideoFrame for encodedBuffer, should use a
|
||||||
TestVideoBuffer _decodedVideoBuffer;
|
// designated class.
|
||||||
|
webrtc::VideoFrame _encodedVideoBuffer;
|
||||||
|
webrtc::VideoFrame _decodedVideoBuffer;
|
||||||
webrtc::VideoCodec _inst;
|
webrtc::VideoCodec _inst;
|
||||||
std::fstream* _log;
|
std::fstream* _log;
|
||||||
std::string _inname;
|
std::string _inname;
|
||||||
|
@ -40,10 +40,8 @@
|
|||||||
'normal_async_test.h',
|
'normal_async_test.h',
|
||||||
'normal_test.h',
|
'normal_test.h',
|
||||||
'packet_loss_test.h',
|
'packet_loss_test.h',
|
||||||
'performance_test.h',
|
|
||||||
'test.h',
|
'test.h',
|
||||||
'unit_test.h',
|
'unit_test.h',
|
||||||
'video_buffer.h',
|
|
||||||
'video_source.h',
|
'video_source.h',
|
||||||
|
|
||||||
# source files
|
# source files
|
||||||
@ -51,10 +49,8 @@
|
|||||||
'normal_async_test.cc',
|
'normal_async_test.cc',
|
||||||
'normal_test.cc',
|
'normal_test.cc',
|
||||||
'packet_loss_test.cc',
|
'packet_loss_test.cc',
|
||||||
'performance_test.cc',
|
|
||||||
'test.cc',
|
'test.cc',
|
||||||
'unit_test.cc',
|
'unit_test.cc',
|
||||||
'video_buffer.cc',
|
|
||||||
'video_source.cc',
|
'video_source.cc',
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
|
@ -96,12 +96,12 @@ UnitTestEncodeCompleteCallback::Encoded(EncodedImage& encodedImage,
|
|||||||
fragmentation)
|
fragmentation)
|
||||||
{
|
{
|
||||||
_encodedVideoBuffer->VerifyAndAllocate(encodedImage._size);
|
_encodedVideoBuffer->VerifyAndAllocate(encodedImage._size);
|
||||||
_encodedVideoBuffer->CopyBuffer(encodedImage._size, encodedImage._buffer);
|
_encodedVideoBuffer->CopyFrame(encodedImage._size, encodedImage._buffer);
|
||||||
_encodedVideoBuffer->UpdateLength(encodedImage._length);
|
_encodedVideoBuffer->SetLength(encodedImage._length);
|
||||||
_encodedVideoBuffer->SetFrameType(encodedImage._frameType);
|
// _encodedVideoBuffer->SetFrameType(encodedImage._frameType);
|
||||||
_encodedVideoBuffer->SetCaptureWidth(
|
_encodedVideoBuffer->SetWidth(
|
||||||
(WebRtc_UWord16)encodedImage._encodedWidth);
|
(WebRtc_UWord16)encodedImage._encodedWidth);
|
||||||
_encodedVideoBuffer->SetCaptureHeight(
|
_encodedVideoBuffer->SetHeight(
|
||||||
(WebRtc_UWord16)encodedImage._encodedHeight);
|
(WebRtc_UWord16)encodedImage._encodedHeight);
|
||||||
_encodedVideoBuffer->SetTimeStamp(encodedImage._timeStamp);
|
_encodedVideoBuffer->SetTimeStamp(encodedImage._timeStamp);
|
||||||
_encodeComplete = true;
|
_encodeComplete = true;
|
||||||
@ -111,7 +111,7 @@ UnitTestEncodeCompleteCallback::Encoded(EncodedImage& encodedImage,
|
|||||||
|
|
||||||
WebRtc_Word32 UnitTestDecodeCompleteCallback::Decoded(VideoFrame& image)
|
WebRtc_Word32 UnitTestDecodeCompleteCallback::Decoded(VideoFrame& image)
|
||||||
{
|
{
|
||||||
_decodedVideoBuffer->CopyBuffer(image.Length(), image.Buffer());
|
_decodedVideoBuffer->CopyFrame(image.Length(), image.Buffer());
|
||||||
_decodedVideoBuffer->SetWidth(image.Width());
|
_decodedVideoBuffer->SetWidth(image.Width());
|
||||||
_decodedVideoBuffer->SetHeight(image.Height());
|
_decodedVideoBuffer->SetHeight(image.Height());
|
||||||
_decodedVideoBuffer->SetTimeStamp(image.TimeStamp());
|
_decodedVideoBuffer->SetTimeStamp(image.TimeStamp());
|
||||||
@ -155,7 +155,7 @@ UnitTest::WaitForEncodedFrame() const
|
|||||||
{
|
{
|
||||||
if (_encodeCompleteCallback->EncodeComplete())
|
if (_encodeCompleteCallback->EncodeComplete())
|
||||||
{
|
{
|
||||||
return _encodedVideoBuffer.GetLength();
|
return _encodedVideoBuffer.Length();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return 0;
|
return 0;
|
||||||
@ -169,7 +169,7 @@ UnitTest::WaitForDecodedFrame() const
|
|||||||
{
|
{
|
||||||
if (_decodeCompleteCallback->DecodeComplete())
|
if (_decodeCompleteCallback->DecodeComplete())
|
||||||
{
|
{
|
||||||
return _decodedVideoBuffer.GetLength();
|
return _decodedVideoBuffer.Length();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return 0;
|
return 0;
|
||||||
@ -227,7 +227,7 @@ UnitTest::Setup()
|
|||||||
_inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
|
_inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
|
||||||
ASSERT_TRUE(fread(_refFrame, 1, _lengthSourceFrame, _sourceFile)
|
ASSERT_TRUE(fread(_refFrame, 1, _lengthSourceFrame, _sourceFile)
|
||||||
== _lengthSourceFrame);
|
== _lengthSourceFrame);
|
||||||
_inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _refFrame);
|
_inputVideoBuffer.CopyFrame(_lengthSourceFrame, _refFrame);
|
||||||
_inputVideoBuffer.SetWidth(_source->GetWidth());
|
_inputVideoBuffer.SetWidth(_source->GetWidth());
|
||||||
_inputVideoBuffer.SetHeight(_source->GetHeight());
|
_inputVideoBuffer.SetHeight(_source->GetHeight());
|
||||||
rewind(_sourceFile);
|
rewind(_sourceFile);
|
||||||
@ -235,16 +235,13 @@ UnitTest::Setup()
|
|||||||
// Get a reference encoded frame.
|
// Get a reference encoded frame.
|
||||||
_encodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
|
_encodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
|
||||||
|
|
||||||
VideoFrame image;
|
|
||||||
VideoBufferToRawImage(_inputVideoBuffer, image);
|
|
||||||
|
|
||||||
// Ensures our initial parameters are valid.
|
// Ensures our initial parameters are valid.
|
||||||
EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
|
EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
|
||||||
_encoder->Encode(image, NULL, NULL);
|
_encoder->Encode(_inputVideoBuffer, NULL, NULL);
|
||||||
_refEncFrameLength = WaitForEncodedFrame();
|
_refEncFrameLength = WaitForEncodedFrame();
|
||||||
ASSERT_TRUE(_refEncFrameLength > 0);
|
ASSERT_TRUE(_refEncFrameLength > 0);
|
||||||
_refEncFrame = new unsigned char[_refEncFrameLength];
|
_refEncFrame = new unsigned char[_refEncFrameLength];
|
||||||
memcpy(_refEncFrame, _encodedVideoBuffer.GetBuffer(), _refEncFrameLength);
|
memcpy(_refEncFrame, _encodedVideoBuffer.Buffer(), _refEncFrameLength);
|
||||||
|
|
||||||
// Get a reference decoded frame.
|
// Get a reference decoded frame.
|
||||||
_decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
|
_decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
|
||||||
@ -261,11 +258,10 @@ UnitTest::Setup()
|
|||||||
_inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
|
_inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
|
||||||
ASSERT_TRUE(fread(_refFrame, 1, _lengthSourceFrame,
|
ASSERT_TRUE(fread(_refFrame, 1, _lengthSourceFrame,
|
||||||
_sourceFile) == _lengthSourceFrame);
|
_sourceFile) == _lengthSourceFrame);
|
||||||
_inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _refFrame);
|
_inputVideoBuffer.CopyFrame(_lengthSourceFrame, _refFrame);
|
||||||
_inputVideoBuffer.SetWidth(_source->GetWidth());
|
_inputVideoBuffer.SetWidth(_source->GetWidth());
|
||||||
_inputVideoBuffer.SetHeight(_source->GetHeight());
|
_inputVideoBuffer.SetHeight(_source->GetHeight());
|
||||||
VideoBufferToRawImage(_inputVideoBuffer, image);
|
_encoder->Encode(_inputVideoBuffer, NULL, NULL);
|
||||||
_encoder->Encode(image, NULL, NULL);
|
|
||||||
ASSERT_TRUE(WaitForEncodedFrame() > 0);
|
ASSERT_TRUE(WaitForEncodedFrame() > 0);
|
||||||
}
|
}
|
||||||
EncodedImage encodedImage;
|
EncodedImage encodedImage;
|
||||||
@ -273,13 +269,12 @@ UnitTest::Setup()
|
|||||||
ASSERT_TRUE(_decoder->Decode(encodedImage, 0, NULL)
|
ASSERT_TRUE(_decoder->Decode(encodedImage, 0, NULL)
|
||||||
== WEBRTC_VIDEO_CODEC_OK);
|
== WEBRTC_VIDEO_CODEC_OK);
|
||||||
frameLength = WaitForDecodedFrame();
|
frameLength = WaitForDecodedFrame();
|
||||||
_encodedVideoBuffer.Reset();
|
_encodedVideoBuffer.SetLength(0);
|
||||||
_encodedVideoBuffer.UpdateLength(0);
|
|
||||||
i++;
|
i++;
|
||||||
}
|
}
|
||||||
rewind(_sourceFile);
|
rewind(_sourceFile);
|
||||||
EXPECT_TRUE(frameLength == _lengthSourceFrame);
|
EXPECT_TRUE(frameLength == _lengthSourceFrame);
|
||||||
memcpy(_refDecFrame, _decodedVideoBuffer.GetBuffer(), _lengthSourceFrame);
|
memcpy(_refDecFrame, _decodedVideoBuffer.Buffer(), _lengthSourceFrame);
|
||||||
}
|
}
|
||||||
|
|
||||||
void
|
void
|
||||||
@ -318,8 +313,7 @@ UnitTest::DecodeWithoutAssert()
|
|||||||
VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
|
VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
|
||||||
int ret = _decoder->Decode(encodedImage, 0, NULL);
|
int ret = _decoder->Decode(encodedImage, 0, NULL);
|
||||||
int frameLength = WaitForDecodedFrame();
|
int frameLength = WaitForDecodedFrame();
|
||||||
_encodedVideoBuffer.Reset();
|
_encodedVideoBuffer.SetLength(0);
|
||||||
_encodedVideoBuffer.UpdateLength(0);
|
|
||||||
return ret == WEBRTC_VIDEO_CODEC_OK ? frameLength : ret;
|
return ret == WEBRTC_VIDEO_CODEC_OK ? frameLength : ret;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -338,8 +332,7 @@ UnitTest::Decode()
|
|||||||
== _lengthSourceFrame));
|
== _lengthSourceFrame));
|
||||||
EXPECT_TRUE(ret == WEBRTC_VIDEO_CODEC_OK && (frameLength == 0 || frameLength
|
EXPECT_TRUE(ret == WEBRTC_VIDEO_CODEC_OK && (frameLength == 0 || frameLength
|
||||||
== _lengthSourceFrame));
|
== _lengthSourceFrame));
|
||||||
_encodedVideoBuffer.Reset();
|
_encodedVideoBuffer.SetLength(0);
|
||||||
_encodedVideoBuffer.UpdateLength(0);
|
|
||||||
return ret == WEBRTC_VIDEO_CODEC_OK ? frameLength : ret;
|
return ret == WEBRTC_VIDEO_CODEC_OK ? frameLength : ret;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -357,8 +350,7 @@ UnitTest::Perform()
|
|||||||
//-- Calls before InitEncode() --
|
//-- Calls before InitEncode() --
|
||||||
// We want to revert the initialization done in Setup().
|
// We want to revert the initialization done in Setup().
|
||||||
EXPECT_TRUE(_encoder->Release() == WEBRTC_VIDEO_CODEC_OK);
|
EXPECT_TRUE(_encoder->Release() == WEBRTC_VIDEO_CODEC_OK);
|
||||||
VideoBufferToRawImage(_inputVideoBuffer, inputImage);
|
EXPECT_TRUE(_encoder->Encode(_inputVideoBuffer, NULL, NULL)
|
||||||
EXPECT_TRUE(_encoder->Encode(inputImage, NULL, NULL)
|
|
||||||
== WEBRTC_VIDEO_CODEC_UNINITIALIZED);
|
== WEBRTC_VIDEO_CODEC_UNINITIALIZED);
|
||||||
|
|
||||||
//-- InitEncode() errors --
|
//-- InitEncode() errors --
|
||||||
@ -424,26 +416,24 @@ UnitTest::Perform()
|
|||||||
EXPECT_TRUE(_encoder->Encode(inputImage, NULL, NULL) ==
|
EXPECT_TRUE(_encoder->Encode(inputImage, NULL, NULL) ==
|
||||||
WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
|
WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
|
||||||
_inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
|
_inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
|
||||||
_inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _refFrame);
|
_inputVideoBuffer.CopyFrame(_lengthSourceFrame, _refFrame);
|
||||||
_inputVideoBuffer.SetWidth(_source->GetWidth());
|
_inputVideoBuffer.SetWidth(_source->GetWidth());
|
||||||
_inputVideoBuffer.SetHeight(_source->GetHeight());
|
_inputVideoBuffer.SetHeight(_source->GetHeight());
|
||||||
|
|
||||||
//----- Encoder stress tests -----
|
//----- Encoder stress tests -----
|
||||||
|
|
||||||
// Vary frame rate and I-frame request.
|
// Vary frame rate and I-frame request.
|
||||||
VideoBufferToRawImage(_inputVideoBuffer, inputImage);
|
|
||||||
for (int i = 1; i <= 60; i++)
|
for (int i = 1; i <= 60; i++)
|
||||||
{
|
{
|
||||||
VideoFrameType frame_type = !(i % 2) ? kKeyFrame : kDeltaFrame;
|
VideoFrameType frame_type = !(i % 2) ? kKeyFrame : kDeltaFrame;
|
||||||
std::vector<VideoFrameType> frame_types(1, frame_type);
|
std::vector<VideoFrameType> frame_types(1, frame_type);
|
||||||
EXPECT_TRUE(_encoder->Encode(inputImage, NULL, &frame_types) ==
|
EXPECT_TRUE(_encoder->Encode(_inputVideoBuffer, NULL, &frame_types) ==
|
||||||
WEBRTC_VIDEO_CODEC_OK);
|
WEBRTC_VIDEO_CODEC_OK);
|
||||||
EXPECT_TRUE(WaitForEncodedFrame() > 0);
|
EXPECT_TRUE(WaitForEncodedFrame() > 0);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Init then encode.
|
// Init then encode.
|
||||||
_encodedVideoBuffer.UpdateLength(0);
|
_encodedVideoBuffer.SetLength(0);
|
||||||
_encodedVideoBuffer.Reset();
|
|
||||||
EXPECT_TRUE(_encoder->Encode(inputImage, NULL, NULL) ==
|
EXPECT_TRUE(_encoder->Encode(inputImage, NULL, NULL) ==
|
||||||
WEBRTC_VIDEO_CODEC_OK);
|
WEBRTC_VIDEO_CODEC_OK);
|
||||||
EXPECT_TRUE(WaitForEncodedFrame() > 0);
|
EXPECT_TRUE(WaitForEncodedFrame() > 0);
|
||||||
@ -453,11 +443,10 @@ UnitTest::Perform()
|
|||||||
frameLength = WaitForEncodedFrame();
|
frameLength = WaitForEncodedFrame();
|
||||||
EXPECT_TRUE(frameLength > 0);
|
EXPECT_TRUE(frameLength > 0);
|
||||||
EXPECT_TRUE(CheckIfBitExact(_refEncFrame, _refEncFrameLength,
|
EXPECT_TRUE(CheckIfBitExact(_refEncFrame, _refEncFrameLength,
|
||||||
_encodedVideoBuffer.GetBuffer(), frameLength) == true);
|
_encodedVideoBuffer.Buffer(), frameLength) == true);
|
||||||
|
|
||||||
// Reset then encode.
|
// Reset then encode.
|
||||||
_encodedVideoBuffer.UpdateLength(0);
|
_encodedVideoBuffer.SetLength(0);
|
||||||
_encodedVideoBuffer.Reset();
|
|
||||||
EXPECT_TRUE(_encoder->Encode(inputImage, NULL, NULL) ==
|
EXPECT_TRUE(_encoder->Encode(inputImage, NULL, NULL) ==
|
||||||
WEBRTC_VIDEO_CODEC_OK);
|
WEBRTC_VIDEO_CODEC_OK);
|
||||||
WaitForEncodedFrame();
|
WaitForEncodedFrame();
|
||||||
@ -466,11 +455,10 @@ UnitTest::Perform()
|
|||||||
frameLength = WaitForEncodedFrame();
|
frameLength = WaitForEncodedFrame();
|
||||||
EXPECT_TRUE(frameLength > 0);
|
EXPECT_TRUE(frameLength > 0);
|
||||||
EXPECT_TRUE(CheckIfBitExact(_refEncFrame, _refEncFrameLength,
|
EXPECT_TRUE(CheckIfBitExact(_refEncFrame, _refEncFrameLength,
|
||||||
_encodedVideoBuffer.GetBuffer(), frameLength) == true);
|
_encodedVideoBuffer.Buffer(), frameLength) == true);
|
||||||
|
|
||||||
// Release then encode.
|
// Release then encode.
|
||||||
_encodedVideoBuffer.UpdateLength(0);
|
_encodedVideoBuffer.SetLength(0);
|
||||||
_encodedVideoBuffer.Reset();
|
|
||||||
EXPECT_TRUE(_encoder->Encode(inputImage, NULL, NULL) ==
|
EXPECT_TRUE(_encoder->Encode(inputImage, NULL, NULL) ==
|
||||||
WEBRTC_VIDEO_CODEC_OK);
|
WEBRTC_VIDEO_CODEC_OK);
|
||||||
WaitForEncodedFrame();
|
WaitForEncodedFrame();
|
||||||
@ -480,7 +468,7 @@ UnitTest::Perform()
|
|||||||
frameLength = WaitForEncodedFrame();
|
frameLength = WaitForEncodedFrame();
|
||||||
EXPECT_TRUE(frameLength > 0);
|
EXPECT_TRUE(frameLength > 0);
|
||||||
EXPECT_TRUE(CheckIfBitExact(_refEncFrame, _refEncFrameLength,
|
EXPECT_TRUE(CheckIfBitExact(_refEncFrame, _refEncFrameLength,
|
||||||
_encodedVideoBuffer.GetBuffer(), frameLength) == true);
|
_encodedVideoBuffer.Buffer(), frameLength) == true);
|
||||||
|
|
||||||
//----- Decoder parameter tests -----
|
//----- Decoder parameter tests -----
|
||||||
|
|
||||||
@ -517,7 +505,7 @@ UnitTest::Perform()
|
|||||||
{
|
{
|
||||||
ASSERT_TRUE(fread(tmpBuf, 1, _refEncFrameLength, _sourceFile)
|
ASSERT_TRUE(fread(tmpBuf, 1, _refEncFrameLength, _sourceFile)
|
||||||
== _refEncFrameLength);
|
== _refEncFrameLength);
|
||||||
_encodedVideoBuffer.CopyBuffer(_refEncFrameLength, tmpBuf);
|
_encodedVideoBuffer.CopyFrame(_refEncFrameLength, tmpBuf);
|
||||||
VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
|
VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
|
||||||
int ret = _decoder->Decode(encodedImage, false, NULL);
|
int ret = _decoder->Decode(encodedImage, false, NULL);
|
||||||
EXPECT_TRUE(ret <= 0);
|
EXPECT_TRUE(ret <= 0);
|
||||||
@ -527,7 +515,7 @@ UnitTest::Perform()
|
|||||||
}
|
}
|
||||||
|
|
||||||
memset(tmpBuf, 0, _refEncFrameLength);
|
memset(tmpBuf, 0, _refEncFrameLength);
|
||||||
_encodedVideoBuffer.CopyBuffer(_refEncFrameLength, tmpBuf);
|
_encodedVideoBuffer.CopyFrame(_refEncFrameLength, tmpBuf);
|
||||||
VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
|
VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
|
||||||
ret = _decoder->Decode(encodedImage, false, NULL);
|
ret = _decoder->Decode(encodedImage, false, NULL);
|
||||||
EXPECT_TRUE(ret <= 0);
|
EXPECT_TRUE(ret <= 0);
|
||||||
@ -538,8 +526,8 @@ UnitTest::Perform()
|
|||||||
}
|
}
|
||||||
rewind(_sourceFile);
|
rewind(_sourceFile);
|
||||||
|
|
||||||
_encodedVideoBuffer.UpdateLength(_refEncFrameLength);
|
_encodedVideoBuffer.SetLength(_refEncFrameLength);
|
||||||
_encodedVideoBuffer.CopyBuffer(_refEncFrameLength, _refEncFrame);
|
_encodedVideoBuffer.CopyFrame(_refEncFrameLength, _refEncFrame);
|
||||||
|
|
||||||
// Init then decode.
|
// Init then decode.
|
||||||
EXPECT_TRUE(_decoder->InitDecode(&_inst, 1) == WEBRTC_VIDEO_CODEC_OK);
|
EXPECT_TRUE(_decoder->InitDecode(&_inst, 1) == WEBRTC_VIDEO_CODEC_OK);
|
||||||
@ -551,7 +539,7 @@ UnitTest::Perform()
|
|||||||
_decoder->Decode(encodedImage, false, NULL);
|
_decoder->Decode(encodedImage, false, NULL);
|
||||||
frameLength = WaitForDecodedFrame();
|
frameLength = WaitForDecodedFrame();
|
||||||
}
|
}
|
||||||
EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.GetBuffer(), frameLength,
|
EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.Buffer(), frameLength,
|
||||||
_refDecFrame, _lengthSourceFrame) == true);
|
_refDecFrame, _lengthSourceFrame) == true);
|
||||||
|
|
||||||
// Reset then decode.
|
// Reset then decode.
|
||||||
@ -563,7 +551,7 @@ UnitTest::Perform()
|
|||||||
_decoder->Decode(encodedImage, false, NULL);
|
_decoder->Decode(encodedImage, false, NULL);
|
||||||
frameLength = WaitForDecodedFrame();
|
frameLength = WaitForDecodedFrame();
|
||||||
}
|
}
|
||||||
EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.GetBuffer(), frameLength,
|
EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.Buffer(), frameLength,
|
||||||
_refDecFrame, _lengthSourceFrame) == true);
|
_refDecFrame, _lengthSourceFrame) == true);
|
||||||
|
|
||||||
// Decode with other size, reset, then decode with original size again
|
// Decode with other size, reset, then decode with original size again
|
||||||
@ -620,7 +608,7 @@ UnitTest::Perform()
|
|||||||
}
|
}
|
||||||
|
|
||||||
// check that decoded frame matches with reference
|
// check that decoded frame matches with reference
|
||||||
EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.GetBuffer(), frameLength,
|
EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.Buffer(), frameLength,
|
||||||
_refDecFrame, _lengthSourceFrame) == true);
|
_refDecFrame, _lengthSourceFrame) == true);
|
||||||
|
|
||||||
}
|
}
|
||||||
@ -636,10 +624,9 @@ UnitTest::Perform()
|
|||||||
_decoder->Decode(encodedImage, false, NULL);
|
_decoder->Decode(encodedImage, false, NULL);
|
||||||
frameLength = WaitForDecodedFrame();
|
frameLength = WaitForDecodedFrame();
|
||||||
}
|
}
|
||||||
EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.GetBuffer(), frameLength,
|
EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.Buffer(), frameLength,
|
||||||
_refDecFrame, _lengthSourceFrame) == true);
|
_refDecFrame, _lengthSourceFrame) == true);
|
||||||
_encodedVideoBuffer.UpdateLength(0);
|
_encodedVideoBuffer.SetLength(0);
|
||||||
_encodedVideoBuffer.Reset();
|
|
||||||
|
|
||||||
delete [] tmpBuf;
|
delete [] tmpBuf;
|
||||||
|
|
||||||
@ -661,16 +648,15 @@ UnitTest::Perform()
|
|||||||
while (fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile) ==
|
while (fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile) ==
|
||||||
_lengthSourceFrame)
|
_lengthSourceFrame)
|
||||||
{
|
{
|
||||||
_inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _sourceBuffer);
|
_inputVideoBuffer.CopyFrame(_lengthSourceFrame, _sourceBuffer);
|
||||||
_inputVideoBuffer.SetTimeStamp(frames);
|
_inputVideoBuffer.SetTimeStamp(frames);
|
||||||
VideoBufferToRawImage(_inputVideoBuffer, inputImage);
|
ASSERT_TRUE(_encoder->Encode(_inputVideoBuffer, NULL, NULL) ==
|
||||||
ASSERT_TRUE(_encoder->Encode(inputImage, NULL, NULL) ==
|
|
||||||
WEBRTC_VIDEO_CODEC_OK);
|
WEBRTC_VIDEO_CODEC_OK);
|
||||||
frameLength = WaitForEncodedFrame();
|
frameLength = WaitForEncodedFrame();
|
||||||
//ASSERT_TRUE(frameLength);
|
//ASSERT_TRUE(frameLength);
|
||||||
EXPECT_TRUE(frameLength > 0);
|
EXPECT_TRUE(frameLength > 0);
|
||||||
encTimeStamp = _encodedVideoBuffer.GetTimeStamp();
|
encTimeStamp = _encodedVideoBuffer.TimeStamp();
|
||||||
EXPECT_TRUE(_inputVideoBuffer.GetTimeStamp() ==
|
EXPECT_TRUE(_inputVideoBuffer.TimeStamp() ==
|
||||||
static_cast<unsigned>(encTimeStamp));
|
static_cast<unsigned>(encTimeStamp));
|
||||||
|
|
||||||
frameLength = Decode();
|
frameLength = Decode();
|
||||||
@ -684,7 +670,7 @@ UnitTest::Perform()
|
|||||||
{
|
{
|
||||||
encTimeStamp = 0;
|
encTimeStamp = 0;
|
||||||
}
|
}
|
||||||
EXPECT_TRUE(_decodedVideoBuffer.GetTimeStamp() ==
|
EXPECT_TRUE(_decodedVideoBuffer.TimeStamp() ==
|
||||||
static_cast<unsigned>(encTimeStamp));
|
static_cast<unsigned>(encTimeStamp));
|
||||||
frames++;
|
frames++;
|
||||||
}
|
}
|
||||||
@ -737,20 +723,18 @@ UnitTest::RateControlTests()
|
|||||||
while (fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile) ==
|
while (fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile) ==
|
||||||
_lengthSourceFrame)
|
_lengthSourceFrame)
|
||||||
{
|
{
|
||||||
_inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _sourceBuffer);
|
_inputVideoBuffer.CopyFrame(_lengthSourceFrame, _sourceBuffer);
|
||||||
_inputVideoBuffer.SetTimeStamp(_inputVideoBuffer.GetTimeStamp() +
|
_inputVideoBuffer.SetTimeStamp(_inputVideoBuffer.TimeStamp() +
|
||||||
static_cast<WebRtc_UWord32>(9e4 /
|
static_cast<WebRtc_UWord32>(9e4 /
|
||||||
static_cast<float>(_inst.maxFramerate)));
|
static_cast<float>(_inst.maxFramerate)));
|
||||||
VideoBufferToRawImage(_inputVideoBuffer, inputImage);
|
ASSERT_EQ(_encoder->Encode(_inputVideoBuffer, NULL, NULL),
|
||||||
ASSERT_EQ(_encoder->Encode(inputImage, NULL, NULL),
|
|
||||||
WEBRTC_VIDEO_CODEC_OK);
|
WEBRTC_VIDEO_CODEC_OK);
|
||||||
frameLength = WaitForEncodedFrame();
|
frameLength = WaitForEncodedFrame();
|
||||||
ASSERT_GE(frameLength, 0u);
|
ASSERT_GE(frameLength, 0u);
|
||||||
totalBytes += frameLength;
|
totalBytes += frameLength;
|
||||||
frames++;
|
frames++;
|
||||||
|
|
||||||
_encodedVideoBuffer.UpdateLength(0);
|
_encodedVideoBuffer.SetLength(0);
|
||||||
_encodedVideoBuffer.Reset();
|
|
||||||
}
|
}
|
||||||
WebRtc_UWord32 actualBitrate =
|
WebRtc_UWord32 actualBitrate =
|
||||||
(totalBytes / frames * _inst.maxFramerate * 8)/1000;
|
(totalBytes / frames * _inst.maxFramerate * 8)/1000;
|
||||||
@ -765,7 +749,6 @@ UnitTest::RateControlTests()
|
|||||||
ASSERT_TRUE(feof(_sourceFile) != 0);
|
ASSERT_TRUE(feof(_sourceFile) != 0);
|
||||||
rewind(_sourceFile);
|
rewind(_sourceFile);
|
||||||
}
|
}
|
||||||
inputImage.Free();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
bool
|
bool
|
||||||
|
@ -73,7 +73,7 @@ protected:
class UnitTestEncodeCompleteCallback : public webrtc::EncodedImageCallback
{
public:
UnitTestEncodeCompleteCallback(TestVideoEncodedBuffer* buffer,
UnitTestEncodeCompleteCallback(webrtc::VideoFrame* buffer,
WebRtc_UWord32 decoderSpecificSize = 0,
void* decoderSpecificInfo = NULL) :
_encodedVideoBuffer(buffer),
@ -86,7 +86,7 @@ public:
// Note that this only makes sense if an encode has been completed
webrtc::VideoFrameType EncodedFrameType() const;
private:
TestVideoEncodedBuffer* _encodedVideoBuffer;
webrtc::VideoFrame* _encodedVideoBuffer;
bool _encodeComplete;
webrtc::VideoFrameType _encodedFrameType;
};
@ -94,12 +94,12 @@ private:
class UnitTestDecodeCompleteCallback : public webrtc::DecodedImageCallback
{
public:
UnitTestDecodeCompleteCallback(TestVideoBuffer* buffer) :
UnitTestDecodeCompleteCallback(webrtc::VideoFrame* buffer) :
_decodedVideoBuffer(buffer), _decodeComplete(false) {}
WebRtc_Word32 Decoded(webrtc::VideoFrame& image);
bool DecodeComplete();
private:
TestVideoBuffer* _decodedVideoBuffer;
webrtc::VideoFrame* _decodedVideoBuffer;
bool _decodeComplete;
};

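Both callbacks now write into a caller-owned webrtc::VideoFrame. As a sketch of what the decode path looks like after this change, mirroring DualDecoderCompleteCallback::Decoded later in this diff rather than the exact unit_test.cc body:

    // Sketch only: the member names come from the class declaration above, and the
    // VideoFrame calls are the ones used elsewhere in this change.
    WebRtc_Word32 UnitTestDecodeCompleteCallback::Decoded(webrtc::VideoFrame& image)
    {
        // Grow the target frame if needed, then copy the decoded pixels and the
        // frame metadata (dimensions and 90 kHz timestamp).
        _decodedVideoBuffer->VerifyAndAllocate(image.Length());
        _decodedVideoBuffer->CopyFrame(image.Length(), image.Buffer());
        _decodedVideoBuffer->SetWidth(image.Width());
        _decodedVideoBuffer->SetHeight(image.Height());
        _decodedVideoBuffer->SetTimeStamp(image.TimeStamp());
        _decodeComplete = true;
        return 0;
    }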
@ -1,319 +0,0 @@
/*
 * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include <assert.h>
#include <string.h>
#include "video_buffer.h"

using namespace webrtc;

TestVideoBuffer::TestVideoBuffer():
_buffer(0),
_bufferSize(0),
_bufferLength(0),
_startOffset(0),
_timeStamp(0),
_width(0),
_height(0)
{
//
}

TestVideoBuffer::~TestVideoBuffer()
{
_timeStamp = 0;
_startOffset = 0;
_bufferLength = 0;
_bufferSize = 0;

if(_buffer)
{
delete [] _buffer;
_buffer = 0;
}
}

TestVideoBuffer::TestVideoBuffer(const TestVideoBuffer& rhs)
:
_buffer(0),
_bufferSize(rhs._bufferSize),
_bufferLength(rhs._bufferLength),
_startOffset(rhs._startOffset),
_timeStamp(rhs._timeStamp),
_width(rhs._width),
_height(rhs._height)
{
// make sure that our buffer is big enough
_buffer = new unsigned char[_bufferSize];

// only copy required length
memcpy(_buffer + _startOffset, rhs._buffer, _bufferLength); // GetBuffer() includes _startOffset
}

void TestVideoBuffer::SetTimeStamp(unsigned int timeStamp)
{
_timeStamp = timeStamp;
}

unsigned int
TestVideoBuffer::GetWidth() const
{
return _width;
}

unsigned int
TestVideoBuffer::GetHeight() const
{
return _height;
}

void
TestVideoBuffer::SetWidth(unsigned int width)
{
_width = width;
}

void
TestVideoBuffer::SetHeight(unsigned int height)
{
_height = height;
}

void TestVideoBuffer::Free()
{
_timeStamp = 0;
_startOffset = 0;
_bufferLength = 0;
_bufferSize = 0;
_height = 0;
_width = 0;

if(_buffer)
{
delete [] _buffer;
_buffer = 0;
}
}

void TestVideoBuffer::VerifyAndAllocate(unsigned int minimumSize)
{
if(minimumSize > _bufferSize)
{
// make sure that our buffer is big enough
unsigned char * newBufferBuffer = new unsigned char[minimumSize];
if(_buffer)
{
// copy the old data
memcpy(newBufferBuffer, _buffer, _bufferSize);
delete [] _buffer;
}
_buffer = newBufferBuffer;
_bufferSize = minimumSize;
}
}

int TestVideoBuffer::SetOffset(unsigned int length)
{
if (length > _bufferSize ||
length > _bufferLength)
{
return -1;
}

unsigned int oldOffset = _startOffset;

if(oldOffset > length)
{
unsigned int newLength = _bufferLength + (oldOffset-length);// increase by the diff
assert(newLength <= _bufferSize);
_bufferLength = newLength;
}
if(oldOffset < length)
{
if(_bufferLength > (length-oldOffset))
{
_bufferLength -= (length-oldOffset); // decrease by the diff
}
}
_startOffset = length; // update

return 0;
}

void TestVideoBuffer::UpdateLength(unsigned int newLength)
{
assert(newLength +_startOffset <= _bufferSize);
_bufferLength = newLength;
}

void TestVideoBuffer::CopyBuffer(unsigned int length, const unsigned char* buffer)
{
assert(length+_startOffset <= _bufferSize);
memcpy(_buffer+_startOffset, buffer, length);
_bufferLength = length;
}

void TestVideoBuffer::CopyBuffer(TestVideoBuffer& fromVideoBuffer)
{
assert(fromVideoBuffer.GetLength() + fromVideoBuffer.GetStartOffset() <= _bufferSize);
assert(fromVideoBuffer.GetSize() <= _bufferSize);

_bufferLength = fromVideoBuffer.GetLength();
_startOffset = fromVideoBuffer.GetStartOffset();
_timeStamp = fromVideoBuffer.GetTimeStamp();
_height = fromVideoBuffer.GetHeight();
_width = fromVideoBuffer.GetWidth();

// only copy required length
memcpy(_buffer+_startOffset, fromVideoBuffer.GetBuffer(), fromVideoBuffer.GetLength()); // GetBuffer() includes _startOffset

}

void TestVideoBuffer::CopyPointer(const TestVideoBuffer& fromVideoBuffer)
{
_bufferSize = fromVideoBuffer.GetSize();
_bufferLength = fromVideoBuffer.GetLength();
_startOffset = fromVideoBuffer.GetStartOffset();
_timeStamp = fromVideoBuffer.GetTimeStamp();
_height = fromVideoBuffer.GetHeight();
_width = fromVideoBuffer.GetWidth();

_buffer = fromVideoBuffer.GetBuffer();
}

void TestVideoBuffer::ClearPointer()
{
_buffer = NULL;
}

void TestVideoBuffer::SwapBuffers(TestVideoBuffer& videoBuffer)
{
unsigned char* tempBuffer = _buffer;
unsigned int tempSize = _bufferSize;
unsigned int tempLength =_bufferLength;
unsigned int tempOffset = _startOffset;
unsigned int tempTime = _timeStamp;
unsigned int tempHeight = _height;
unsigned int tempWidth = _width;

_buffer = videoBuffer.GetBuffer();
_bufferSize = videoBuffer.GetSize();
_bufferLength = videoBuffer.GetLength();
_startOffset = videoBuffer.GetStartOffset();
_timeStamp = videoBuffer.GetTimeStamp();
_height = videoBuffer.GetHeight();
_width = videoBuffer.GetWidth();

videoBuffer.Set(tempBuffer, tempSize, tempLength, tempOffset, tempTime);
videoBuffer.SetHeight(tempHeight);
videoBuffer.SetWidth(tempWidth);
}

void TestVideoBuffer::Set(unsigned char* tempBuffer,unsigned int tempSize,unsigned int tempLength, unsigned int tempOffset,unsigned int timeStamp)
{
_buffer = tempBuffer;
_bufferSize = tempSize;
_bufferLength = tempLength;
_startOffset = tempOffset;
_timeStamp = timeStamp;
}

unsigned char* TestVideoBuffer::GetBuffer() const
{
return _buffer+_startOffset;
}

unsigned int TestVideoBuffer::GetStartOffset() const
{
return _startOffset;
}

unsigned int TestVideoBuffer::GetSize() const
{
return _bufferSize;
}

unsigned int TestVideoBuffer::GetLength() const
{
return _bufferLength;
}

unsigned int TestVideoBuffer::GetTimeStamp() const
{
return _timeStamp;
}

/**
* TestVideoEncodedBuffer
*
*/

TestVideoEncodedBuffer::TestVideoEncodedBuffer() :
_captureWidth(0),
_captureHeight(0),
_frameRate(-1)
{
_frameType = kDeltaFrame;
}

TestVideoEncodedBuffer::~TestVideoEncodedBuffer()
{
}

void TestVideoEncodedBuffer::SetCaptureWidth(unsigned short width)
{
_captureWidth = width;
}

void TestVideoEncodedBuffer::SetCaptureHeight(unsigned short height)
{
_captureHeight = height;
}

unsigned short TestVideoEncodedBuffer::GetCaptureWidth()
{
return _captureWidth;
}

unsigned short TestVideoEncodedBuffer::GetCaptureHeight()
{
return _captureHeight;
}

VideoFrameType TestVideoEncodedBuffer::GetFrameType()
{
return _frameType;
}

void TestVideoEncodedBuffer::SetFrameType(VideoFrameType frametype)
{
_frameType = frametype;
}

void TestVideoEncodedBuffer::Reset()
{
_captureWidth = 0;
_captureHeight = 0;
_frameRate = -1;
_frameType = kDeltaFrame;
}

void TestVideoEncodedBuffer::SetFrameRate(float frameRate)
{
_frameRate = frameRate;
}

float TestVideoEncodedBuffer::GetFrameRate()
{
return _frameRate;
}
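The removed helper duplicates functionality that webrtc::VideoFrame already provides, which is what lets the tests switch over. The renames that actually appear in this change map roughly as follows (only accessors seen in this diff are listed; other members may differ):

    TestVideoBuffer::GetBuffer()       ->  webrtc::VideoFrame::Buffer()
    TestVideoBuffer::GetLength()       ->  webrtc::VideoFrame::Length()
    TestVideoBuffer::GetTimeStamp()    ->  webrtc::VideoFrame::TimeStamp()
    TestVideoBuffer::UpdateLength(n)   ->  webrtc::VideoFrame::SetLength(n)
    TestVideoBuffer::CopyBuffer(n, p)  ->  webrtc::VideoFrame::CopyFrame(n, p)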
@ -1,122 +0,0 @@
/*
 * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_VIDEO_BUFFER_H_
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_VIDEO_BUFFER_H_

#include "typedefs.h"
#include "video_image.h"

class TestVideoBuffer
{
public:
TestVideoBuffer();

virtual ~TestVideoBuffer();

TestVideoBuffer(const TestVideoBuffer& rhs);

/**
* Verifies that current allocated buffer size is larger than or equal to the input size.
* If the current buffer size is smaller, a new allocation is made and the old buffer data is copied to the new buffer.
*/
void VerifyAndAllocate(unsigned int minimumSize);

void UpdateLength(unsigned int newLength);

void SwapBuffers(TestVideoBuffer& videoBuffer);

void CopyBuffer(unsigned int length, const unsigned char* fromBuffer);

void CopyBuffer(TestVideoBuffer& fromVideoBuffer);

// Use with care, and remember to call ClearPointer() when done.
void CopyPointer(const TestVideoBuffer& fromVideoBuffer);

void ClearPointer();

int SetOffset(unsigned int length); // Sets offset to beginning of frame in buffer

void Free(); // Deletes frame buffer and resets members to zero

void SetTimeStamp(unsigned int timeStamp); // Sets timestamp of frame (90kHz)

/**
* Gets pointer to frame buffer
*/
unsigned char* GetBuffer() const;

/**
* Gets allocated buffer size
*/
unsigned int GetSize() const;

/**
* Gets length of frame
*/
unsigned int GetLength() const;

/**
* Gets timestamp of frame (90kHz)
*/
unsigned int GetTimeStamp() const;

unsigned int GetWidth() const;
unsigned int GetHeight() const;

void SetWidth(unsigned int width);
void SetHeight(unsigned int height);

private:
TestVideoBuffer& operator=(const TestVideoBuffer& inBuffer);

private:
void Set(unsigned char* buffer,unsigned int size,unsigned int length,unsigned int offset, unsigned int timeStamp);
unsigned int GetStartOffset() const;

unsigned char* _buffer; // Pointer to frame buffer
unsigned int _bufferSize; // Allocated buffer size
unsigned int _bufferLength; // Length (in bytes) of frame
unsigned int _startOffset; // Offset (in bytes) to beginning of frame in buffer
unsigned int _timeStamp; // Timestamp of frame (90kHz)
unsigned int _width;
unsigned int _height;
};

class TestVideoEncodedBuffer: public TestVideoBuffer
{
public:
TestVideoEncodedBuffer();
~TestVideoEncodedBuffer();

void SetCaptureWidth(unsigned short width);
void SetCaptureHeight(unsigned short height);
unsigned short GetCaptureWidth();
unsigned short GetCaptureHeight();

webrtc::VideoFrameType GetFrameType();
void SetFrameType(webrtc::VideoFrameType frametype);

void Reset();

void SetFrameRate(float frameRate);
float GetFrameRate();

private:
TestVideoEncodedBuffer& operator=(const TestVideoEncodedBuffer& inBuffer);

private:
unsigned short _captureWidth;
unsigned short _captureHeight;
webrtc::VideoFrameType _frameType;
float _frameRate;
};

#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_VIDEO_BUFFER_H_
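The header documents VerifyAndAllocate as a grow-only reallocation that preserves existing contents. A standalone restatement of that idiom, lifted from the removed implementation above rather than from webrtc::VideoFrame:

    #include <cstring>

    // Grow-and-copy sketch matching TestVideoBuffer::VerifyAndAllocate above:
    // the buffer only ever grows, and old data survives the reallocation.
    static void VerifyAndAllocate(unsigned char*& buffer, unsigned int& bufferSize,
                                  unsigned int minimumSize)
    {
        if (minimumSize > bufferSize)
        {
            unsigned char* newBuffer = new unsigned char[minimumSize];
            if (buffer)
            {
                memcpy(newBuffer, buffer, bufferSize);  // keep the old contents
                delete [] buffer;
            }
            buffer = newBuffer;
            bufferSize = minimumSize;
        }
    }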
@ -149,7 +149,7 @@ VP8DualDecoderTest::Perform()
int
VP8DualDecoderTest::Decode(int lossValue)
{
_sumEncBytes += _frameToDecode->_frame->GetLength();
_sumEncBytes += _frameToDecode->_frame->Length();
webrtc::EncodedImage encodedImage;
VideoEncodedBufferToEncodedImage(*(_frameToDecode->_frame), encodedImage);
encodedImage._completeFrame = !lossValue;
@ -171,9 +171,9 @@ VP8DualDecoderTest::Decode(int lossValue)
}

// compare decoded images
if (!CheckIfBitExact(_decodedVideoBuffer.GetBuffer(),
if (!CheckIfBitExact(_decodedVideoBuffer.Buffer(),
_decodedVideoBuffer.GetLength(),
_decodedVideoBuffer.Length(),
_decodedVideoBuffer2.GetBuffer(), _decodedVideoBuffer.GetLength()))
_decodedVideoBuffer2.Buffer(), _decodedVideoBuffer.Length()))
{
fprintf(stderr,"\n\nClone output different from master.\n\n");
exit(EXIT_FAILURE);
@ -201,7 +201,7 @@ VP8DualDecoderTest::CheckIfBitExact(const void* ptrA, unsigned int aLengthBytes,
WebRtc_Word32 DualDecoderCompleteCallback::Decoded(webrtc::VideoFrame& image)
{
_decodedVideoBuffer->VerifyAndAllocate(image.Length());
_decodedVideoBuffer->CopyBuffer(image.Length(), image.Buffer());
_decodedVideoBuffer->CopyFrame(image.Length(), image.Buffer());
_decodedVideoBuffer->SetWidth(image.Width());
_decodedVideoBuffer->SetHeight(image.Height());
_decodedVideoBuffer->SetTimeStamp(image.TimeStamp());
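The dual-decoder comparison above treats two decoded frames as matching only when every byte is identical. A standalone sketch of that check; the memcmp line is taken from the test's CheckIfBitExact shown further down, while the explicit length guard here is an assumption:

    #include <cstring>

    static bool CheckIfBitExact(const void* ptrA, unsigned int aLengthBytes,
                                const void* ptrB, unsigned int bLengthBytes)
    {
        if (aLengthBytes != bLengthBytes)
            return false;                              // assumed guard, not shown in the diff
        return memcmp(ptrA, ptrB, aLengthBytes) == 0;  // byte-for-byte comparison
    }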
@ -30,7 +30,7 @@ protected:
virtual int Decode(int lossValue = 0);

webrtc::VP8Decoder* _decoder2;
TestVideoBuffer _decodedVideoBuffer2;
webrtc::VideoFrame _decodedVideoBuffer2;
static bool CheckIfBitExact(const void *ptrA, unsigned int aLengthBytes,
const void *ptrB, unsigned int bLengthBytes);
private:
@ -39,12 +39,12 @@ private:
class DualDecoderCompleteCallback : public webrtc::DecodedImageCallback
{
public:
DualDecoderCompleteCallback(TestVideoBuffer* buffer)
DualDecoderCompleteCallback(webrtc::VideoFrame* buffer)
: _decodedVideoBuffer(buffer), _decodeComplete(false) {}
WebRtc_Word32 Decoded(webrtc::VideoFrame& decodedImage);
bool DecodeComplete();
private:
TestVideoBuffer* _decodedVideoBuffer;
webrtc::VideoFrame* _decodedVideoBuffer;
bool _decodeComplete;
};

@ -137,18 +137,16 @@ bool VP8RpsTest::EncodeRps(RpsDecodeCompleteCallback* decodeCallback) {
size_t bytes_read = fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile);
if (bytes_read < _lengthSourceFrame)
return true;
_inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _sourceBuffer);
_inputVideoBuffer.CopyFrame(_lengthSourceFrame, _sourceBuffer);
_inputVideoBuffer.SetTimeStamp((unsigned int)
(_encFrameCnt * 9e4 / _inst.maxFramerate));
_inputVideoBuffer.SetWidth(_inst.width);
_inputVideoBuffer.SetHeight(_inst.height);
webrtc::VideoFrame rawImage;
VideoBufferToRawImage(_inputVideoBuffer, rawImage);
if (feof(_sourceFile) != 0) {
return true;
}
_encodeCompleteTime = 0;
_encodeTimes[rawImage.TimeStamp()] = tGetTime();
_encodeTimes[_inputVideoBuffer.TimeStamp()] = tGetTime();

webrtc::CodecSpecificInfo* codecSpecificInfo = CreateEncoderSpecificInfo();
codecSpecificInfo->codecSpecific.VP8.pictureIdRPSI =
@ -161,7 +159,7 @@ bool VP8RpsTest::EncodeRps(RpsDecodeCompleteCallback* decodeCallback) {
sli_ = false;
}
printf("Encoding: %u\n", _framecnt);
int ret = _encoder->Encode(rawImage, codecSpecificInfo, NULL);
int ret = _encoder->Encode(_inputVideoBuffer, codecSpecificInfo, NULL);
if (ret < 0)
printf("Failed to encode: %u\n", _framecnt);

@ -171,10 +169,11 @@ bool VP8RpsTest::EncodeRps(RpsDecodeCompleteCallback* decodeCallback) {
}
if (_encodeCompleteTime > 0) {
_totalEncodeTime += _encodeCompleteTime -
_encodeTimes[rawImage.TimeStamp()];
_encodeTimes[_inputVideoBuffer.TimeStamp()];
}
else {
_totalEncodeTime += tGetTime() - _encodeTimes[rawImage.TimeStamp()];
_totalEncodeTime += tGetTime() -
_encodeTimes[_inputVideoBuffer.TimeStamp()];
}
return false;
}
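EncodeRps times each frame by recording tGetTime() under the frame's 90 kHz timestamp and subtracting it once the encode completes; after this change the key comes from _inputVideoBuffer.TimeStamp() instead of the removed rawImage. A minimal sketch of that bookkeeping, assuming a std::map keyed by the timestamp (the struct and names here are illustrative, not the test's members):

    #include <map>

    struct EncodeTimer {
        std::map<unsigned int, double> startTimes;  // 90 kHz timestamp -> start time (s)
        double totalEncodeTime = 0.0;

        void FrameSent(unsigned int timeStamp, double now) {
            startTimes[timeStamp] = now;            // record when the frame entered Encode()
        }
        void FrameEncoded(unsigned int timeStamp, double now) {
            totalEncodeTime += now - startTimes[timeStamp];  // accumulate per-frame encode time
        }
    };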
@ -182,7 +181,7 @@ bool VP8RpsTest::EncodeRps(RpsDecodeCompleteCallback* decodeCallback) {
//#define FRAME_LOSS 1

int VP8RpsTest::Decode(int lossValue) {
_sumEncBytes += _frameToDecode->_frame->GetLength();
_sumEncBytes += _frameToDecode->_frame->Length();
webrtc::EncodedImage encodedImage;
VideoEncodedBufferToEncodedImage(*(_frameToDecode->_frame), encodedImage);
encodedImage._completeFrame = !lossValue;
@ -230,9 +229,9 @@ int VP8RpsTest::Decode(int lossValue) {
}
#else
if (_framecnt > 0 && _framecnt % 10 != 0) {
if (!CheckIfBitExact(_decodedVideoBuffer.GetBuffer(),
if (!CheckIfBitExact(_decodedVideoBuffer.Buffer(),
_decodedVideoBuffer.GetLength(),
_decodedVideoBuffer.Length(),
decoded_frame2_.GetBuffer(), _decodedVideoBuffer.GetLength())) {
decoded_frame2_.Buffer(), _decodedVideoBuffer.Length())) {
fprintf(stderr,"\n\nRPS decoder different from master: %u\n\n",
_framecnt);
return -1;
@ -257,7 +256,7 @@ VP8RpsTest::CheckIfBitExact(const void* ptrA, unsigned int aLengthBytes,
return memcmp(ptrA, ptrB, aLengthBytes) == 0;
}

RpsDecodeCompleteCallback::RpsDecodeCompleteCallback(TestVideoBuffer* buffer)
RpsDecodeCompleteCallback::RpsDecodeCompleteCallback(webrtc::VideoFrame* buffer)
: decoded_frame_(buffer),
decode_complete_(false),
last_decoded_picture_id_(0),
@ -266,13 +265,8 @@ RpsDecodeCompleteCallback::RpsDecodeCompleteCallback(TestVideoBuffer* buffer)
}

WebRtc_Word32 RpsDecodeCompleteCallback::Decoded(webrtc::VideoFrame& image) {
decoded_frame_->VerifyAndAllocate(image.Length());
return decoded_frame_->CopyFrame(image);
decoded_frame_->CopyBuffer(image.Length(), image.Buffer());
decoded_frame_->SetWidth(image.Width());
decoded_frame_->SetHeight(image.Height());
decoded_frame_->SetTimeStamp(image.TimeStamp());
decode_complete_ = true;
return 0;
}

bool RpsDecodeCompleteCallback::DecodeComplete() {
@ -32,13 +32,13 @@ class VP8RpsTest : public VP8NormalAsyncTest {
const void *ptrB, unsigned int bLengthBytes);

webrtc::VP8Decoder* decoder2_;
TestVideoBuffer decoded_frame2_;
webrtc::VideoFrame decoded_frame2_;
bool sli_;
};

class RpsDecodeCompleteCallback : public webrtc::DecodedImageCallback {
public:
RpsDecodeCompleteCallback(TestVideoBuffer* buffer);
RpsDecodeCompleteCallback(webrtc::VideoFrame* buffer);
WebRtc_Word32 Decoded(webrtc::VideoFrame& decodedImage);
bool DecodeComplete();
WebRtc_Word32 ReceivedDecodedReferenceFrame(const WebRtc_UWord64 picture_id);
@ -47,7 +47,7 @@ class RpsDecodeCompleteCallback : public webrtc::DecodedImageCallback {
WebRtc_UWord64 LastDecodedRefPictureId(bool *updated);

private:
TestVideoBuffer* decoded_frame_;
webrtc::VideoFrame* decoded_frame_;
bool decode_complete_;
WebRtc_UWord64 last_decoded_picture_id_;
WebRtc_UWord64 last_decoded_ref_picture_id_;