git-svn-id: http://webrtc.googlecode.com/svn/trunk@4 4adac7df-926f-26a2-2b94-8c16560cd09d

This commit is contained in:
niklase@google.com
2011-05-30 11:22:19 +00:00
parent 01813fe945
commit 77ae29bc81
1153 changed files with 404089 additions and 0 deletions

View File

@@ -0,0 +1,304 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "benchmark.h"
#include "video_source.h"
#include "vplib.h"
#include <cassert>
#include <cstdlib>
#include <fstream>
#include <iostream>
#include <sstream>
#include <vector>
#if defined(_WIN32)
#include <windows.h>
#endif
#include "event_wrapper.h"
#include "video_codec_interface.h"
#define SSIM_CALC 0 // by default, don't compute SSIM
using namespace webrtc;
// Default benchmark: test number 6, results written to the default file.
// The base class is listed first in the mem-initializer list to match the
// actual initialization order (bases are constructed before members).
Benchmark::Benchmark()
:
NormalAsyncTest("Benchmark", "Codec benchmark over a range of test cases", 6),
_resultsFileName("../../../../testFiles/benchmark.txt"),
_codecName("Default")
{
}
// Named benchmark with default results file and codec name.
// Base class listed first to match actual initialization order.
Benchmark::Benchmark(std::string name, std::string description)
:
NormalAsyncTest(name, description, 6),
_resultsFileName("../../../../testFiles/benchmark.txt"),
_codecName("Default")
{
}
// Fully parameterized benchmark: caller supplies the results file and the
// codec name written at the top of the report.
// Base class listed first to match actual initialization order.
Benchmark::Benchmark(std::string name, std::string description, std::string resultsFileName, std::string codecName)
:
NormalAsyncTest(name, description, 6),
_resultsFileName(resultsFileName),
_codecName(codecName)
{
}
// Runs the full benchmark: for every source clip, target frame size and
// frame rate, encodes and decodes at a series of bit rates, collecting the
// actual bit rate, Y-PSNR, (optionally) SSIM, average encode/decode times
// and -- for frame rates selected in speedTestMask -- an averaged speed
// figure. Results are printed to stdout and written to _resultsFileName.
void
Benchmark::Perform()
{
    std::vector<const VideoSource*> sources;
    std::vector<const VideoSource*>::iterator it;

    // Configuration --------------------------
    sources.push_back(new const VideoSource("test/testFiles/foreman_cif.yuv", kCIF));
    sources.push_back(new const VideoSource("test/testFiles/akiyo_cif.yuv", kCIF));
    const VideoSize size[] = {kQCIF, kCIF};
    const int frameRate[] = {10, 15, 30};
    // Specifies the framerates for which to perform a speed test.
    const bool speedTestMask[] = {false, false, false};
    const int bitRate[] = {50, 100, 200, 300, 400, 500, 600, 1000};
    // Determines the number of iterations to perform to arrive at the speed result.
    enum { kSpeedTestIterations = 10 };
    // ----------------------------------------

    const int nFrameRates = sizeof(frameRate)/sizeof(*frameRate);
    assert(sizeof(speedTestMask)/sizeof(*speedTestMask) == nFrameRates);
    const int nBitrates = sizeof(bitRate)/sizeof(*bitRate);
    int testIterations = 10;
    double psnr[nBitrates];
    double ssim[nBitrates];
    double fps[nBitrates];
    double totalEncodeTime[nBitrates];
    double totalDecodeTime[nBitrates];

    _results.open(_resultsFileName.c_str(), std::fstream::out);
    _results << GetMagicStr() << std::endl;
    _results << _codecName << std::endl;
    for (it = sources.begin() ; it < sources.end(); it++)
    {
        // size_t matches the unsigned result of the sizeof expression
        // (avoids a signed/unsigned comparison warning).
        for (size_t i = 0; i < sizeof(size)/sizeof(*size); i++)
        {
            for (int j = 0; j < nFrameRates; j++)
            {
                // Build the name of the pre-converted clip for this
                // size/frame-rate combination.
                std::stringstream ss;
                std::string strFrameRate;
                std::string outFileName;
                ss << frameRate[j];
                ss >> strFrameRate;
                outFileName = (*it)->GetFilePath() + "/" + (*it)->GetName() + "_" +
                    VideoSource::GetSizeString(size[i]) + "_" + strFrameRate + ".yuv";
                _target = new const VideoSource(outFileName, size[i], frameRate[j]);
                (*it)->Convert(*_target);
                // Fall back to the original clip if the converted file was
                // not produced.
                if (VideoSource::FileExists(outFileName.c_str()))
                {
                    _inname = outFileName;
                }
                else
                {
                    _inname = (*it)->GetFileName();
                }
                std::cout << (*it)->GetName() << ", " << VideoSource::GetSizeString(size[i])
                    << ", " << frameRate[j] << " fps" << std::endl << "Bitrate [kbps]:";
                _results << (*it)->GetName() << "," << VideoSource::GetSizeString(size[i])
                    << "," << frameRate[j] << " fps" << std::endl << "Bitrate [kbps]";
                // Multiple iterations only for combinations selected for a
                // speed measurement.
                if (speedTestMask[j])
                {
                    testIterations = kSpeedTestIterations;
                }
                else
                {
                    testIterations = 1;
                }
                for (int k = 0; k < nBitrates; k++)
                {
                    _bitRate = (bitRate[k]);
                    double avgFps = 0.0;
                    totalEncodeTime[k] = 0;
                    totalDecodeTime[k] = 0;
                    for (int l = 0; l < testIterations; l++)
                    {
                        PerformNormalTest();
                        _appendNext = false;
                        avgFps += _framecnt / (_totalEncodeTime + _totalDecodeTime);
                        totalEncodeTime[k] += _totalEncodeTime;
                        totalDecodeTime[k] += _totalDecodeTime;
                    }
                    avgFps /= testIterations;
                    totalEncodeTime[k] /= testIterations;
                    totalDecodeTime[k] /= testIterations;
                    double actualBitRate = ActualBitRate(_framecnt) / 1000.0;
                    std::cout << " " << actualBitRate;
                    _results << "," << actualBitRate;
                    PSNRfromFiles(_inname.c_str(), _outname.c_str(), _inst.width,
                        _inst.height, &psnr[k]);
                    if (SSIM_CALC)
                    {
                        SSIMfromFiles(_inname.c_str(), _outname.c_str(), _inst.width,
                            _inst.height, &ssim[k]);
                    }
                    fps[k] = avgFps;
                }
                std::cout << std::endl << "Y-PSNR [dB]:";
                _results << std::endl << "Y-PSNR [dB]";
                for (int k = 0; k < nBitrates; k++)
                {
                    std::cout << " " << psnr[k];
                    _results << "," << psnr[k];
                }
                if (SSIM_CALC)
                {
                    std::cout << std::endl << "SSIM: ";
                    _results << std::endl << "SSIM ";
                    for (int k = 0; k < nBitrates; k++)
                    {
                        std::cout << " " << ssim[k];
                        _results << "," << ssim[k];
                    }
                }
                std::cout << std::endl << "Encode Time[ms]:";
                _results << std::endl << "Encode Time[ms]";
                for (int k = 0; k < nBitrates; k++)
                {
                    std::cout << " " << totalEncodeTime[k];
                    _results << "," << totalEncodeTime[k];
                }
                std::cout << std::endl << "Decode Time[ms]:";
                _results << std::endl << "Decode Time[ms]";
                for (int k = 0; k < nBitrates; k++)
                {
                    std::cout << " " << totalDecodeTime[k];
                    _results << "," << totalDecodeTime[k];
                }
                if (speedTestMask[j])
                {
                    std::cout << std::endl << "Speed [fps]:";
                    _results << std::endl << "Speed [fps]";
                    for (int k = 0; k < nBitrates; k++)
                    {
                        std::cout << " " << static_cast<int>(fps[k] + 0.5);
                        _results << "," << static_cast<int>(fps[k] + 0.5);
                    }
                }
                std::cout << std::endl << std::endl;
                _results << std::endl << std::endl;
                delete _target;
            }
        }
        delete *it;
    }
    _results.close();
}
// Runs a single encode/decode pass over the current target clip at the
// configured _bitRate, accumulating timing statistics in the base-class
// members. Unlike NormalAsyncTest::Perform(), the bitrate is initialized
// once up front rather than per frame.
void
Benchmark::PerformNormalTest()
{
    _encoder = GetNewEncoder();
    _decoder = GetNewDecoder();
    CodecSettings(_target->GetWidth(), _target->GetHeight(), _target->GetFrameRate(), _bitRate);
    Setup();
    EventWrapper* waitEvent = EventWrapper::Create();

    _inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
    _decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
    // Abort on codec initialization failure, consistent with the checks in
    // NormalAsyncTest::Perform(); continuing with a half-initialized codec
    // would produce meaningless results.
    if (_encoder->InitEncode(&_inst, 4, 1440) < 0)
    {
        std::cerr << "Error: failed to initialize encoder" << std::endl;
        exit(EXIT_FAILURE);
    }
    CodecSpecific_InitBitrate();
    if (_decoder->InitDecode(&_inst,1) < 0)
    {
        std::cerr << "Error: failed to initialize decoder" << std::endl;
        exit(EXIT_FAILURE);
    }
    FrameQueue frameQueue;
    VideoEncodeCompleteCallback encCallback(_encodedFile, &frameQueue, *this);
    VideoDecodeCompleteCallback decCallback(_decodedFile, *this);
    _encoder->RegisterEncodeCompleteCallback(&encCallback);
    _decoder->RegisterDecodeCompleteCallback(&decCallback);
    SetCodecSpecificParameters();
    _totalEncodeTime = _totalDecodeTime = 0;
    _totalEncodePipeTime = _totalDecodePipeTime = 0;
    bool complete = false;
    _framecnt = 0;
    _encFrameCnt = 0;
    _sumEncBytes = 0;
    _lengthEncFrame = 0;
    // Encode frame by frame; drain every encoded frame through the packet
    // loss simulator and the decoder as it becomes available.
    while (!complete)
    {
        complete = Encode();
        if (!frameQueue.Empty() || complete)
        {
            while (!frameQueue.Empty())
            {
                _frameToDecode = static_cast<FrameQueueTuple *>(frameQueue.PopFrame());
                DoPacketLoss();
                int ret = Decode();
                delete _frameToDecode;
                _frameToDecode = NULL;
                if (ret < 0)
                {
                    fprintf(stderr,"\n\nError in decoder: %d\n\n", ret);
                    exit(EXIT_FAILURE);
                }
                else if (ret == 0)
                {
                    _framecnt++;
                }
                else
                {
                    fprintf(stderr, "\n\nPositive return value from decode!\n\n");
                }
            }
        }
        // Brief wait to let asynchronous callbacks run.
        waitEvent->Wait(5);
    }
    _inputVideoBuffer.Free();
    //_encodedVideoBuffer.Reset(); ?
    _encodedVideoBuffer.Free();
    _decodedVideoBuffer.Free();
    _encoder->Release();
    _decoder->Release();
    delete waitEvent;
    delete _encoder;
    delete _decoder;
    Teardown();
}
// Configures the encoder's target rate: uses the requested _bitRate, or
// falls back to 600 kbps when no rate (0) was configured.
void
Benchmark::CodecSpecific_InitBitrate()
{
    _encoder->SetRates(_bitRate == 0 ? 600 : _bitRate, _inst.maxFramerate);
}

View File

@@ -0,0 +1,40 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAWEWORK_BENCHMARK_H_
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAWEWORK_BENCHMARK_H_
#include "normal_async_test.h"
class VideoSource;
// Codec benchmark driver. Concrete subclasses supply an encoder/decoder
// pair via GetNewEncoder()/GetNewDecoder(); Perform() then sweeps clips,
// sizes, frame rates and bit rates, writing results to a report file.
class Benchmark : public NormalAsyncTest
{
public:
Benchmark();
virtual void Perform();
protected:
Benchmark(std::string name, std::string description);
Benchmark(std::string name, std::string description, std::string resultsFileName, std::string codecName);
// Factory methods implemented by codec-specific subclasses.
virtual webrtc::VideoEncoder* GetNewEncoder() = 0;
virtual webrtc::VideoDecoder* GetNewDecoder() = 0;
virtual void PerformNormalTest();
virtual void CodecSpecific_InitBitrate();
// First line of the results file, identifying the report format version.
static const char* GetMagicStr() { return "#!benchmark1.0"; }
const VideoSource* _target; // clip/size/frame-rate combination under test
std::string _resultsFileName; // where Perform() writes its report
std::ofstream _results; // open stream for _resultsFileName
std::string _codecName; // codec label written at the top of the report
};
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAWEWORK_BENCHMARK_H_

View File

@@ -0,0 +1,500 @@
function exportfig(varargin)
%EXPORTFIG Export a figure to Encapsulated Postscript.
% EXPORTFIG(H, FILENAME) writes the figure H to FILENAME. H is
% a figure handle and FILENAME is a string that specifies the
% name of the output file.
%
% EXPORTFIG(...,PARAM1,VAL1,PARAM2,VAL2,...) specifies
% parameters that control various characteristics of the output
% file.
%
% Format Parameter:
% 'Format' one of the strings 'eps','eps2','jpeg','png','preview'
% specifies the output format. Defaults to 'eps'.
% The output format 'preview' does not generate an output
% file but instead creates a new figure window with a
% preview of the exported figure. In this case the
% FILENAME parameter is ignored.
%
% 'Preview' one of the strings 'none', 'tiff'
% specifies a preview for EPS files. Defaults to 'none'.
%
% Size Parameters:
% 'Width' a positive scalar
% specifies the width in the figure's PaperUnits
% 'Height' a positive scalar
% specifies the height in the figure's PaperUnits
%
% Specifying only one dimension sets the other dimension
% so that the exported aspect ratio is the same as the
% figure's current aspect ratio.
% If neither dimension is specified the size defaults to
% the width and height from the figure's PaperPosition.
%
% Rendering Parameters:
% 'Color' one of the strings 'bw', 'gray', 'cmyk'
% 'bw' specifies that lines and text are exported in
% black and all other objects in grayscale
% 'gray' specifies that all objects are exported in grayscale
% 'cmyk' specifies that all objects are exported in color
% using the CMYK color space
% 'Renderer' one of the strings 'painters', 'zbuffer', 'opengl'
% specifies the renderer to use
% 'Resolution' a positive scalar
% specifies the resolution in dots-per-inch.
%
% The default color setting is 'bw'.
%
% Font Parameters:
% 'FontMode' one of the strings 'scaled', 'fixed'
% 'FontSize' a positive scalar
% in 'scaled' mode multiplies with the font size of each
% text object to obtain the exported font size
% in 'fixed' mode specifies the font size of all text
% objects in points
% 'FontEncoding' one of the strings 'latin1', 'adobe'
% specifies the character encoding of the font
%
% If FontMode is 'scaled' but FontSize is not specified then a
% scaling factor is computed from the ratio of the size of the
% exported figure to the size of the actual figure. The minimum
% font size allowed after scaling is 5 points.
% If FontMode is 'fixed' but FontSize is not specified then the
% exported font sizes of all text objects is 7 points.
%
% The default 'FontMode' setting is 'scaled'.
%
% Line Width Parameters:
% 'LineMode' one of the strings 'scaled', 'fixed'
% 'LineWidth' a positive scalar
% the semantics of LineMode and LineWidth are exactly the
% same as FontMode and FontSize, except that they apply
% to line widths instead of font sizes. The minimum line
% width allowed after scaling is 0.5 points.
% If LineMode is 'fixed' but LineWidth is not specified
% then the exported line width of all line objects is 1
% point.
%
% Examples:
% exportfig(gcf,'fig1.eps','height',3);
% Exports the current figure to the file named 'fig1.eps' with
% a height of 3 inches (assuming the figure's PaperUnits is
% inches) and an aspect ratio the same as the figure's aspect
% ratio on screen.
%
% exportfig(gcf, 'fig2.eps', 'FontMode', 'fixed',...
% 'FontSize', 10, 'color', 'cmyk' );
% Exports the current figure to 'fig2.eps' in color with all
% text in 10 point fonts. The size of the exported figure is
% the figure's PaperPostion width and height.
% --- Validate required arguments ---
if (nargin < 2)
error('Too few input arguments');
end
% exportfig(H, filename, ...)
H = varargin{1};
if ~ishandle(H) | ~strcmp(get(H,'type'), 'figure')
error('First argument must be a handle to a figure.');
end
filename = varargin{2};
if ~ischar(filename)
error('Second argument must be a string.');
end
paramPairs = varargin(3:end);
% Do some validity checking on param-value pairs
if (rem(length(paramPairs),2) ~= 0)
error(['Invalid input syntax. Optional parameters and values' ...
' must be in pairs.']);
end
% --- Defaults; -1 / [] mean "not specified" ---
format = 'eps';
preview = 'none';
width = -1;
height = -1;
color = 'bw';
fontsize = -1;
fontmode='scaled';
linewidth = -1;
linemode=[];
fontencoding = 'latin1';
renderer = [];
resolution = [];
% Process param-value pairs
args = {};
for k = 1:2:length(paramPairs)
param = lower(paramPairs{k});
if (~ischar(param))
error('Optional parameter names must be strings');
end
value = paramPairs{k+1};
switch (param)
case 'format'
format = value;
if (~strcmp(format,{'eps','eps2','jpeg','png','preview'}))
error(['Format must be ''eps'', ''eps2'', ''jpeg'', ''png'' or' ...
' ''preview''.']);
end
case 'preview'
preview = value;
if (~strcmp(preview,{'none','tiff'}))
error('Preview must be ''none'' or ''tiff''.');
end
case 'width'
width = LocalToNum(value);
if(~LocalIsPositiveScalar(width))
error('Width must be a numeric scalar > 0');
end
case 'height'
height = LocalToNum(value);
if(~LocalIsPositiveScalar(height))
error('Height must be a numeric scalar > 0');
end
case 'color'
color = lower(value);
if (~strcmp(color,{'bw','gray','cmyk'}))
error('Color must be ''bw'', ''gray'' or ''cmyk''.');
end
case 'fontmode'
fontmode = lower(value);
if (~strcmp(fontmode,{'scaled','fixed'}))
error('FontMode must be ''scaled'' or ''fixed''.');
end
case 'fontsize'
fontsize = LocalToNum(value);
if(~LocalIsPositiveScalar(fontsize))
error('FontSize must be a numeric scalar > 0');
end
case 'fontencoding'
fontencoding = lower(value);
if (~strcmp(fontencoding,{'latin1','adobe'}))
error('FontEncoding must be ''latin1'' or ''adobe''.');
end
case 'linemode'
linemode = lower(value);
if (~strcmp(linemode,{'scaled','fixed'}))
error('LineMode must be ''scaled'' or ''fixed''.');
end
case 'linewidth'
linewidth = LocalToNum(value);
if(~LocalIsPositiveScalar(linewidth))
error('LineWidth must be a numeric scalar > 0');
end
case 'renderer'
renderer = lower(value);
if (~strcmp(renderer,{'painters','zbuffer','opengl'}))
error('Renderer must be ''painters'', ''zbuffer'' or ''opengl''.');
end
case 'resolution'
resolution = LocalToNum(value);
% NOTE(review): this validates the raw 'value' rather than the
% converted 'resolution' (unlike the other numeric parameters),
% so a numeric string like '300' is rejected here -- confirm.
if ~(isnumeric(value) & (prod(size(value)) == 1) & (value >= 0));
error('Resolution must be a numeric scalar >= 0');
end
otherwise
error(['Unrecognized option ' param '.']);
end
end
% --- Collect handle groups that each property change applies to ---
allLines = findall(H, 'type', 'line');
allText = findall(H, 'type', 'text');
allAxes = findall(H, 'type', 'axes');
allImages = findall(H, 'type', 'image');
allLights = findall(H, 'type', 'light');
allPatch = findall(H, 'type', 'patch');
allSurf = findall(H, 'type', 'surface');
allRect = findall(H, 'type', 'rectangle');
allFont = [allText; allAxes];
allColor = [allLines; allText; allAxes; allLights];
allMarker = [allLines; allPatch; allSurf];
allEdge = [allPatch; allSurf];
allCData = [allImages; allPatch; allSurf];
% Undo stack of (object, property, old value) used to restore the figure.
old.objs = {};
old.prop = {};
old.values = {};
% Process format and preview parameter
showPreview = strcmp(format,'preview');
if showPreview
format = 'png';
% NOTE(review): 'tempName' is not defined in this file; the MATLAB
% builtin is lowercase 'tempname' -- confirm this path was ever hit.
filename = [tempName '.png'];
end
if strncmp(format,'eps',3) & ~strcmp(preview,'none')
args = {args{:}, ['-' preview]};
end
hadError = 0;
try
% Process size parameters
paperPos = get(H, 'PaperPosition');
old = LocalPushOldData(old, H, 'PaperPosition', paperPos);
figureUnits = get(H, 'Units');
set(H, 'Units', get(H,'PaperUnits'));
figurePos = get(H, 'Position');
aspectRatio = figurePos(3)/figurePos(4);
set(H, 'Units', figureUnits);
% Missing dimensions are derived from the on-screen aspect ratio.
if (width == -1) & (height == -1)
width = paperPos(3);
height = paperPos(4);
elseif (width == -1)
width = height * aspectRatio;
elseif (height == -1)
height = width / aspectRatio;
end
set(H, 'PaperPosition', [0 0 width height]);
paperPosMode = get(H, 'PaperPositionMode');
old = LocalPushOldData(old, H, 'PaperPositionMode', paperPosMode);
set(H, 'PaperPositionMode', 'manual');
% Process rendering parameters
switch (color)
case {'bw', 'gray'}
if ~strcmp(color,'bw') & strncmp(format,'eps',3)
format = [format 'c'];
end
args = {args{:}, ['-d' format]};
%compute and set gray colormap
oldcmap = get(H,'Colormap');
newgrays = 0.30*oldcmap(:,1) + 0.59*oldcmap(:,2) + 0.11*oldcmap(:,3);
newcmap = [newgrays newgrays newgrays];
old = LocalPushOldData(old, H, 'Colormap', oldcmap);
set(H, 'Colormap', newcmap);
%compute and set ColorSpec and CData properties
old = LocalUpdateColors(allColor, 'color', old);
old = LocalUpdateColors(allAxes, 'xcolor', old);
old = LocalUpdateColors(allAxes, 'ycolor', old);
old = LocalUpdateColors(allAxes, 'zcolor', old);
old = LocalUpdateColors(allMarker, 'MarkerEdgeColor', old);
old = LocalUpdateColors(allMarker, 'MarkerFaceColor', old);
old = LocalUpdateColors(allEdge, 'EdgeColor', old);
old = LocalUpdateColors(allEdge, 'FaceColor', old);
old = LocalUpdateColors(allCData, 'CData', old);
case 'cmyk'
if strncmp(format,'eps',3)
format = [format 'c'];
args = {args{:}, ['-d' format], '-cmyk'};
else
args = {args{:}, ['-d' format]};
end
otherwise
error('Invalid Color parameter');
end
if (~isempty(renderer))
args = {args{:}, ['-' renderer]};
end
% Non-EPS formats always need an explicit resolution flag.
if (~isempty(resolution)) | ~strncmp(format,'eps',3)
if isempty(resolution)
resolution = 0;
end
args = {args{:}, ['-r' int2str(resolution)]};
end
% Process font parameters
if (~isempty(fontmode))
oldfonts = LocalGetAsCell(allFont,'FontSize');
switch (fontmode)
case 'fixed'
oldfontunits = LocalGetAsCell(allFont,'FontUnits');
old = LocalPushOldData(old, allFont, {'FontUnits'}, oldfontunits);
set(allFont,'FontUnits','points');
if (fontsize == -1)
set(allFont,'FontSize',7);
else
set(allFont,'FontSize',fontsize);
end
case 'scaled'
if (fontsize == -1)
wscale = width/figurePos(3);
hscale = height/figurePos(4);
scale = min(wscale, hscale);
else
scale = fontsize;
end
newfonts = LocalScale(oldfonts,scale,5);
set(allFont,{'FontSize'},newfonts);
otherwise
error('Invalid FontMode parameter');
end
% make sure we push the size after the units
old = LocalPushOldData(old, allFont, {'FontSize'}, oldfonts);
end
if strcmp(fontencoding,'adobe') & strncmp(format,'eps',3)
args = {args{:}, '-adobecset'};
end
% Process linewidth parameters
if (~isempty(linemode))
oldlines = LocalGetAsCell(allMarker,'LineWidth');
old = LocalPushOldData(old, allMarker, {'LineWidth'}, oldlines);
switch (linemode)
case 'fixed'
if (linewidth == -1)
set(allMarker,'LineWidth',1);
else
set(allMarker,'LineWidth',linewidth);
end
case 'scaled'
if (linewidth == -1)
wscale = width/figurePos(3);
hscale = height/figurePos(4);
scale = min(wscale, hscale);
else
scale = linewidth;
end
newlines = LocalScale(oldlines, scale, 0.5);
set(allMarker,{'LineWidth'},newlines);
otherwise
error('Invalid LineMode parameter');
end
end
% Export
print(H, filename, args{:});
catch
% Remember the failure, but fall through so the figure is restored
% before re-raising the error below.
hadError = 1;
end
% Restore figure settings
for n=1:length(old.objs)
set(old.objs{n}, old.prop{n}, old.values{n});
end
if hadError
error(deblank(lasterr));
end
% Show preview if requested
if showPreview
X = imread(filename,'png');
delete(filename);
f = figure( 'Name', 'Preview', ...
'Menubar', 'none', ...
'NumberTitle', 'off', ...
'Visible', 'off');
image(X);
axis image;
ax = findobj(f, 'type', 'axes');
set(ax, 'Units', get(H,'PaperUnits'), ...
'Position', [0 0 width height], ...
'Visible', 'off');
set(ax, 'Units', 'pixels');
axesPos = get(ax,'Position');
figPos = get(f,'Position');
rootSize = get(0,'ScreenSize');
figPos(3:4) = axesPos(3:4);
% Keep the preview window fully on screen.
if figPos(1) + figPos(3) > rootSize(3)
figPos(1) = rootSize(3) - figPos(3) - 50;
end
if figPos(2) + figPos(4) > rootSize(4)
figPos(2) = rootSize(4) - figPos(4) - 50;
end
set(f, 'Position',figPos, ...
'Visible', 'on');
end
%
% Local Functions
%
function outData = LocalPushOldData(inData, objs, prop, values)
% Append one (objects, property, old values) record to the undo stack.
outData.objs   = cat(2, inData.objs,   {objs});
outData.prop   = cat(2, inData.prop,   {prop});
outData.values = cat(2, inData.values, {values});
function cellArray = LocalGetAsCell(fig,prop);
% Read a property from one or more handles, always returning a cell array.
cellArray = get(fig,prop);
if ~iscell(cellArray) & ~isempty(cellArray)
cellArray = {cellArray};
end
function newArray = LocalScale(inArray, scale, minValue)
% Multiply each stored size by scale, clamping from below at minValue.
newArray = cell(length(inArray),1);
for idx = 1:length(inArray)
scaled = scale * inArray{idx}(1);
newArray{idx} = max(minValue, scaled);
end
function newArray = LocalMapToGray(inArray);
% Map a cell array of ColorSpecs to grayscale: single-letter char specs are
% resolved to RGB triples, then RGB triples are collapsed to a luminance
% value replicated over all three channels. Unrecognized char specs and
% empty entries are passed through unchanged.
n = length(inArray);
newArray = cell(n,1);
for k=1:n
color = inArray{k};
if (~isempty(color))
if ischar(color)
switch color(1)
case 'y'
color = [1 1 0];
case 'm'
color = [1 0 1];
case 'c'
color = [0 1 1];
case 'r'
color = [1 0 0];
case 'g'
color = [0 1 0];
case 'b'
color = [0 0 1];
case 'w'
color = [1 1 1];
case 'k'
color = [0 0 0];
otherwise
% Unknown spec: store as-is (also re-stored below).
newArray{k} = color;
end
end
% NTSC luminance weights.
if ~ischar(color)
color = 0.30*color(1) + 0.59*color(2) + 0.11*color(3);
end
end
if isempty(color) | ischar(color)
newArray{k} = color;
else
newArray{k} = [color color color];
end
end
function newArray = LocalMapCData(inArray);
% Convert true-color (MxNx3 double) CData to grayscale; other CData kinds
% (indexed, non-double) are passed through unchanged.
n = length(inArray);
newArray = cell(n,1);
for idx=1:n
cdata = inArray{idx};
if isa(cdata,'double') & (ndims(cdata) == 3)
% NTSC luminance weights, replicated over the three planes.
gray = 0.30*cdata(:,:,1) + 0.59*cdata(:,:,2) + 0.11*cdata(:,:,3);
cdata = cat(3, gray, gray, gray);
end
newArray{idx} = cdata;
end
function outData = LocalUpdateColors(inArray, prop, inData)
% Record the current value of prop for each handle on the undo stack, then
% overwrite it with its grayscale equivalent (CData is mapped per-pixel,
% every other property as a ColorSpec).
value = LocalGetAsCell(inArray,prop);
% Push the old values first so the caller can restore them afterwards.
outData.objs = {inData.objs{:}, inArray};
outData.prop = {inData.prop{:}, {prop}};
outData.values = {inData.values{:}, value};
if (~isempty(value))
if strcmp(prop,'CData')
value = LocalMapCData(value);
else
value = LocalMapToGray(value);
end
set(inArray,{prop},value);
end
function bool = LocalIsPositiveScalar(value)
% True when value is a numeric scalar strictly greater than zero.
bool = isnumeric(value) & (numel(value) == 1) & (value > 0);
function value = LocalToNum(value)
% Convert a string argument to its numeric value; numerics pass through.
if ~ischar(value)
return;
end
value = str2num(value);

View File

@@ -0,0 +1,563 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "normal_async_test.h"
#include "typedefs.h"
#include <sstream>
#include <assert.h>
#include <queue>
#include <string.h>
#include "tick_util.h"
using namespace webrtc;
// Default test (test number 1).
// The base class receives the literal test number rather than _testNo:
// base classes are constructed before any member is initialized, so
// forwarding the member read it before initialization (undefined behavior).
// _lengthEncFrame is initialized here for consistency with the other
// constructors.
NormalAsyncTest::NormalAsyncTest()
:
NormalTest("Async Normal Test 1", "A test of normal execution of the codec",
           1),
_requestKeyFrame(false),
_testNo(1),
_lengthEncFrame(0),
_appendNext(false),
_decFrameCnt(0),
_encFrameCnt(0),
_missingFrames(false),
_decodeCompleteTime(0),
_encodeCompleteTime(0),
_rttFrames(0),
_hasReceivedSLI(false),
_hasReceivedPLI(false),
_waitForKey(false)
{
}
// Default test with an explicit target bit rate.
// The base class receives the literal test number (1) instead of _testNo,
// which is not yet initialized when the base is constructed (see default
// constructor). _lengthEncFrame initialized for consistency.
NormalAsyncTest::NormalAsyncTest(WebRtc_UWord32 bitRate)
:
NormalTest("Async Normal Test 1", "A test of normal execution of the codec",
           bitRate, 1),
_requestKeyFrame(false),
_testNo(1),
_lengthEncFrame(0),
_appendNext(false),
_decFrameCnt(0),
_encFrameCnt(0),
_missingFrames(false),
_decodeCompleteTime(0),
_encodeCompleteTime(0),
_rttFrames(0),
_hasReceivedSLI(false),
_hasReceivedPLI(false),
_waitForKey(false)
{
}
// Named test with an explicit test number.
// The base class receives the 'testNo' parameter directly instead of the
// _testNo member, which is not yet initialized when the base runs
// (bases are constructed before members -- reading _testNo there was UB).
NormalAsyncTest::NormalAsyncTest(std::string name, std::string description,
                                 unsigned int testNo)
:
NormalTest(name, description, testNo),
_requestKeyFrame(false),
_testNo(testNo),
_lengthEncFrame(0),
_appendNext(false),
_decFrameCnt(0),
_encFrameCnt(0),
_missingFrames(false),
_decodeCompleteTime(0),
_encodeCompleteTime(0),
_rttFrames(0),
_hasReceivedSLI(false),
_hasReceivedPLI(false),
_waitForKey(false)
{
}
// Named test with explicit bit rate and test number.
// Passes the 'testNo' parameter (not the uninitialized _testNo member) to
// the base class; bases are constructed before members.
NormalAsyncTest::NormalAsyncTest(std::string name, std::string description,
                                 WebRtc_UWord32 bitRate, unsigned int testNo)
:
NormalTest(name, description, bitRate, testNo),
_requestKeyFrame(false),
_testNo(testNo),
_lengthEncFrame(0),
_appendNext(false),
_decFrameCnt(0),
_encFrameCnt(0),
_missingFrames(false),
_decodeCompleteTime(0),
_encodeCompleteTime(0),
_rttFrames(0),
_hasReceivedSLI(false),
_hasReceivedPLI(false),
_waitForKey(false)
{
}
// Named test with explicit bit rate, test number and a simulated
// round-trip time expressed in frames (used by the SLI/PLI feedback
// queues in Encode()).
// Passes the 'testNo' parameter (not the uninitialized _testNo member) to
// the base class; bases are constructed before members.
NormalAsyncTest::NormalAsyncTest(std::string name, std::string description,
                                 WebRtc_UWord32 bitRate, unsigned int testNo,
                                 unsigned int rttFrames)
:
NormalTest(name, description, bitRate, testNo),
_requestKeyFrame(false),
_testNo(testNo),
_lengthEncFrame(0),
_appendNext(false),
_decFrameCnt(0),
_encFrameCnt(0),
_missingFrames(false),
_decodeCompleteTime(0),
_encodeCompleteTime(0),
_rttFrames(rttFrames),
_hasReceivedSLI(false),
_hasReceivedPLI(false),
_waitForKey(false)
{
}
// Opens all files for one test run. Output file names default to
// "../../(out|encoded)_normaltest<N>.yuv" when not configured. The decoded
// output is opened in append mode on every run after the first
// (_appendNext), so repeated runs accumulate into a single file.
// Exits the process if any file cannot be opened.
void
NormalAsyncTest::Setup()
{
Test::Setup();
std::stringstream ss;
std::string strTestNo;
ss << _testNo;
ss >> strTestNo;
// Check if settings exist. Otherwise use defaults.
if (_outname == "")
{
_outname = "../../out_normaltest" + strTestNo + ".yuv";
}
if (_encodedName == "")
{
_encodedName = "../../encoded_normaltest" + strTestNo + ".yuv";
}
if ((_sourceFile = fopen(_inname.c_str(), "rb")) == NULL)
{
printf("Cannot read file %s.\n", _inname.c_str());
exit(1);
}
if ((_encodedFile = fopen(_encodedName.c_str(), "wb")) == NULL)
{
printf("Cannot write encoded file.\n");
exit(1);
}
// "ab" on every run after the first so decoded frames accumulate.
char mode[3] = "wb";
if (_appendNext)
{
strncpy(mode, "ab", 3);
}
if ((_decodedFile = fopen(_outname.c_str(), mode)) == NULL)
{
printf("Cannot write file %s.\n", _outname.c_str());
exit(1);
}
_appendNext = true;
}
// Closes the source and decoded-output files opened in Setup().
// NOTE(review): _encodedFile is opened in Setup() but not closed here --
// confirm whether a caller closes it, otherwise the handle leaks.
void
NormalAsyncTest::Teardown()
{
Test::Teardown();
fclose(_sourceFile);
fclose(_decodedFile);
}
// Releases the encoded frame buffer and any codec-specific info owned by
// this tuple. delete on a null pointer is a no-op, so no explicit checks
// are required.
FrameQueueTuple::~FrameQueueTuple()
{
    // TODO(holmer): implement virtual function for deleting this and
    // remove warnings
    delete _codecSpecificInfo;
    delete _frame;
}
// Appends an encoded frame (plus optional codec-specific info) to the
// queue; the queue takes ownership via a newly allocated tuple.
void FrameQueue::PushFrame(TestVideoEncodedBuffer *frame,
                           void* codecSpecificInfo)
{
    WriteLockScoped lock(_queueRWLock);
    _frameBufferQueue.push(new FrameQueueTuple(frame, codecSpecificInfo));
}
// Removes and returns the oldest queued frame; ownership transfers to the
// caller. Returns NULL when the queue is empty.
FrameQueueTuple* FrameQueue::PopFrame()
{
    WriteLockScoped lock(_queueRWLock);
    FrameQueueTuple* oldest = NULL;
    if (!_frameBufferQueue.empty())
    {
        oldest = _frameBufferQueue.front();
        _frameBufferQueue.pop();
    }
    return oldest;
}
// True when no frames are waiting; takes only the read lock.
bool FrameQueue::Empty()
{
    ReadLockScoped lock(_queueRWLock);
    const bool noFrames = _frameBufferQueue.empty();
    return noFrames;
}
// Total number of encoded payload bytes delivered to this callback so far.
WebRtc_UWord32 VideoEncodeCompleteCallback::EncodedBytes()
{
return _encodedBytes;
}
// Encoder callback, invoked once per encoded frame: notifies the test,
// copies the frame (and codec-specific info) into a new buffer, optionally
// writes the payload to the encoded output file, and queues the copy for
// later decoding. The 'fragmentation' argument is unused here.
WebRtc_Word32
VideoEncodeCompleteCallback::Encoded(EncodedImage& encodedImage,
const void* codecSpecificInfo,
const webrtc::RTPFragmentationHeader*
fragmentation)
{
_test.Encoded(encodedImage);
TestVideoEncodedBuffer *newBuffer = new TestVideoEncodedBuffer();
//newBuffer->VerifyAndAllocate(encodedImage._length);
// Allocate the full capacity (_size), not just the used length.
newBuffer->VerifyAndAllocate(encodedImage._size);
_encodedBytes += encodedImage._length;
// If _frameQueue would have been a fixed sized buffer we could have asked
// it for an empty frame and then just do:
// emptyFrame->SwapBuffers(encodedBuffer);
// This is how it should be done in Video Engine to save in on memcpys
void* codecSpecificInfoCopy =
_test.CopyCodecSpecificInfo(codecSpecificInfo);
_test.CopyEncodedImage(*newBuffer, encodedImage, codecSpecificInfoCopy);
if (_encodedFile != NULL)
{
fwrite(newBuffer->GetBuffer(), 1, newBuffer->GetLength(), _encodedFile);
}
// The queue takes ownership of newBuffer and the copied info.
_frameQueue->PushFrame(newBuffer, codecSpecificInfoCopy);
return 0;
}
// Total number of decoded raw bytes delivered to this callback so far.
WebRtc_UWord32 VideoDecodeCompleteCallback::DecodedBytes()
{
return _decodedBytes;
}
// Decoder callback, invoked once per decoded frame: notifies the test,
// accumulates the decoded byte count, and appends the raw frame to the
// decoded output file (if one is open).
WebRtc_Word32
VideoDecodeCompleteCallback::Decoded(RawImage& image)
{
_test.Decoded(image);
_decodedBytes += image._length;
if (_decodedFile != NULL)
{
fwrite(image._buffer, 1, image._length, _decodedFile);
}
return 0;
}
// Forwards reference-frame notifications from the decoder to the test
// (used for the simulated SLI/PLI feedback).
WebRtc_Word32
VideoDecodeCompleteCallback::ReceivedDecodedReferenceFrame(
const WebRtc_UWord64 pictureId)
{
return _test.ReceivedDecodedReferenceFrame(pictureId);
}
// Forwards per-frame picture-id notifications from the decoder to the test.
WebRtc_Word32
VideoDecodeCompleteCallback::ReceivedDecodedFrame(
const WebRtc_UWord64 pictureId)
{
return _test.ReceivedDecodedFrame(pictureId);
}
// Bookkeeping on encode completion: records the completion time, counts
// the frame, and accumulates the per-frame pipeline delay (completion time
// minus the submit time stored in _encodeTimes, keyed by RTP timestamp).
void
NormalAsyncTest::Encoded(const EncodedImage& encodedImage)
{
_encodeCompleteTime = tGetTime();
_encFrameCnt++;
_totalEncodePipeTime += _encodeCompleteTime -
_encodeTimes[encodedImage._timeStamp];
}
// Bookkeeping on decode completion: records the completion time, counts
// the frame, accumulates the per-frame decode pipeline delay (keyed by RTP
// timestamp), and remembers the decoded frame dimensions.
void
NormalAsyncTest::Decoded(const RawImage& decodedImage)
{
_decodeCompleteTime = tGetTime();
_decFrameCnt++;
_totalDecodePipeTime += _decodeCompleteTime -
_decodeTimes[decodedImage._timeStamp];
_decodedWidth = decodedImage._width;
_decodedHeight = decodedImage._height;
}
// Runs one complete encode/decode pass over the default foreman CIF clip:
// encodes frame by frame, drains the encoded-frame queue through the
// packet-loss simulator and the decoder, then prints and logs bitrate and
// timing statistics.
void
NormalAsyncTest::Perform()
{
_inname = "test/testFiles/foreman_cif.yuv";
CodecSettings(352, 288, 30, _bitRate);
Setup();
_inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
_decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
if(_encoder->InitEncode(&_inst, 1, 1440) < 0)
{
exit(EXIT_FAILURE);
}
_decoder->InitDecode(&_inst, 1);
FrameQueue frameQueue;
VideoEncodeCompleteCallback encCallback(_encodedFile, &frameQueue, *this);
VideoDecodeCompleteCallback decCallback(_decodedFile, *this);
_encoder->RegisterEncodeCompleteCallback(&encCallback);
_decoder->RegisterDecodeCompleteCallback(&decCallback);
if (SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK)
{
exit(EXIT_FAILURE);
}
_totalEncodeTime = _totalDecodeTime = 0;
_totalEncodePipeTime = _totalDecodePipeTime = 0;
bool complete = false;
_framecnt = 0;
_encFrameCnt = 0;
_decFrameCnt = 0;
_sumEncBytes = 0;
_lengthEncFrame = 0;
double starttime = tGetTime();
while (!complete)
{
// Re-apply the target bitrate before every frame.
CodecSpecific_InitBitrate();
complete = Encode();
if (!frameQueue.Empty() || complete)
{
while (!frameQueue.Empty())
{
_frameToDecode =
static_cast<FrameQueueTuple *>(frameQueue.PopFrame());
int lost = DoPacketLoss();
if (lost == 2)
{
// Lost the whole frame, continue
_missingFrames = true;
delete _frameToDecode;
_frameToDecode = NULL;
continue;
}
int ret = Decode(lost);
delete _frameToDecode;
_frameToDecode = NULL;
if (ret < 0)
{
fprintf(stderr,"\n\nError in decoder: %d\n\n", ret);
exit(EXIT_FAILURE);
}
else if (ret == 0)
{
_framecnt++;
}
else
{
fprintf(stderr,
"\n\nPositive return value from decode!\n\n");
}
}
}
}
double endtime = tGetTime();
double totalExecutionTime = endtime - starttime;
printf("Total execution time: %.1f s\n", totalExecutionTime);
_sumEncBytes = encCallback.EncodedBytes();
double actualBitRate = ActualBitRate(_encFrameCnt) / 1000.0;
// NOTE(review): divides by _encFrameCnt/_decFrameCnt -- these are zero if
// the source produced no frames; confirm inputs are always non-empty.
double avgEncTime = _totalEncodeTime / _encFrameCnt;
double avgDecTime = _totalDecodeTime / _decFrameCnt;
printf("Actual bitrate: %f kbps\n", actualBitRate);
printf("Average encode time: %.1f ms\n", 1000 * avgEncTime);
printf("Average decode time: %.1f ms\n", 1000 * avgDecTime);
printf("Average encode pipeline time: %.1f ms\n",
1000 * _totalEncodePipeTime / _encFrameCnt);
printf("Average decode pipeline time: %.1f ms\n",
1000 * _totalDecodePipeTime / _decFrameCnt);
printf("Number of encoded frames: %u\n", _encFrameCnt);
printf("Number of decoded frames: %u\n", _decFrameCnt);
(*_log) << "Actual bitrate: " << actualBitRate << " kbps\tTarget: " <<
_bitRate << " kbps" << std::endl;
(*_log) << "Average encode time: " << avgEncTime << " s" << std::endl;
(*_log) << "Average decode time: " << avgDecTime << " s" << std::endl;
_encoder->Release();
_decoder->Release();
Teardown();
}
// Reads the next raw frame from the source file and submits it to the
// encoder. Also advances the simulated SLI/PLI feedback queues (each entry
// "arrives" after its delay, measured in frames, reaches zero) and forces
// a key frame when a PLI has arrived. Returns true when the source file is
// exhausted, false otherwise.
bool
NormalAsyncTest::Encode()
{
_lengthEncFrame = 0;
// NOTE(review): fread's return value is ignored; EOF is detected via
// feof() below before encoding, so a short final read is never encoded.
fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile);
_inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _sourceBuffer);
// RTP timestamp: 90 kHz clock advanced by one frame interval per frame.
_inputVideoBuffer.SetTimeStamp((unsigned int)
(_encFrameCnt * 9e4 / _inst.maxFramerate));
_inputVideoBuffer.SetWidth(_inst.width);
_inputVideoBuffer.SetHeight(_inst.height);
RawImage rawImage;
VideoBufferToRawImage(_inputVideoBuffer, rawImage);
if (feof(_sourceFile) != 0)
{
return true;
}
_encodeCompleteTime = 0;
_encodeTimes[rawImage._timeStamp] = tGetTime();
VideoFrameType frameType = kDeltaFrame;
// check SLI queue
_hasReceivedSLI = false;
while (!_signalSLI.empty() && _signalSLI.front().delay == 0)
{
// SLI message has arrived at sender side
_hasReceivedSLI = true;
_pictureIdSLI = _signalSLI.front().id;
_signalSLI.pop_front();
}
// decrement SLI queue times
for (std::list<fbSignal>::iterator it = _signalSLI.begin();
it !=_signalSLI.end(); it++)
{
(*it).delay--;
}
// check PLI queue
_hasReceivedPLI = false;
while (!_signalPLI.empty() && _signalPLI.front().delay == 0)
{
// PLI message has arrived at sender side
_hasReceivedPLI = true;
_signalPLI.pop_front();
}
// decrement PLI queue times
for (std::list<fbSignal>::iterator it = _signalPLI.begin();
it != _signalPLI.end(); it++)
{
(*it).delay--;
}
if (_hasReceivedPLI)
{
// respond to PLI by encoding a key frame
frameType = kKeyFrame;
_hasReceivedPLI = false;
_hasReceivedSLI = false; // don't trigger both at once
}
void* codecSpecificInfo = CreateEncoderSpecificInfo();
int ret = _encoder->Encode(rawImage, codecSpecificInfo, frameType);
if (codecSpecificInfo != NULL)
{
// TODO(holmer): implement virtual function for deleting this and
// remove warnings
delete codecSpecificInfo;
codecSpecificInfo = NULL;
}
// If the encode callback already fired, use its completion timestamp;
// otherwise fall back to the current time (synchronous estimate).
if (_encodeCompleteTime > 0)
{
_totalEncodeTime += _encodeCompleteTime -
_encodeTimes[rawImage._timeStamp];
}
else
{
_totalEncodeTime += tGetTime() - _encodeTimes[rawImage._timeStamp];
}
assert(ret >= 0);
return false;
}
// Decodes the frame held in _frameToDecode. lossValue != 0 marks the frame
// as incomplete. Decoder feedback requests (SLI/PLI) are converted into
// delayed fbSignal entries so the encoder sees them _rttFrames later,
// simulating a round trip. Returns the decoder status (remapped to OK for
// the handled feedback codes); negative on unhandled error.
int
NormalAsyncTest::Decode(int lossValue)
{
    _sumEncBytes += _frameToDecode->_frame->GetLength();
    double starttime = 0;
    EncodedImage encodedImage;
    VideoEncodedBufferToEncodedImage(*(_frameToDecode->_frame), encodedImage);
    encodedImage._completeFrame = !lossValue;
    _decodeCompleteTime = 0;
    _decodeTimes[encodedImage._timeStamp] = tGetTime();
    int ret = WEBRTC_VIDEO_CODEC_OK;
    // While waiting for a key frame (after a decode error), delta frames
    // are skipped entirely.
    if (!_waitForKey || encodedImage._frameType == kKeyFrame)
    {
        _waitForKey = false;
        ret = _decoder->Decode(encodedImage, _missingFrames,
                               _frameToDecode->_codecSpecificInfo);
        if (ret >= 0)
        {
            _missingFrames = false;
        }
    }
    // check for SLI
    if (ret == WEBRTC_VIDEO_CODEC_REQUEST_SLI)
    {
        // add an SLI feedback to the feedback "queue"
        // to be delivered to encoder with _rttFrames delay
        _signalSLI.push_back(fbSignal(_rttFrames,
            static_cast<WebRtc_UWord8>((_lastDecPictureId) & 0x3f))); // 6 lsb
        ret = WEBRTC_VIDEO_CODEC_OK;
    }
    else if (ret == WEBRTC_VIDEO_CODEC_ERR_REQUEST_SLI)
    {
        // add an SLI feedback to the feedback "queue"
        // to be delivered to encoder with _rttFrames delay
        // (decode failed, so reference the *next* picture id)
        _signalSLI.push_back(fbSignal(_rttFrames,
            static_cast<WebRtc_UWord8>((_lastDecPictureId + 1) & 0x3f)));//6 lsb
        ret = WEBRTC_VIDEO_CODEC_OK;
    }
    else if (ret == WEBRTC_VIDEO_CODEC_ERROR)
    {
        // wait for new key frame
        // add an PLI feedback to the feedback "queue"
        // to be delivered to encoder with _rttFrames delay
        _signalPLI.push_back(fbSignal(_rttFrames, 0 /* picId not used*/));
        _waitForKey = true;
        ret = WEBRTC_VIDEO_CODEC_OK;
    }
    // Prefer the decode-complete callback's timestamp; fall back to the
    // synchronous call duration if it has not fired yet.
    if (_decodeCompleteTime > 0)
    {
        _totalDecodeTime += _decodeCompleteTime -
            _decodeTimes[encodedImage._timeStamp];
    }
    else
    {
        _totalDecodeTime += tGetTime() - _decodeTimes[encodedImage._timeStamp];
    }
    return ret;
}
void NormalAsyncTest::CodecSpecific_InitBitrate()
{
if (_bitRate == 0)
{
_encoder->SetRates(600, _inst.maxFramerate);
}
else
{
_encoder->SetRates(_bitRate, _inst.maxFramerate);
}
}
// Clones the relevant fields of an EncodedImage (payload, frame type,
// timestamp, dimensions) into a TestVideoEncodedBuffer.
void NormalAsyncTest::CopyEncodedImage(TestVideoEncodedBuffer& dest,
                                       EncodedImage& src,
                                       void* /*codecSpecificInfo*/) const
{
    dest.CopyBuffer(src._length, src._buffer);
    dest.SetFrameType(src._frameType);
    dest.SetTimeStamp(src._timeStamp);
    dest.SetCaptureWidth(static_cast<WebRtc_UWord16>(src._encodedWidth));
    dest.SetCaptureHeight(static_cast<WebRtc_UWord16>(src._encodedHeight));
}
// Current wall-clock time in seconds (millisecond resolution).
double
NormalAsyncTest::tGetTime()
{
    const double nowMs = static_cast<double>(TickTime::MillisecondTimestamp());
    return nowMs / 1000.0;
}

View File

@@ -0,0 +1,184 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_NORMAL_ASYNC_TEST_H_
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_NORMAL_ASYNC_TEST_H_
#include "common_types.h"
#include "normal_test.h"
#include "rw_lock_wrapper.h"
#include <list>
#include <map>
#include <queue>
// Pairs an encoded frame with its (optional) codec-specific side info while
// it travels through the encode -> decode FrameQueue.
class FrameQueueTuple
{
public:
    FrameQueueTuple(TestVideoEncodedBuffer *frame,
                    const void* codecSpecificInfo = NULL)
    :
    _frame(frame),
    _codecSpecificInfo(codecSpecificInfo)
    {};
    // Defined in the .cc file -- NOTE(review): presumably frees _frame
    // and/or _codecSpecificInfo; confirm ownership there.
    ~FrameQueueTuple();
    TestVideoEncodedBuffer* _frame;
    const void* _codecSpecificInfo;  // opaque; may be NULL
};
// Lock-protected FIFO of FrameQueueTuples exchanged between the encode
// callback (producer) and the decode loop (consumer).
class FrameQueue
{
public:
    FrameQueue()
    :
    _queueRWLock(*webrtc::RWLockWrapper::CreateRWLock()),
    _prevTS(-1)
    {
    }
    ~FrameQueue()
    {
        // The lock is held by reference; it was created in the ctor and
        // must be freed here.
        delete &_queueRWLock;
    }
    void PushFrame(TestVideoEncodedBuffer *frame,
                   void* codecSpecificInfo = NULL);
    FrameQueueTuple* PopFrame();
    bool Empty();
private:
    webrtc::RWLockWrapper& _queueRWLock;
    std::queue<FrameQueueTuple *> _frameBufferQueue;
    // NOTE(review): presumably the timestamp of the last pushed frame,
    // initialized to -1 (none yet); confirm in the .cc implementation.
    WebRtc_Word64 _prevTS;
};
// feedback signal to encoder (simulated SLI/PLI): 'delay' counts down one
// per encoded frame to model round-trip time; 'id' is the 6-bit picture id
// for SLI (unused for PLI).
struct fbSignal
{
    fbSignal(int d, WebRtc_UWord8 pid) : delay(d), id(pid) {};
    int delay;
    WebRtc_UWord8 id;
};
// Callback-driven (asynchronous) codec test. Extends NormalTest with
// encode/decode completion callbacks, per-timestamp timing maps, and a
// simulated feedback channel (SLI/PLI delivered with a configurable RTT
// measured in frames).
class NormalAsyncTest : public NormalTest
{
public:
    NormalAsyncTest();
    NormalAsyncTest(WebRtc_UWord32 bitRate);
    NormalAsyncTest(std::string name, std::string description,
                    unsigned int testNo);
    NormalAsyncTest(std::string name, std::string description,
                    WebRtc_UWord32 bitRate, unsigned int testNo);
    NormalAsyncTest(std::string name, std::string description,
                    WebRtc_UWord32 bitRate, unsigned int testNo,
                    unsigned int rttFrames);
    virtual ~NormalAsyncTest() {};
    virtual void Perform();
    // Invoked by the encode-complete callback for every encoded frame.
    virtual void Encoded(const webrtc::EncodedImage& encodedImage);
    // Invoked by the decode-complete callback for every decoded frame.
    virtual void Decoded(const webrtc::RawImage& decodedImage);
    virtual void*
    CopyCodecSpecificInfo(const void* /*codecSpecificInfo */) const
    { return NULL; };
    virtual void CopyEncodedImage(TestVideoEncodedBuffer& dest,
                                  webrtc::EncodedImage& src,
                                  void* /*codecSpecificInfo*/) const;
    // Subclasses may return codec-specific encode parameters; base has none.
    virtual void* CreateEncoderSpecificInfo() const { return NULL; };
    virtual WebRtc_Word32
    ReceivedDecodedReferenceFrame(const WebRtc_UWord64 pictureId) { return 0;};
    virtual WebRtc_Word32
    ReceivedDecodedFrame(const WebRtc_UWord64 pictureId) { return 0;};
protected:
    virtual void Setup();
    virtual void Teardown();
    // Encodes one frame; returns true at end of input.
    virtual bool Encode();
    // Decodes the pending frame; lossValue != 0 marks it incomplete.
    virtual int Decode(int lossValue = 0);
    virtual void CodecSpecific_InitBitrate();
    virtual int SetCodecSpecificParameters() {return 0;};
    double tGetTime();// return time in sec
    FILE* _sourceFile;
    FILE* _decodedFile;
    WebRtc_UWord32 _decodedWidth;
    WebRtc_UWord32 _decodedHeight;
    // Accumulated timing statistics, in seconds.
    double _totalEncodeTime;
    double _totalDecodeTime;
    double _decodeCompleteTime;
    double _encodeCompleteTime;
    double _totalEncodePipeTime;
    double _totalDecodePipeTime;
    int _framecnt;
    int _encFrameCnt;
    int _decFrameCnt;
    bool _requestKeyFrame;
    unsigned int _testNo;
    unsigned int _lengthEncFrame;
    FrameQueueTuple* _frameToDecode;
    bool _appendNext;
    // Submission times keyed by RTP timestamp, matched in the callbacks.
    std::map<WebRtc_UWord32, double> _encodeTimes;
    std::map<WebRtc_UWord32, double> _decodeTimes;
    bool _missingFrames;
    // Pending feedback messages; delay counts down once per encoded frame.
    std::list<fbSignal> _signalSLI;
    int _rttFrames;
    mutable bool _hasReceivedSLI;
    WebRtc_UWord8 _pictureIdSLI;
    WebRtc_UWord64 _lastDecPictureId;
    std::list<fbSignal> _signalPLI;
    bool _hasReceivedPLI;
    // Set after a decode error; delta frames are dropped until a key frame.
    bool _waitForKey;
};
// Encode-complete callback: optionally writes each encoded frame to a file,
// queues it for decoding, and notifies the owning test.
class VideoEncodeCompleteCallback : public webrtc::EncodedImageCallback
{
public:
    VideoEncodeCompleteCallback(FILE* encodedFile, FrameQueue *frameQueue,
                                NormalAsyncTest& test)
    :
    _encodedFile(encodedFile),
    _frameQueue(frameQueue),
    _test(test),
    _encodedBytes(0)
    {}
    WebRtc_Word32
    Encoded(webrtc::EncodedImage& encodedImage,
            const void* codecSpecificInfo = NULL,
            const webrtc::RTPFragmentationHeader* fragmentation = NULL);
    // Total payload bytes seen so far.
    WebRtc_UWord32 EncodedBytes();
private:
    FILE* _encodedFile;     // may be NULL (no file output)
    FrameQueue* _frameQueue;
    NormalAsyncTest& _test;
    WebRtc_UWord32 _encodedBytes;
};
// Decode-complete callback: optionally writes each decoded frame to a file,
// forwards decoder feedback to the owning test.
class VideoDecodeCompleteCallback : public webrtc::DecodedImageCallback
{
public:
    VideoDecodeCompleteCallback(FILE* decodedFile, NormalAsyncTest& test)
    :
    _decodedFile(decodedFile),
    _test(test),
    _decodedBytes(0)
    {}
    virtual WebRtc_Word32 Decoded(webrtc::RawImage& decodedImage);
    virtual WebRtc_Word32
    ReceivedDecodedReferenceFrame(const WebRtc_UWord64 pictureId);
    virtual WebRtc_Word32 ReceivedDecodedFrame(const WebRtc_UWord64 pictureId);
    // Total raw bytes seen so far.
    WebRtc_UWord32 DecodedBytes();
private:
    FILE* _decodedFile;     // may be NULL (no file output)
    NormalAsyncTest& _test;
    WebRtc_UWord32 _decodedBytes;
};
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_NORMAL_ASYNC_TEST_H_

View File

@@ -0,0 +1,246 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "normal_test.h"
#include <time.h>
#include <sstream>
#include <string.h>
// Default test: fixed name/description, test number 1.
// Fix: _requestKeyFrame was left uninitialized by this constructor (the
// other two constructors set it); initializer list reordered to the actual
// initialization order (base first, then members in declaration order).
NormalTest::NormalTest()
:
Test("Normal Test 1", "A test of normal execution of the codec"),
_requestKeyFrame(false),
_testNo(1),
_lengthEncFrame(0),
_appendNext(false)
{
}
// Named test with an explicit test number.
// Initializer list reordered to the actual initialization order (base
// class first, then members in declaration order) to avoid -Wreorder and
// to make the real construction sequence obvious.
NormalTest::NormalTest(std::string name, std::string description, unsigned int testNo)
:
Test(name, description),
_requestKeyFrame(false),
_testNo(testNo),
_lengthEncFrame(0),
_appendNext(false)
{
}
// Named test with explicit target bit rate and test number.
// Initializer list reordered to the actual initialization order (base
// class first, then members in declaration order) to avoid -Wreorder.
NormalTest::NormalTest(std::string name, std::string description, WebRtc_UWord32 bitRate, unsigned int testNo)
:
Test(name, description, bitRate),
_requestKeyFrame(false),
_testNo(testNo),
_lengthEncFrame(0),
_appendNext(false)
{
}
// Opens the input YUV file, the encoded-output file, and the decoded-output
// file. Default output paths embed the test number. Exits the process on
// any open failure. On the second and later calls the decoded file is
// opened in append mode (_appendNext).
void
NormalTest::Setup()
{
    Test::Setup();
    // Render the test number into a string for the default file names.
    std::stringstream ss;
    std::string strTestNo;
    ss << _testNo;
    ss >> strTestNo;
    // Check if settings exist. Otherwise use defaults.
    if (_outname == "")
    {
        _outname = "../../out_normaltest" + strTestNo + ".yuv";
    }
    if (_encodedName == "")
    {
        _encodedName = "../../encoded_normaltest" + strTestNo + ".yuv";
    }
    if ((_sourceFile = fopen(_inname.c_str(), "rb")) == NULL)
    {
        printf("Cannot read file %s.\n", _inname.c_str());
        exit(1);
    }
    if ((_encodedFile = fopen(_encodedName.c_str(), "wb")) == NULL)
    {
        printf("Cannot write encoded file.\n");
        exit(1);
    }
    // First run truncates the decoded file; subsequent runs append so a
    // multi-part test accumulates output in one file.
    char mode[3] = "wb";
    if (_appendNext)
    {
        strncpy(mode, "ab", 3);
    }
    if ((_decodedFile = fopen(_outname.c_str(), mode)) == NULL)
    {
        printf("Cannot write file %s.\n", _outname.c_str());
        exit(1);
    }
    _appendNext = true;
}
void
NormalTest::Teardown()
{
Test::Teardown();
fclose(_sourceFile);
fclose(_decodedFile);
}
// Synchronous encode/decode loop over the foreman CIF sequence: encodes a
// frame, applies optional packet loss, writes the encoded bytes, decodes,
// writes the decoded frame, then drains the decoder pipeline and reports
// rate and timing statistics.
void
NormalTest::Perform()
{
    _inname = "../../../../testFiles/foreman.yuv";
    CodecSettings(352, 288, 30, _bitRate);
    Setup();
    _inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
    _decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
    _encodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
    _encoder->InitEncode(&_inst, 1, 1460);
    CodecSpecific_InitBitrate();
    _decoder->InitDecode(&_inst,1);
    _totalEncodeTime = _totalDecodeTime = 0;
    _framecnt = 0;
    _sumEncBytes = 0;
    _lengthEncFrame = 0;
    int decodeLength = 0;
    // Encode() returns true at end of input.
    while (!Encode())
    {
        DoPacketLoss();
        _encodedVideoBuffer.UpdateLength(_encodedVideoBuffer.GetLength());
        fwrite(_encodedVideoBuffer.GetBuffer(), 1, _encodedVideoBuffer.GetLength(), _encodedFile);
        decodeLength = Decode();
        if (decodeLength < 0)
        {
            fprintf(stderr,"\n\nError in decoder: %d\n\n", decodeLength);
            exit(EXIT_FAILURE);
        }
        fwrite(_decodedVideoBuffer.GetBuffer(), 1, decodeLength, _decodedFile);
        CodecSpecific_InitBitrate();
        _framecnt++;
    }
    // Ensure we empty the decoding queue.
    while (decodeLength > 0)
    {
        decodeLength = Decode();
        if (decodeLength < 0)
        {
            fprintf(stderr,"\n\nError in decoder: %d\n\n", decodeLength);
            exit(EXIT_FAILURE);
        }
        fwrite(_decodedVideoBuffer.GetBuffer(), 1, decodeLength, _decodedFile);
    }
    // Report averages over the full run.
    double actualBitRate = ActualBitRate(_framecnt) / 1000.0;
    double avgEncTime = _totalEncodeTime / _framecnt;
    double avgDecTime = _totalDecodeTime / _framecnt;
    printf("Actual bitrate: %f kbps\n", actualBitRate);
    printf("Average encode time: %f s\n", avgEncTime);
    printf("Average decode time: %f s\n", avgDecTime);
    (*_log) << "Actual bitrate: " << actualBitRate << " kbps\tTarget: " << _bitRate << " kbps" << std::endl;
    (*_log) << "Average encode time: " << avgEncTime << " s" << std::endl;
    (*_log) << "Average decode time: " << avgDecTime << " s" << std::endl;
    _inputVideoBuffer.Free();
    _encodedVideoBuffer.Reset();
    _decodedVideoBuffer.Free();
    _encoder->Release();
    _decoder->Release();
    Teardown();
}
// Reads and encodes one frame synchronously. Returns true at end of input.
// NOTE(review): the actual encoder call is commented out (scaffolding);
// with it disabled, _lengthEncFrame stays 0 and the retry loop exits via
// the 50-attempt failure path.
bool
NormalTest::Encode()
{
    _lengthEncFrame = 0;
    fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile);
    // EOF check only valid after the read above.
    if (feof(_sourceFile) != 0)
    {
        return true;
    }
    _inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _sourceBuffer);
    _inputVideoBuffer.SetTimeStamp(_framecnt);
    // This multiple attempt ridiculousness is to accomodate VP7:
    // 1. The wrapper can unilaterally reduce the framerate for low bitrates.
    // 2. The codec inexplicably likes to reject some frames. Perhaps there
    //    is a good reason for this...
    int encodingAttempts = 0;
    double starttime = 0;
    double endtime = 0;
    // Retry until the codec produces a non-empty encoded frame (or the
    // attempt limit is hit).
    while (_lengthEncFrame == 0)
    {
        starttime = clock()/(double)CLOCKS_PER_SEC;
        _inputVideoBuffer.SetWidth(_inst.width);
        _inputVideoBuffer.SetHeight(_inst.height);
        //_lengthEncFrame = _encoder->Encode(_inputVideoBuffer, _encodedVideoBuffer, _frameInfo,
        //    _inst.frameRate, _requestKeyFrame && !(_framecnt%50));
        endtime = clock()/(double)CLOCKS_PER_SEC;
        _encodedVideoBuffer.SetCaptureHeight(_inst.height);
        _encodedVideoBuffer.SetCaptureWidth(_inst.width);
        if (_lengthEncFrame < 0)
        {
            (*_log) << "Error in encoder: " << _lengthEncFrame << std::endl;
            fprintf(stderr,"\n\nError in encoder: %d\n\n", _lengthEncFrame);
            exit(EXIT_FAILURE);
        }
        _sumEncBytes += _lengthEncFrame;
        encodingAttempts++;
        if (encodingAttempts > 50)
        {
            (*_log) << "Unable to encode frame: " << _framecnt << std::endl;
            fprintf(stderr,"\n\nUnable to encode frame: %d\n\n", _framecnt);
            exit(EXIT_FAILURE);
        }
    }
    // Only the final (successful) attempt's duration is counted.
    _totalEncodeTime += endtime - starttime;
    if (encodingAttempts > 1)
    {
        (*_log) << encodingAttempts << " attempts required to encode frame: " <<
            _framecnt + 1 << std::endl;
        fprintf(stderr,"\n%d attempts required to encode frame: %d\n", encodingAttempts,
            _framecnt + 1);
    }
    return false;
}
// Decodes the currently buffered encoded frame synchronously, returning the
// decoded length (negative on error). NOTE(review): the actual decoder call
// and the timing accumulation are commented out (scaffolding), so this
// currently always returns 0; 'starttime' is consequently unused.
int
NormalTest::Decode()
{
    double starttime = clock()/(double)CLOCKS_PER_SEC;
    _encodedVideoBuffer.SetWidth(_inst.width);
    _encodedVideoBuffer.SetHeight(_inst.height);
    int lengthDecFrame = 0;
    //int lengthDecFrame = _decoder->Decode(_encodedVideoBuffer, _decodedVideoBuffer);
    //_totalDecodeTime += (double)((clock()/(double)CLOCKS_PER_SEC) - starttime);
    if (lengthDecFrame < 0)
    {
        return lengthDecFrame;
    }
    // Consume the encoded buffer so the next frame starts clean.
    _encodedVideoBuffer.Reset();
    _encodedVideoBuffer.UpdateLength(0);
    return lengthDecFrame;
}

View File

@@ -0,0 +1,46 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_NORMAL_TEST_H_
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_NORMAL_TEST_H_
#include "test.h"
// Synchronous baseline codec test: encode/decode a sequence frame by frame
// and report rate and timing statistics. Subclasses hook in codec-specific
// bitrate setup and optional packet-loss simulation.
class NormalTest : public Test
{
public:
    NormalTest();
    NormalTest(std::string name, std::string description, unsigned int testNo);
    NormalTest(std::string name, std::string description, WebRtc_UWord32 bitRate, unsigned int testNo);
    virtual ~NormalTest() {};
    virtual void Perform();
protected:
    virtual void Setup();
    virtual void Teardown();
    // Encodes one frame; returns true at end of input.
    virtual bool Encode();
    // Decodes one frame; returns decoded length, negative on error.
    virtual int Decode();
    virtual void CodecSpecific_InitBitrate()=0;
    // Hook for loss simulation; 0 = no loss.
    virtual int DoPacketLoss() {return 0;};
    FILE* _sourceFile;
    FILE* _decodedFile;
    FILE* _encodedFile;
    double _totalEncodeTime;   // seconds
    double _totalDecodeTime;   // seconds
    unsigned int _framecnt;
    bool _requestKeyFrame;
    unsigned int _testNo;
    int _lengthEncFrame;
    // When true, the decoded-output file is opened in append mode.
    bool _appendNext;
};
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_NORMAL_TEST_H_

View File

@@ -0,0 +1,244 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "packet_loss_test.h"
#include "video_source.h"
#include <sstream>
#include <cassert>
#include <string.h>
using namespace webrtc;
// Default packet-loss test: 10% loss, no NACK, 300 kbps, test number 5.
// Initializer list reordered to the actual initialization order (base
// class first, then members in declaration order) to avoid -Wreorder.
PacketLossTest::PacketLossTest()
:
NormalAsyncTest("PacketLossTest", "Encode, remove lost packets, decode", 300, 5),
_lossRate(0.1),
_lossProbability(0.1),
_lastFrame(NULL),
_lastFrameLength(0)
{
}
// Named packet-loss test with the default 10% loss and no NACK.
// Initializer list reordered to the actual initialization order (base
// class first, then members in declaration order) to avoid -Wreorder.
PacketLossTest::PacketLossTest(std::string name, std::string description)
:
NormalAsyncTest(name, description, 300, 5),
_lossRate(0.1),
_lossProbability(0.1),
_lastFrame(NULL),
_lastFrameLength(0)
{
}
// Configurable packet-loss test. With NACK, packets are assumed to be
// retransmitted, so the drop probability is zero while _lossRate still
// describes the channel (used for rate scaling).
// Initializer list reordered to the actual initialization order (base
// class first, then members in declaration order) to avoid -Wreorder.
PacketLossTest::PacketLossTest(std::string name, std::string description, double lossRate, bool useNack, unsigned int rttFrames /* = 0*/)
:
NormalAsyncTest(name, description, 300, 5, rttFrames),
_lossRate(lossRate),
_lastFrame(NULL),
_lastFrameLength(0)
{
    assert(lossRate >= 0 && lossRate <= 1);
    if (useNack)
    {
        _lossProbability = 0;
    }
    else
    {
        _lossProbability = lossRate;
    }
}
// Records each encoded frame's timestamp so Decoded() can detect frames
// the decoder never produced (fully lost frames), then forwards to the
// base-class handler.
void
PacketLossTest::Encoded(const EncodedImage& encodedImage)
{
    // push timestamp to queue
    _frameQueue.push_back(encodedImage._timeStamp);
    NormalAsyncTest::Encoded(encodedImage);
}
// Matches each decoded frame against the queue of encoded timestamps.
// Frames that never came out of the decoder (fully lost) are replaced in
// the output file by repeating the previous decoded frame (freeze frame).
// The decoded image is then cached for future freezes.
void
PacketLossTest::Decoded(const RawImage& decodedImage)
{
    // check the frame queue if any frames have gone missing
    assert(!_frameQueue.empty()); // decoded frame is not in the queue
    while(_frameQueue.front() < decodedImage._timeStamp)
    {
        // this frame is missing
        // write previous decoded frame again (frame freeze)
        if (_decodedFile && _lastFrame)
        {
            fwrite(_lastFrame, 1, _lastFrameLength, _decodedFile);
        }
        // remove frame from queue
        _frameQueue.pop_front();
    }
    assert(_frameQueue.front() == decodedImage._timeStamp); // decoded frame is not in the queue
    // pop the current frame
    _frameQueue.pop_front();
    // save image for future freeze-frame; the cache buffer only grows.
    if (_lastFrameLength < decodedImage._length)
    {
        if (_lastFrame) delete [] _lastFrame;
        _lastFrame = new WebRtc_UWord8[decodedImage._length];
    }
    memcpy(_lastFrame, decodedImage._buffer, decodedImage._length);
    _lastFrameLength = decodedImage._length;
    NormalAsyncTest::Decoded(decodedImage);
}
// Prints loss statistics (target vs. actually inflicted packet loss and the
// resulting channel rate), then delegates cleanup to the base class.
void
PacketLossTest::Teardown()
{
    if (_totalKept + _totalThrown > 0)
    {
        printf("Target packet loss rate: %.4f\n", _lossProbability);
        printf("Actual packet loss rate: %.4f\n", (_totalThrown * 1.0f) / (_totalKept + _totalThrown));
        printf("Channel rate: %.2f kbps\n",
            0.001 * 8.0 * _sumChannelBytes / ((_framecnt * 1.0f) / _inst.maxFramerate));
    }
    else
    {
        printf("No packet losses inflicted\n");
    }
    NormalAsyncTest::Teardown();
}
// Derives output file names from the source clip and loss configuration
// (adding a "-nack" suffix when NACK is simulated), resets the loss
// counters, and delegates to the base-class Setup.
void
PacketLossTest::Setup()
{
    const VideoSource source(_inname, _inst.width, _inst.height, _inst.maxFramerate);
    // Render the loss rate into a string for the file names.
    std::stringstream ss;
    std::string lossRateStr;
    ss << _lossRate;
    ss >> lossRateStr;
    _encodedName = "../../" + source.GetName() + "-" + lossRateStr;
    _outname = "../../out-" + source.GetName() + "-" + lossRateStr;
    // _lossProbability != _lossRate only when NACK is being simulated.
    if (_lossProbability != _lossRate)
    {
        _encodedName += "-nack";
        _outname += "-nack";
    }
    _encodedName += ".vp8";
    _outname += ".yuv";
    _totalKept = 0;
    _totalThrown = 0;
    _sumChannelBytes = 0;
    NormalAsyncTest::Setup();
}
void
PacketLossTest::CodecSpecific_InitBitrate()
{
assert(_bitRate > 0);
WebRtc_UWord32 simulatedBitRate;
if (_lossProbability != _lossRate)
{
// Simulating NACK
simulatedBitRate = WebRtc_UWord32(_bitRate / (1 + _lossRate));
}
else
{
simulatedBitRate = _bitRate;
}
_encoder->SetPacketLoss((WebRtc_UWord32)(_lossProbability * 255.0));
_encoder->SetRates(simulatedBitRate, _inst.maxFramerate);
}
// Splits the pending encoded frame into MTU-sized packets, drops each with
// probability _lossProbability, and reassembles the survivors back into the
// frame buffer. Key frames (and empty frames) pass through untouched.
// Returns 0 = no loss, 1 = partial loss, 2 = whole frame lost.
// Fix: removed the unused local 'count'.
int PacketLossTest::DoPacketLoss()
{
    // Only packet loss for delta frames
    if (_frameToDecode->_frame->GetLength() == 0 || _frameToDecode->_frame->GetFrameType() != kDeltaFrame)
    {
        _sumChannelBytes += _frameToDecode->_frame->GetLength();
        return 0;
    }
    //printf("Encoded: %d bytes\n", _encodedVideoBuffer.GetLength());
    unsigned char *packet = NULL;
    TestVideoEncodedBuffer newEncBuf;
    newEncBuf.VerifyAndAllocate(_lengthSourceFrame);
    _inBufIdx = 0;
    _outBufIdx = 0;
    int size = 1;
    int kept = 0;
    int thrown = 0;
    while ((size = NextPacket(1500, &packet)) > 0)
    {
        if (!PacketLoss(_lossProbability))
        {
            InsertPacket(&newEncBuf, packet, size);
            kept++;
        }
        else
        {
            // Use the ByteLoss function if you want to lose only
            // parts of a packet, and not the whole packet.
            //int size2 = ByteLoss(size, packet, 15);
            thrown++;
            //if (size2 != size)
            //{
            //    InsertPacket(&newEncBuf, packet, size2);
            //}
        }
    }
    int lossResult = (thrown!=0); // 0 = no loss 1 = loss(es)
    if (lossResult)
    {
        lossResult += (kept==0); // 2 = all lost = full frame
    }
    // Replace the frame payload with the surviving packets.
    _frameToDecode->_frame->CopyBuffer(newEncBuf.GetLength(), newEncBuf.GetBuffer());
    _sumChannelBytes += newEncBuf.GetLength();
    _totalKept += kept;
    _totalThrown += thrown;
    return lossResult;
    //printf("Threw away: %d out of %d packets\n", thrown, thrown + kept);
    //printf("Encoded left: %d bytes\n", _encodedVideoBuffer.GetLength());
}
// Returns (via pkg) a pointer to the next packet of at most mtu bytes from
// the frame being decoded, advancing the internal read index _inBufIdx.
// Returns the packet size; 0 once the frame is exhausted.
int PacketLossTest::NextPacket(int mtu, unsigned char **pkg)
{
    unsigned char* const frameStart = _frameToDecode->_frame->GetBuffer();
    *pkg = frameStart + _inBufIdx;
    const long remaining =
        static_cast<long>(_frameToDecode->_frame->GetLength()) - _inBufIdx;
    if (remaining <= mtu)
    {
        // Final (possibly short) packet: consume the rest of the frame.
        _inBufIdx = _frameToDecode->_frame->GetLength();
        return static_cast<int>(remaining);
    }
    _inBufIdx += mtu;
    return mtu;
}
// Intended to drop bytesToLose bytes from a packet (partial-packet loss);
// currently a stub that leaves the packet untouched and returns its
// original size. Kept as a customization point for subclasses.
int PacketLossTest::ByteLoss(int size, unsigned char *pkg, int bytesToLose)
{
    return size;
}
// Appends one surviving packet to the reassembly buffer, guarding against
// overflowing the destination's allocated capacity.
void PacketLossTest::InsertPacket(TestVideoEncodedBuffer *buf, unsigned char *pkg, int size)
{
    const long spaceLeft = static_cast<long>(buf->GetSize()) - _outBufIdx;
    if (spaceLeft < size)
    {
        printf("InsertPacket error!\n");
        return;
    }
    memcpy(buf->GetBuffer() + _outBufIdx, pkg, size);
    buf->UpdateLength(buf->GetLength() + size);
    _outBufIdx += size;
}

View File

@@ -0,0 +1,59 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_PACKET_LOSS_TEST_H_
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_PACKET_LOSS_TEST_H_
#include <list>
#include "normal_async_test.h"
// Codec test over a lossy channel: encoded frames are packetized, packets
// are dropped at random, and wholly lost frames are replaced in the output
// by a freeze of the previous decoded frame. Can also simulate NACK (no
// drops, but the encoder's rate is scaled for retransmission overhead).
class PacketLossTest : public NormalAsyncTest
{
public:
    PacketLossTest();
    virtual ~PacketLossTest() {if(_lastFrame) {delete [] _lastFrame; _lastFrame = NULL;}}
    virtual void Encoded(const webrtc::EncodedImage& encodedImage);
    virtual void Decoded(const webrtc::RawImage& decodedImage);
protected:
    PacketLossTest(std::string name, std::string description);
    PacketLossTest(std::string name,
                   std::string description,
                   double lossRate,
                   bool useNack,
                   unsigned int rttFrames = 0);
    virtual void Setup();
    virtual void Teardown();
    virtual void CodecSpecific_InitBitrate();
    // Returns 0 = no loss, 1 = partial loss, 2 = whole frame lost.
    virtual int DoPacketLoss();
    virtual int NextPacket(int size, unsigned char **pkg);
    virtual int ByteLoss(int size, unsigned char *pkg, int bytesToLose);
    virtual void InsertPacket(TestVideoEncodedBuffer *buf, unsigned char *pkg, int size);
    int _inBufIdx;   // read index into the frame being packetized
    int _outBufIdx;  // write index into the reassembly buffer
    // When NACK is being simulated _lossProbabilty is zero,
    // otherwise it is set equal to _lossRate.
    // Desired channel loss rate.
    double _lossRate;
    // Probability used to simulate packet drops.
    double _lossProbability;
    int _totalKept;
    int _totalThrown;
    int _sumChannelBytes;
    // Timestamps of encoded frames not yet seen by Decoded().
    std::list<WebRtc_UWord32> _frameQueue;
    // Cached copy of the last decoded frame, used for freeze-frames.
    WebRtc_UWord8* _lastFrame;
    WebRtc_UWord32 _lastFrameLength;
};

View File

@@ -0,0 +1,290 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "performance_test.h"
#include "tick_util.h"
#include <assert.h>
using namespace webrtc;
#define NUM_FRAMES 300
// Per-codec worker instance (numCodecs == 0): owns no codec arrays; the
// parent test wires in its encoder, decoder, lock and event.
// Fixes: initializer list reordered to actual initialization order (base
// first, then declaration order), and _encodeEvents[0] -- which the parent
// later overwrites but PerformSingleTest() dereferences -- is explicitly
// NULLed instead of being left as an uninitialized garbage pointer.
PerformanceTest::PerformanceTest(WebRtc_UWord32 bitRate)
:
NormalAsyncTest(bitRate),
_numCodecs(0),
_tests(NULL),
_encoders(NULL),
_decoders(NULL),
_threads(NULL),
_rawImageLock(NULL),
_encodeEvents(new EventWrapper*[1]),
_stopped(true),
_encodeCompleteCallback(NULL),
_decodeCompleteCallback(NULL)
{
    _encodeEvents[0] = NULL;
}
// Main performance test: creates one worker PerformanceTest (and encode
// event) per codec under test.
// Fix: initializer list reordered to the actual initialization order (base
// first, then members in declaration order). This also makes it textually
// clear that _numCodecs is initialized before the array members that are
// sized by it.
PerformanceTest::PerformanceTest(WebRtc_UWord32 bitRate, WebRtc_UWord8 numCodecs)
:
NormalAsyncTest(bitRate),
_numCodecs(numCodecs),
_tests(new PerformanceTest*[_numCodecs]),
_encoders(new VideoEncoder*[_numCodecs]),
_decoders(new VideoDecoder*[_numCodecs]),
_threads(new ThreadWrapper*[_numCodecs]),
_rawImageLock(RWLockWrapper::CreateRWLock()),
_encodeEvents(new EventWrapper*[_numCodecs]),
_stopped(true),
_encodeCompleteCallback(NULL),
_decodeCompleteCallback(NULL)
{
    for (int i = 0; i < _numCodecs; i++)
    {
        _tests[i] = new PerformanceTest(bitRate);
        _encodeEvents[i] = EventWrapper::Create();
    }
}
// Frees the pointer arrays allocated by the constructors. The objects the
// array elements point to (sub-tests, codecs, threads, events) are deleted
// in Teardown(), not here -- NOTE(review): this split means a destructed
// test that never ran Teardown() leaks the elements; confirm intended.
PerformanceTest::~PerformanceTest()
{
    if (_encoders != NULL)
    {
        delete [] _encoders;
    }
    if (_decoders != NULL)
    {
        delete [] _decoders;
    }
    if (_tests != NULL)
    {
        delete [] _tests;
    }
    if (_threads != NULL)
    {
        delete [] _threads;
    }
    if (_rawImageLock != NULL)
    {
        delete _rawImageLock;
    }
    if (_encodeEvents != NULL)
    {
        delete [] _encodeEvents;
    }
}
// Creates one encoder/decoder pair per codec, wires each into its worker
// sub-test (sharing this test's image lock and one encode event each), and
// starts one worker thread per codec. Exits the process on codec init
// failure.
void
PerformanceTest::Setup()
{
    _inname = "../../../../testFiles/foreman.yuv";
    NormalAsyncTest::Setup(); // Setup input and output files
    CodecSettings(352, 288, 30, _bitRate); // common to all codecs
    for (int i=0; i < _numCodecs; i++)
    {
        // CreateEncoder/CreateDecoder are overridden by codec-specific
        // subclasses; the base returns NULL.
        _encoders[i] = CreateEncoder();
        _decoders[i] = CreateDecoder();
        if (_encoders[i] == NULL)
        {
            printf("Must create a codec specific test!\n");
            exit(EXIT_FAILURE);
        }
        if(_encoders[i]->InitEncode(&_inst, 4, 1440) < 0)
        {
            exit(EXIT_FAILURE);
        }
        if (_decoders[i]->InitDecode(&_inst, 1))
        {
            exit(EXIT_FAILURE);
        }
        _tests[i]->SetEncoder(_encoders[i]);
        _tests[i]->SetDecoder(_decoders[i]);
        // All workers read frames under this test's shared image lock.
        _tests[i]->_rawImageLock = _rawImageLock;
        _encodeEvents[i]->Reset();
        // Slot 0 of the worker's event array is its frame-ready signal.
        _tests[i]->_encodeEvents[0] = _encodeEvents[i];
        _tests[i]->_inst = _inst;
        _threads[i] = ThreadWrapper::CreateThread(PerformanceTest::RunThread, _tests[i]);
        unsigned int id = 0;
        _tests[i]->_stopped = false;
        _threads[i]->Start(id);
    }
}
// Feeds NUM_FRAMES frames at ~30 fps to every codec worker: reads a frame
// under the write lock, hands a shared pointer to each sub-test and signals
// its encode event, then sleeps ~33 ms. Afterwards stops all workers and
// reports the total wall-clock time.
// Fix: the inner per-codec loop reused loop variable 'i', shadowing the
// outer frame counter; renamed to 'j' (behavior unchanged, removes the
// -Wshadow hazard).
void
PerformanceTest::Perform()
{
    Setup();
    EventWrapper& sleepEvent = *EventWrapper::Create();
    const WebRtc_Word64 startTime = TickTime::MillisecondTimestamp();
    for (int i=0; i < NUM_FRAMES; i++)
    {
        {
            // Read a new frame from file
            WriteLockScoped imageLock(*_rawImageLock);
            _lengthEncFrame = 0;
            fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile);
            if (feof(_sourceFile) != 0)
            {
                // Loop the clip so the test always runs NUM_FRAMES frames.
                rewind(_sourceFile);
            }
            _inputVideoBuffer.VerifyAndAllocate(_inst.width*_inst.height*3/2);
            _inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _sourceBuffer);
            _inputVideoBuffer.SetTimeStamp((unsigned int) (_encFrameCnt * 9e4 / static_cast<float>(_inst.maxFramerate)));
            _inputVideoBuffer.SetWidth(_inst.width);
            _inputVideoBuffer.SetHeight(_inst.height);
            for (int j=0; j < _numCodecs; j++)
            {
                // Workers share the frame buffer by pointer; the write lock
                // above keeps it stable while they copy.
                _tests[j]->_inputVideoBuffer.CopyPointer(_inputVideoBuffer);
                _encodeEvents[j]->Set();
            }
        }
        if (i < NUM_FRAMES - 1)
        {
            sleepEvent.Wait(33);
        }
    }
    for (int i=0; i < _numCodecs; i++)
    {
        _tests[i]->_stopped = true;
        _encodeEvents[i]->Set();
        _threads[i]->Stop();
    }
    const WebRtc_UWord32 totalTime =
        static_cast<WebRtc_UWord32>(TickTime::MillisecondTimestamp() - startTime);
    printf("Total time: %u\n", totalTime);
    delete &sleepEvent;
    Teardown();
}
// Releases callbacks, then (for the owning test only, _numCodecs > 0) the
// shared input buffer and files, and finally every per-codec worker and
// its codecs, events and threads. Worker sub-tests have _numCodecs == 0,
// so their recursive Teardown() skips the shared-state branch.
void PerformanceTest::Teardown()
{
    if (_encodeCompleteCallback != NULL)
    {
        delete _encodeCompleteCallback;
    }
    if (_decodeCompleteCallback != NULL)
    {
        delete _decodeCompleteCallback;
    }
    // main test only, all others have numCodecs = 0:
    if (_numCodecs > 0)
    {
        WriteLockScoped imageLock(*_rawImageLock);
        _inputVideoBuffer.Free();
        NormalAsyncTest::Teardown();
    }
    for (int i=0; i < _numCodecs; i++)
    {
        _encoders[i]->Release();
        delete _encoders[i];
        _decoders[i]->Release();
        delete _decoders[i];
        // Clear the shared pointer so the worker does not free the parent's
        // buffer, and detach the shared lock before recursing.
        _tests[i]->_inputVideoBuffer.ClearPointer();
        _tests[i]->_rawImageLock = NULL;
        _tests[i]->Teardown();
        delete _tests[i];
        delete _encodeEvents[i];
        delete _threads[i];
    }
}
bool
PerformanceTest::RunThread(void* obj)
{
PerformanceTest& test = *static_cast<PerformanceTest*>(obj);
return test.PerformSingleTest();
}
// One iteration of a codec worker thread: lazily registers callbacks, waits
// for the parent's frame-ready event, encodes the shared frame under the
// read lock, then drains and decodes the completed-frame queue.
// Returns false to terminate the thread (stop requested or decode error).
// Fix: the second error branch tested 'ret < 0' again -- identical to the
// branch above it and therefore unreachable -- although its message reports
// a *positive* return value. Changed to 'ret > 0'.
bool
PerformanceTest::PerformSingleTest()
{
    if (_encodeCompleteCallback == NULL)
    {
        _encodeCompleteCallback = new VideoEncodeCompleteCallback(NULL, &_frameQueue, *this);
        _encoder->RegisterEncodeCompleteCallback(_encodeCompleteCallback);
    }
    if (_decodeCompleteCallback == NULL)
    {
        _decodeCompleteCallback = new VideoDecodeCompleteCallback(NULL, *this);
        _decoder->RegisterDecodeCompleteCallback(_decodeCompleteCallback);
    }
    (*_encodeEvents)->Wait(WEBRTC_EVENT_INFINITE); // The first event is used for every single test
    CodecSpecific_InitBitrate();
    bool complete = false;
    {
        ReadLockScoped imageLock(*_rawImageLock);
        complete = Encode();
    }
    if (!_frameQueue.Empty() || complete)
    {
        while (!_frameQueue.Empty())
        {
            _frameToDecode = static_cast<FrameQueueTuple *>(_frameQueue.PopFrame());
            int lost = DoPacketLoss();
            if (lost == 2)
            {
                // Lost the whole frame, continue
                _missingFrames = true;
                delete _frameToDecode;
                _frameToDecode = NULL;
                continue;
            }
            int ret = Decode(lost);
            delete _frameToDecode;
            _frameToDecode = NULL;
            if (ret < 0)
            {
                fprintf(stderr,"\n\nError in decoder: %d\n\n", ret);
                return false;
            }
            else if (ret > 0)
            {
                fprintf(stderr, "\n\nPositive return value from decode!\n\n");
                return false;
            }
        }
    }
    if (_stopped)
    {
        return false;
    }
    return true;
}
// Encodes the shared input frame (already read by the parent's Perform()).
// Unlike NormalAsyncTest::Encode, never signals end of input: the parent
// loops the source clip, so this always returns false.
bool PerformanceTest::Encode()
{
    RawImage rawImage;
    VideoBufferToRawImage(_inputVideoBuffer, rawImage);
    VideoFrameType frameType = kDeltaFrame;
    // Periodic key frame every 50 frames when key frames are requested.
    if (_requestKeyFrame && !(_encFrameCnt%50))
    {
        frameType = kKeyFrame;
    }
    void* codecSpecificInfo = CreateEncoderSpecificInfo();
    int ret = _encoder->Encode(rawImage, codecSpecificInfo, frameType);
    if (codecSpecificInfo != NULL)
    {
        // TODO(holmer): implement virtual function for deleting this and remove warnings
        // NOTE(review): deleting through void* is undefined behavior; only
        // the codec-specific subclass knows the concrete type.
        delete codecSpecificInfo;
        codecSpecificInfo = NULL;
    }
    assert(ret >= 0);
    return false;
}
// Decodes the pending frame; lossValue != 0 marks it incomplete. Returns
// the raw decoder status. NOTE(review): _missingFrames is cleared even when
// Decode() fails, unlike NormalAsyncTest::Decode which clears it only on
// success -- confirm this is intentional for the performance test.
int PerformanceTest::Decode(int lossValue)
{
    EncodedImage encodedImage;
    VideoEncodedBufferToEncodedImage(*(_frameToDecode->_frame), encodedImage);
    encodedImage._completeFrame = !lossValue;
    int ret = _decoder->Decode(encodedImage, _missingFrames, _frameToDecode->_codecSpecificInfo);
    _missingFrames = false;
    return ret;
}

View File

@@ -0,0 +1,54 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_PERFORMANCE_TEST_H_
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_PERFORMANCE_TEST_H_
#include "normal_async_test.h"
#include "thread_wrapper.h"
#include "rw_lock_wrapper.h"
#include "event_wrapper.h"
// Runs several codecs concurrently (one worker thread each) against the
// same input stream to measure relative performance. The public
// constructor builds the owning test; the protected one builds the
// per-codec worker instances it drives.
class PerformanceTest : public NormalAsyncTest
{
public:
    PerformanceTest(WebRtc_UWord32 bitRate, WebRtc_UWord8 numCodecs);
    virtual ~PerformanceTest();
    virtual void Perform();
    virtual void Print() {};
protected:
    // Worker-instance constructor (numCodecs == 0).
    PerformanceTest(WebRtc_UWord32 bitRate);
    virtual void Setup();
    virtual bool Encode();
    virtual int Decode(int lossValue = 0);
    virtual void Teardown();
    // Thread entry point; 'obj' is the worker PerformanceTest*.
    static bool RunThread(void* obj);
    bool PerformSingleTest();
    // Overridden by codec-specific subclasses; base returns NULL.
    virtual webrtc::VideoEncoder* CreateEncoder() const { return NULL; };
    virtual webrtc::VideoDecoder* CreateDecoder() const { return NULL; };
    WebRtc_UWord8 _numCodecs;                 // 0 for worker instances
    PerformanceTest** _tests;                 // one worker per codec
    webrtc::VideoEncoder** _encoders;
    webrtc::VideoDecoder** _decoders;
    webrtc::ThreadWrapper** _threads;
    // Shared frame lock: parent writes, workers read (owned by parent).
    webrtc::RWLockWrapper* _rawImageLock;
    webrtc::EventWrapper** _encodeEvents;     // frame-ready signal per codec
    FrameQueue _frameQueue;
    bool _stopped;
    webrtc::EncodedImageCallback* _encodeCompleteCallback;
    webrtc::DecodedImageCallback* _decodeCompleteCallback;
    FILE* _outFile;
};
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_PERFORMANCE_TEST_H_

View File

@@ -0,0 +1,427 @@
function plotBenchmark(fileNames, export)
%PLOTBENCHMARK Plots and exports video codec benchmarking results.
%   PLOTBENCHMARK(FILENAMES, EXPORT) parses the video codec benchmarking
%   result files given by the cell array of strings FILENAMES. It plots
%   the results and optionally exports each plot to an appropriately
%   named file.
%
%   EXPORT parameter:
%       'none'  No file exports.
%       'eps'   Exports to eps files (default).
%       'pdf'   Exports to eps files and uses the command-line utility
%               epstopdf to obtain pdf files.
%
%   Example:
%       plotBenchmark({'H264Benchmark.txt' 'LSVXBenchmark.txt'}, 'pdf')

% Validate and normalize the arguments.
if (nargin < 1)
    error('Too few input arguments');
elseif (nargin < 2)
    export = 'eps';
end
if ~iscell(fileNames)
    if ischar(fileNames)
        % one single file name as a string is ok
        if size(fileNames,1) > 1
            % this is a char matrix, not ok
            error('First argument must not be a char matrix');
        end
        % wrap in a cell array
        fileNames = {fileNames};
    else
        error('First argument must be a cell array of strings');
    end
end
if ~ischar(export)
    error('Second argument must be a string');
end

outpath = 'BenchmarkPlots';
[status, errMsg] = mkdir(outpath);
if status == 0
    error(errMsg);
end

nCases = 0;
testCases = [];
% Read each test result file
for fileIdx = 1:length(fileNames)
    if ~isstr(fileNames{fileIdx})
        error('First argument must be a cell array of strings');
    end
    fid = fopen(fileNames{fileIdx}, 'rt');
    if fid == -1
        error(['Unable to open ' fileNames{fileIdx}]);
    end
    % The first line must be a version magic; only format 1.0 is supported.
    version = '1.0';
    if ~strcmp(fgetl(fid), ['#!benchmark' version])
        fclose(fid);
        error(['Requires benchmark file format version ' version]);
    end
    % Parse results file into testCases struct.
    % Each case is: a "name,resolution,frameRate" header line, a bitrate
    % line, a psnr line, then optional speed / encode-decode time / SSIM
    % lines (each "label,v1,v2,...").
    codec = fgetl(fid);
    tline = fgetl(fid);
    while(tline ~= -1)
        nCases = nCases + 1;
        delim = strfind(tline, ',');
        name = tline(1:delim(1)-1);
        % Drop underscored suffix from name
        underscore = strfind(name, '_');
        if ~isempty(underscore)
            name = name(1:underscore(1)-1);
        end
        resolution = tline(delim(1)+1:delim(2)-1);
        frameRate = tline(delim(2)+1:end);
        tline = fgetl(fid);
        delim = strfind(tline, ',');
        bitrateLabel = tline(1:delim(1)-1);
        bitrate = sscanf(tline(delim(1):end),',%f');
        tline = fgetl(fid);
        delim = strfind(tline, ',');
        psnrLabel = tline(1:delim(1)-1);
        psnr = sscanf(tline(delim(1):end),',%f');
        % Default data for the optional lines
        speedLabel = 'Default';
        speed = 0;
        ssimLabel = 'Default';
        ssim = 0;
        tline = fgetl(fid);
        delim = strfind(tline, ',');
        % Consume optional data lines until a line with no comma
        % (the next case header has commas too, but is handled by the
        % strncmp dispatch below leaving it untouched).
        while ~isempty(delim)
            % More data
            % Check type of data
            if strncmp(lower(tline), 'speed', 5)
                % Speed data included
                speedLabel = tline(1:delim(1)-1);
                speed = sscanf(tline(delim(1):end), ',%f');
                tline = fgetl(fid);
            elseif strncmp(lower(tline), 'encode time', 11)
                % Encode and decode times included
                % TODO: take care of the data
                % pop two lines from file
                tline = fgetl(fid);
                tline = fgetl(fid);
            elseif strncmp(tline, 'SSIM', 4)
                % SSIM data included
                ssimLabel = tline(1:delim(1)-1);
                ssim = sscanf(tline(delim(1):end), ',%f');
                tline = fgetl(fid);
            end
            delim = strfind(tline, ',');
        end
        testCases = [testCases struct('codec', codec, 'name', name, 'resolution', ...
            resolution, 'frameRate', frameRate, 'bitrate', bitrate, 'psnr', psnr, ...
            'speed', speed, 'bitrateLabel', bitrateLabel, 'psnrLabel', psnrLabel, ...
            'speedLabel', speedLabel, ...
            'ssim', ssim, 'ssimLabel', ssimLabel)];
        tline = fgetl(fid);
    end
    fclose(fid);
end

% Each plotOne* call plots all cases sharing the first case's specs and
% returns the remainder; loop until every case is plotted.
i = 0;
casesPsnr = testCases;
while ~isempty(casesPsnr)
    i = i + 1;
    casesPsnr = plotOnePsnr(casesPsnr, i, export, outpath);
end
casesSSIM = testCases;
while ~isempty(casesSSIM)
    i = i + 1;
    casesSSIM = plotOneSSIM(casesSSIM, i, export, outpath);
end
casesSpeed = testCases;
while ~isempty(casesSpeed)
    % Skip cases that carried no speed data (still at the 0 default).
    if casesSpeed(1).speed == 0
        casesSpeed = casesSpeed(2:end);
    else
        i = i + 1;
        casesSpeed = plotOneSpeed(casesSpeed, i, export, outpath);
    end
end
%%%%%%%%%%%%%%%%%%
%% SUBFUNCTIONS %%
%%%%%%%%%%%%%%%%%%
% Plots PSNR vs. bitrate for every case matching the first case's
% resolution and frame rate (one curve per codec); returns the cases
% that were not plotted so the caller can iterate.
function casesOut = plotOnePsnr(cases, num, export, outpath)
% Find matching specs
plotIdx = 1;
for i = 2:length(cases)
    if strcmp(cases(1).resolution, cases(i).resolution) & ...
        strcmp(cases(1).frameRate, cases(i).frameRate)
        plotIdx = [plotIdx i];
    end
end
% Return unplotted cases
casesOut = cases(setdiff(1:length(cases), plotIdx));
cases = cases(plotIdx);
% Prune similar results: drop points whose bitrate is within 10 of the
% following point's bitrate.
for i = 1:length(cases)
    simIndx = find(abs(cases(i).bitrate - [cases(i).bitrate(2:end) ; 0]) < 10);
    while ~isempty(simIndx)
        diffIndx = setdiff(1:length(cases(i).bitrate), simIndx);
        cases(i).psnr = cases(i).psnr(diffIndx);
        cases(i).bitrate = cases(i).bitrate(diffIndx);
        simIndx = find(abs(cases(i).bitrate - [cases(i).bitrate(2:end) ; 0]) < 10);
    end
end
% Prepare figure with axis labels and so on
hFig = figure(num);
clf;
hold on;
grid on;
axis([0 1100 20 50]);
set(gca, 'XTick', 0:200:1000);
set(gca, 'YTick', 20:10:60);
xlabel(cases(1).bitrateLabel);
ylabel(cases(1).psnrLabel);
res = cases(1).resolution;
frRate = cases(1).frameRate;
title([res ', ' frRate]);
hLines = [];
codecs = {};
sequences = {};
i = 0;
% One plot style (color/marker) per codec.
while ~isempty(cases)
    i = i + 1;
    [cases, hLine, codec, sequences] = plotOneCodec(cases, 'bitrate', 'psnr', i, sequences, 1);
    % Stored to generate the legend
    hLines = [hLines ; hLine];
    codecs = {codecs{:} codec};
end
legend(hLines, codecs, 4);
hold off;
if ~strcmp(export, 'none')
    % Export figure to an eps file
    res = stripws(res);
    frRate = stripws(frRate);
    exportName = [outpath '/psnr-' res '-' frRate];
    exportfig(hFig, exportName, 'Format', 'eps2', 'Color', 'cmyk');
end
if strcmp(export, 'pdf')
    % Use the epstopdf utility to convert to pdf
    system(['epstopdf ' exportName '.eps']);
end
% Plots SSIM vs. bitrate for every case matching the first case's
% resolution and frame rate (one curve per codec); returns the cases
% that were not plotted so the caller can iterate.
function casesOut = plotOneSSIM(cases, num, export, outpath)
% Find matching specs
plotIdx = 1;
for i = 2:length(cases)
    if strcmp(cases(1).resolution, cases(i).resolution) & ...
        strcmp(cases(1).frameRate, cases(i).frameRate)
        plotIdx = [plotIdx i];
    end
end
% Return unplotted cases
casesOut = cases(setdiff(1:length(cases), plotIdx));
cases = cases(plotIdx);
% Prune similar results: drop points whose bitrate is within 10 of the
% following point's bitrate.
for i = 1:length(cases)
    simIndx = find(abs(cases(i).bitrate - [cases(i).bitrate(2:end) ; 0]) < 10);
    while ~isempty(simIndx)
        diffIndx = setdiff(1:length(cases(i).bitrate), simIndx);
        cases(i).ssim = cases(i).ssim(diffIndx);
        cases(i).bitrate = cases(i).bitrate(diffIndx);
        simIndx = find(abs(cases(i).bitrate - [cases(i).bitrate(2:end) ; 0]) < 10);
    end
end
% Prepare figure with axis labels and so on
hFig = figure(num);
clf;
hold on;
grid on;
axis([0 1100 0.5 1]); % y-limit are set to 'auto' below
set(gca, 'XTick', 0:200:1000);
%set(gca, 'YTick', 20:10:60);
xlabel(cases(1).bitrateLabel);
ylabel(cases(1).ssimLabel);
res = cases(1).resolution;
frRate = cases(1).frameRate;
title([res ', ' frRate]);
hLines = [];
codecs = {};
sequences = {};
i = 0;
% One plot style (color/marker) per codec.
while ~isempty(cases)
    i = i + 1;
    [cases, hLine, codec, sequences] = plotOneCodec(cases, 'bitrate', 'ssim', i, sequences, 1);
    % Stored to generate the legend
    hLines = [hLines ; hLine];
    codecs = {codecs{:} codec};
end
%set(gca,'YLimMode','auto')
set(gca,'YLim',[0.5 1])
set(gca,'YScale','log')
legend(hLines, codecs, 4);
hold off;
if ~strcmp(export, 'none')
    % Export figure to an eps file
    res = stripws(res);
    frRate = stripws(frRate);
    % Bug fix: this previously exported to 'psnr-...' (copy-paste from
    % plotOnePsnr), silently overwriting the PSNR plot files.
    exportName = [outpath '/ssim-' res '-' frRate];
    exportfig(hFig, exportName, 'Format', 'eps2', 'Color', 'cmyk');
end
if strcmp(export, 'pdf')
    % Use the epstopdf utility to convert to pdf
    system(['epstopdf ' exportName '.eps']);
end
% Plots encode speed vs. PSNR for every case matching the first case's
% resolution, frame rate AND sequence name; returns the unplotted cases.
function casesOut = plotOneSpeed(cases, num, export, outpath)
% Find matching specs
plotIdx = 1;
for i = 2:length(cases)
    if strcmp(cases(1).resolution, cases(i).resolution) & ...
        strcmp(cases(1).frameRate, cases(i).frameRate) & ...
        strcmp(cases(1).name, cases(i).name)
        plotIdx = [plotIdx i];
    end
end
% Return unplotted cases
casesOut = cases(setdiff(1:length(cases), plotIdx));
cases = cases(plotIdx);
% Prune similar results: drop points whose PSNR is within 0.25 dB of the
% following point's PSNR.
for i = 1:length(cases)
    simIndx = find(abs(cases(i).psnr - [cases(i).psnr(2:end) ; 0]) < 0.25);
    while ~isempty(simIndx)
        diffIndx = setdiff(1:length(cases(i).psnr), simIndx);
        cases(i).psnr = cases(i).psnr(diffIndx);
        cases(i).speed = cases(i).speed(diffIndx);
        simIndx = find(abs(cases(i).psnr - [cases(i).psnr(2:end) ; 0]) < 0.25);
    end
end
hFig = figure(num);
clf;
hold on;
%grid on;
xlabel(cases(1).psnrLabel);
ylabel(cases(1).speedLabel);
res = cases(1).resolution;
name = cases(1).name;
frRate = cases(1).frameRate;
title([name ', ' res ', ' frRate]);
hLines = [];
codecs = {};
sequences = {};
i = 0;
% One plot style (color/marker) per codec; no sequence annotation (0).
while ~isempty(cases)
    i = i + 1;
    [cases, hLine, codec, sequences] = plotOneCodec(cases, 'psnr', 'speed', i, sequences, 0);
    % Stored to generate the legend
    hLines = [hLines ; hLine];
    codecs = {codecs{:} codec};
end
legend(hLines, codecs, 1);
hold off;
if ~strcmp(export, 'none')
    % Export figure to an eps file
    res = stripws(res);
    frRate = stripws(frRate);
    exportName = [outpath '/speed-' name '-' res '-' frRate];
    exportfig(hFig, exportName, 'Format', 'eps2', 'Color', 'cmyk');
end
if strcmp(export, 'pdf')
    % Use the epstopdf utility to convert to pdf
    system(['epstopdf ' exportName '.eps']);
end
% Plots xfield vs. yfield for every case sharing the first case's codec,
% using plot style number num. Returns the remaining cases, the line
% handle (for the legend), the codec name, and the updated list of
% sequences that have already been annotated.
function [casesOut, hLine, codec, sequences] = plotOneCodec(cases, xfield, yfield, num, sequences, annotatePlot)
plotStr = {'gx-', 'bo-', 'r^-', 'kd-', 'cx-', 'go--', 'b^--'};
% Find matching codecs
plotIdx = 1;
for i = 2:length(cases)
    if strcmp(cases(1).codec, cases(i).codec)
        plotIdx = [plotIdx i];
    end
end
% Return unplotted cases
casesOut = cases(setdiff(1:length(cases), plotIdx));
cases = cases(plotIdx);
for i = 1:length(cases)
    % Plot a single case (hLine keeps the handle of the last one plotted)
    hLine = plot(getfield(cases(i), xfield), getfield(cases(i), yfield), plotStr{num}, ...
        'LineWidth', 1.1, 'MarkerSize', 6);
end
% hLine handle and codec are returned to construct the legend afterwards
codec = cases(1).codec;
if annotatePlot == 0
    return;
end
for i = 1:length(cases)
    % Print the codec name as a text label
    % Ensure each codec is only printed once
    sequencePlotted = 0;
    for j = 1:length(sequences)
        if strcmp(cases(i).name, sequences{j})
            sequencePlotted = 1;
            break;
        end
    end
    if sequencePlotted == 0
        % Label the first data point with the sequence name.
        text(getfield(cases(i), xfield, {1}), getfield(cases(i), yfield, {1}), ...
            [' ' cases(i).name]);
        sequences = {sequences{:} cases(i).name};
    end
end
% Strip whitespace from string
% Removes every whitespace character from STR, keeping the remaining
% characters in their original order.
function str = stripws(str)
if ~isstr(str)
    error('String required');
end
% Logical indexing: keep only the non-whitespace positions.
str = str(~isspace(str));

View File

@@ -0,0 +1,534 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "test.h"
#include "video_source.h"
#include "vplib.h"
#include "event_wrapper.h"
#include "thread_wrapper.h"
#include <iostream>
#include <fstream>
#include <cmath>
#include <ctime>
#include <string.h>
#include <cassert>
#include <vector>
using namespace webrtc;
long filesize(const char *filename); // local function defined at end of file
// Work item for one SSIM worker thread: which pair of I420 files to
// compare, their dimensions, the frame range to process, where to write
// the result, and the event to signal on completion.
struct SSIMcontext
{
    SSIMcontext() :
        refFileName(NULL), testFileName(NULL), width(0), height(0),
        SSIMptr(NULL), startFrame(-1), endFrame(-1), evnt(NULL) {};
    SSIMcontext(const char *ref, const char *test, int w, int h, double *Sptr,
        int start, int end, EventWrapper* ev) :
        refFileName(ref), testFileName(test), width(w), height(h),
        SSIMptr(Sptr), startFrame(start), endFrame(end), evnt(ev) {};
    const char *refFileName;  // reference (source) yuv file; not owned
    const char *testFileName; // decoded (test) yuv file; not owned
    int width;
    int height;
    double *SSIMptr;          // out: average SSIM over the frame range
    int startFrame;           // first frame (inclusive), -1 = from start
    int endFrame;             // last frame (inclusive), -1 = to EOF
    EventWrapper* evnt;       // signaled when the worker is done; not owned
};
// Constructs a test with a display name and description. The bit rate is
// left at 0; CodecSettings() will fall back to its 600 kbps default.
Test::Test(std::string name, std::string description)
:
_name(name),
_description(description),
_bitRate(0),
_inname(""),
_outname(""),
_encodedName("")
{
    memset(&_inst, 0, sizeof(_inst));
    // Fixed seed so randomized behavior (e.g. PacketLoss) is reproducible.
    unsigned int seed = static_cast<unsigned int>(0);
    std::srand(seed);
}
// Constructs a test with an explicit target bit rate (kbps).
Test::Test(std::string name, std::string description, WebRtc_UWord32 bitRate)
:
_name(name),
_description(description),
_bitRate(bitRate),
_inname(""),
_outname(""),
_encodedName("")
{
    memset(&_inst, 0, sizeof(_inst));
    // Fixed seed so randomized behavior (e.g. PacketLoss) is reproducible.
    unsigned int seed = static_cast<unsigned int>(0);
    std::srand(seed);
}
// Writes the test summary to the log and stdout, computing luma PSNR and
// (multi-threaded) SSIM between the input file and the decoded output.
void
Test::Print()
{
    std::cout << _name << " completed!" << std::endl;
    (*_log) << _name << std::endl;
    (*_log) << _description << std::endl;
    (*_log) << "Input file: " << _inname << std::endl;
    (*_log) << "Output file: " << _outname << std::endl;
    double psnr = -1.0, ssim = -1.0; // sentinels in case PSNRfromFiles fails
    PSNRfromFiles(_inname.c_str(), _outname.c_str(), _inst.width, _inst.height, &psnr);
    ssim = SSIMfromFilesMT(4 /* number of threads*/);
    (*_log) << "PSNR: " << psnr << std::endl;
    std::cout << "PSNR: " << psnr << std::endl << std::endl;
    (*_log) << "SSIM: " << ssim << std::endl;
    std::cout << "SSIM: " << ssim << std::endl << std::endl;
    (*_log) << std::endl;
}
// Allocates the working buffer for one raw source frame.
void
Test::Setup()
{
    // I420: full-size luma plane plus two quarter-size chroma planes,
    // i.e. 1.5 bytes per pixel.
    const int pixels = _inst.width * _inst.height;
    _lengthSourceFrame = 3 * pixels / 2;
    _sourceBuffer = new unsigned char[_lengthSourceFrame];
}
// Fills _inst with the codec configuration for this test.
// An explicit bitRate argument overrides everything; otherwise a
// previously configured _bitRate is kept, defaulting to 600 kbps when
// none was ever set.
void
Test::CodecSettings(int width, int height, WebRtc_UWord32 frameRate /*=30*/, WebRtc_UWord32 bitRate /*=0*/)
{
    if (bitRate > 0)
    {
        _bitRate = bitRate;
    }
    else if (_bitRate == 0)
    {
        _bitRate = 600;
    }
    _inst.width = width;
    _inst.height = height;
    _inst.maxFramerate = (unsigned char)frameRate;
    _inst.startBitrate = (int)_bitRate;
    _inst.maxBitrate = 8000;
}
// Releases the source-frame buffer allocated in Setup().
void
Test::Teardown()
{
    delete [] _sourceBuffer;
}
// Injects the encoder under test; ownership stays with the caller.
void
Test::SetEncoder(webrtc::VideoEncoder*encoder)
{
    _encoder = encoder;
}
// Injects the decoder under test; ownership stays with the caller.
void
Test::SetDecoder(VideoDecoder*decoder)
{
    _decoder = decoder;
}
// Sets the log stream used by Print(); ownership stays with the caller.
void
Test::SetLog(std::fstream* log)
{
    _log = log;
}
// Computes the average luma (Y-plane) PSNR between two I420 files of the
// given dimensions, comparing frames until either file ends.
// Writes the result to *YPSNRptr. Returns 0 on success, -1 if the
// reference file cannot be opened, -2 if the test file cannot be opened,
// -3 if no complete frame pair could be read.
// Note: a perfectly identical frame yields mse == 0 and log10(0) = -inf,
// i.e. an infinite PSNR, as in the original implementation.
int
Test::PSNRfromFiles(const char *refFileName, const char *testFileName, int width, int height, double *YPSNRptr)
{
    FILE *refFp = fopen(refFileName, "rb");
    if( refFp == NULL ) {
        // cannot open reference file
        fprintf(stderr, "Cannot open file %s\n", refFileName);
        return -1;
    }
    FILE *testFp = fopen(testFileName, "rb");
    if( testFp == NULL ) {
        // cannot open test file
        fprintf(stderr, "Cannot open file %s\n", testFileName);
        fclose(refFp); // was leaked on this error path
        return -2;
    }
    double mse = 0.0;
    double mseLogSum = 0.0;
    int frames = 0;
    int frameBytes = 3*width*height/2; // bytes in one frame I420
    unsigned char *ref = new unsigned char[frameBytes];  // space for one frame I420
    unsigned char *test = new unsigned char[frameBytes]; // space for one frame I420
    int refBytes = (int) fread(ref, 1, frameBytes, refFp);
    int testBytes = (int) fread(test, 1, frameBytes, testFp);
    while( refBytes == frameBytes && testBytes == frameBytes )
    {
        mse = 0.0;
        // calculate Y sum-square-difference
        for( int k = 0; k < width * height; k++ )
        {
            mse += (test[k] - ref[k]) * (test[k] - ref[k]);
        }
        // divide by number of pixels
        mse /= (double) (width * height);
        // accumulate for total average
        mseLogSum += std::log10( mse );
        frames++;
        refBytes = (int) fread(ref, 1, frameBytes, refFp);
        testBytes = (int) fread(test, 1, frameBytes, testFp);
    }
    delete [] ref;
    delete [] test;
    fclose(refFp);
    fclose(testFp);
    if (frames == 0)
    {
        // Nothing compared: avoid dividing by zero below.
        fprintf(stderr, "No frames compared in PSNRfromFiles\n");
        return -3;
    }
    // ypsnrAvg = sum( 10 log (255^2 / MSE) ) / frames
    //          = 20 * log(255) - 10 * mseLogSum / frames
    *YPSNRptr = 20.0 * std::log10(255.0) - 10.0 * mseLogSum / frames;
    return 0;
}
// Computes the average SSIM between two I420 files of the given
// dimensions over the frame range [startFrame, endFrame] (inclusive;
// -1 means "from the start" / "to end-of-file"). Writes the result to
// *SSIMptr. Returns 0 on success, -1 on reference-file errors, -2 if
// the test file cannot be opened, -3 if no frame pair was compared.
// Fixes over the original: file handles and frame buffers are released
// on every early-return path, and an empty range no longer divides by 0.
int
Test::SSIMfromFiles(const char *refFileName, const char *testFileName, int width, int height, double *SSIMptr,
                    int startFrame /*= -1*/, int endFrame /*= -1*/)
{
    FILE *refFp = fopen(refFileName, "rb");
    if( refFp == NULL ) {
        // cannot open reference file
        fprintf(stderr, "Cannot open file %s\n", refFileName);
        return -1;
    }
    FILE *testFp = fopen(testFileName, "rb");
    if( testFp == NULL ) {
        // cannot open test file
        fprintf(stderr, "Cannot open file %s\n", testFileName);
        fclose(refFp); // was leaked on this error path
        return -2;
    }
    int frames = 0;
    int frameBytes = 3*width*height/2; // bytes in one frame I420
    // Seek both files to the requested start frame before allocating, so
    // a failure here leaks nothing.
    if (startFrame >= 0)
    {
        if (fseek(refFp, frameBytes * startFrame, SEEK_SET) != 0){
            fprintf(stderr, "Cannot go to frame %i in %s\n", startFrame, refFileName);
            fclose(refFp);
            fclose(testFp);
            return -1;
        }
        if (fseek(testFp, frameBytes * startFrame, SEEK_SET) != 0){
            fprintf(stderr, "Cannot go to frame %i in %s\n", startFrame, testFileName);
            fclose(refFp);
            fclose(testFp);
            return -1;
        }
    }
    unsigned char *ref = new unsigned char[frameBytes];  // space for one frame I420
    unsigned char *test = new unsigned char[frameBytes]; // space for one frame I420
    int refBytes = (int) fread(ref, 1, frameBytes, refFp);
    int testBytes = (int) fread(test, 1, frameBytes, testFp);
    //
    // SSIM: variable definition, window function, initialization
    int window = 10;
    int flag_window = 0; //0 for uniform window filter, 1 for gaussian symmetric window
    float variance_window = 2.0; //variance for window function
    float ssimFilter[121]; //2d window filter: typically 11x11 = (window+1)*(window+1)
    //statistics per column of window (#columns = window+1), 0 element for avg over all columns
    float avgTest[12];
    float avgRef[12];
    float contrastTest[12];
    float contrastRef[12];
    float crossCorr[12];
    //
    //offsets for stability
    float offset1 = 0.1f;
    float offset2 = 0.1f;
    float offset3 = offset2/2;
    //
    //define window for SSIM: take uniform filter for now
    float sumfil = 0.0;
    int nn=-1;
    for(int j=-window/2;j<=window/2;j++)
    for(int i=-window/2;i<=window/2;i++)
    {
        nn+=1;
        if (flag_window == 0)
            ssimFilter[nn] = 1.0;
        else
        {
            float dist = (float)(i*i) + (float)(j*j);
            float tmp = 0.5f*dist/variance_window;
            ssimFilter[nn] = exp(-tmp);
        }
        sumfil +=ssimFilter[nn];
    }
    //normalize window
    nn=-1;
    for(int j=-window/2;j<=window/2;j++)
    for(int i=-window/2;i<=window/2;i++)
    {
        nn+=1;
        ssimFilter[nn] = ssimFilter[nn]/((float)sumfil);
    }
    //
    float ssimScene = 0.0; //average SSIM for sequence
    //
    //SSIM: done with variables and definition
    //
    while( refBytes == frameBytes && testBytes == frameBytes &&
        !(endFrame >= 0 && frames > endFrame - startFrame))
    {
        float ssimFrame = 0.0;
        int sh = window/2+1;
        int numPixels = 0;
        for(int i=sh;i<height-sh;i++)
        for(int j=sh;j<width-sh;j++)
        {
            avgTest[0] = 0.0;
            avgRef[0] = 0.0;
            contrastTest[0] = 0.0;
            contrastRef[0] = 0.0;
            crossCorr[0] = 0.0;
            numPixels +=1;
            //for uniform window, only need to loop over whole window for first column pixel in image, and then shift
            if (j == sh || flag_window == 1)
            {
                //initialize statistics
                for(int k=1;k<window+2;k++)
                {
                    avgTest[k] = 0.0;
                    avgRef[k] = 0.0;
                    contrastTest[k] = 0.0;
                    contrastRef[k] = 0.0;
                    crossCorr[k] = 0.0;
                }
                int nn=-1;
                //compute contrast and correlation
                for(int jj=-window/2;jj<=window/2;jj++)
                for(int ii=-window/2;ii<=window/2;ii++)
                {
                    nn+=1;
                    int i2 = i+ii;
                    int j2 = j+jj;
                    float tmp1 = (float)test[i2*width+j2];
                    float tmp2 = (float)ref[i2*width+j2];
                    //local average of each signal
                    avgTest[jj+window/2+1] += ssimFilter[nn]*tmp1;
                    avgRef[jj+window/2+1] += ssimFilter[nn]*tmp2;
                    //local correlation/contrast of each signal
                    contrastTest[jj+window/2+1] += ssimFilter[nn]*tmp1*tmp1;
                    contrastRef[jj+window/2+1] += ssimFilter[nn]*tmp2*tmp2;
                    //local cross correlation
                    crossCorr[jj+window/2+1] += ssimFilter[nn]*tmp1*tmp2;
                }
            }
            //for uniform window case, can shift window horiz, then compute statistics for last column in window
            else
            {
                //shift statistics horiz.
                for(int k=1;k<window+1;k++)
                {
                    avgTest[k]=avgTest[k+1];
                    avgRef[k]=avgRef[k+1];
                    contrastTest[k] = contrastTest[k+1];
                    contrastRef[k] = contrastRef[k+1];
                    crossCorr[k] = crossCorr[k+1];
                }
                //compute statistics for last column
                avgTest[window+1] = 0.0;
                avgRef[window+1] = 0.0;
                contrastTest[window+1] = 0.0;
                contrastRef[window+1] = 0.0;
                crossCorr[window+1] = 0.0;
                int nn = (window+1)*window - 1;
                int jj = window/2;
                int j2 = j + jj;
                for(int ii=-window/2;ii<=window/2;ii++)
                {
                    nn+=1;
                    int i2 = i+ii;
                    float tmp1 = (float)test[i2*width+j2];
                    float tmp2 = (float)ref[i2*width+j2];
                    //local average of each signal
                    avgTest[jj+window/2+1] += ssimFilter[nn]*tmp1;
                    avgRef[jj+window/2+1] += ssimFilter[nn]*tmp2;
                    //local correlation/contrast of each signal
                    contrastTest[jj+window/2+1] += ssimFilter[nn]*tmp1*tmp1;
                    contrastRef[jj+window/2+1] += ssimFilter[nn]*tmp2*tmp2;
                    //local cross correlation
                    crossCorr[jj+window/2+1] += ssimFilter[nn]*tmp1*tmp2;
                }
            }
            //sum over all columns
            for(int k=1;k<window+2;k++)
            {
                avgTest[0] += avgTest[k];
                avgRef[0] += avgRef[k];
                contrastTest[0] += contrastTest[k];
                contrastRef[0] += contrastRef[k];
                crossCorr[0] += crossCorr[k];
            }
            float tmp1 = (contrastTest[0] - avgTest[0]*avgTest[0]);
            if (tmp1 < 0.0) tmp1 = 0.0;
            contrastTest[0] = sqrt(tmp1);
            float tmp2 = (contrastRef[0] - avgRef[0]*avgRef[0]);
            if (tmp2 < 0.0) tmp2 = 0.0;
            contrastRef[0] = sqrt(tmp2);
            crossCorr[0] = crossCorr[0] - avgTest[0]*avgRef[0];
            float ssimCorrCoeff = (crossCorr[0]+offset3)/(contrastTest[0]*contrastRef[0] + offset3);
            float ssimLuminance = (2*avgTest[0]*avgRef[0]+offset1)/(avgTest[0]*avgTest[0] + avgRef[0]*avgRef[0] + offset1);
            float ssimContrast = (2*contrastTest[0]*contrastRef[0]+offset2)/(contrastTest[0]*contrastTest[0] + contrastRef[0]*contrastRef[0] + offset2);
            float ssimPixel = ssimCorrCoeff * ssimLuminance * ssimContrast;
            ssimFrame += ssimPixel;
        }
        ssimFrame = ssimFrame / (numPixels);
        //printf("***SSIM for frame ***%f \n",ssimFrame);
        ssimScene += ssimFrame;
        //
        //SSIM: done with SSIM computation
        //
        frames++;
        refBytes = (int) fread(ref, 1, frameBytes, refFp);
        testBytes = (int) fread(test, 1, frameBytes, testFp);
    }
    delete [] ref;
    delete [] test;
    fclose(refFp);
    fclose(testFp);
    if (frames == 0)
    {
        // Empty range: avoid 0/0 below.
        fprintf(stderr, "No frames compared in SSIMfromFiles\n");
        return -3;
    }
    //SSIM: normalize/average for sequence
    ssimScene = ssimScene / frames;
    *SSIMptr = ssimScene;
    return 0;
}
// Worker-thread entry point: computes SSIM over the frame range
// described by the SSIMcontext, then signals the context's event.
bool
Test::SSIMthread(void *vctx)
{
    SSIMcontext *ctx = (SSIMcontext *) vctx;
    SSIMfromFiles(ctx->refFileName, ctx->testFileName, ctx->width, ctx->height, ctx->SSIMptr, ctx->startFrame, ctx->endFrame);
    ctx->evnt->Set();
    // Returning false -- presumably tells ThreadWrapper not to call this
    // function again (run-once); TODO confirm ThreadWrapper semantics.
    return false;
}
// Computes the average SSIM of the whole decoded sequence by splitting
// the frames across numThreads worker threads and combining the
// per-chunk results in a frame-count-weighted average.
double Test::SSIMfromFilesMT(const int numThreads)
{
    int numFrames = filesize(_inname.c_str()) / _lengthSourceFrame;
    std::vector<int> nFramesVec(numThreads);
    std::vector<double> ssimVec(numThreads);
    int framesPerCore = (numFrames + numThreads - 1) / numThreads; // rounding up
    int i = 0;
    int nFrames;
    // Give framesPerCore frames to each thread; whatever is left over
    // goes to the last thread.
    for (nFrames = numFrames; nFrames >= framesPerCore; nFrames -= framesPerCore)
    {
        nFramesVec[i++] = framesPerCore;
    }
    if (nFrames > 0)
    {
        assert(i == numThreads - 1);
        nFramesVec[i] = nFrames; // remainder
    }
    // NOTE(review): when numFrames < numThreads, trailing threads get 0
    // frames and SSIMfromFiles runs on an empty range -- confirm benign.
    int frameIx = 0;
    std::vector<EventWrapper*> eventVec(numThreads);
    std::vector<ThreadWrapper*> threadVec(numThreads);
    std::vector<SSIMcontext> ctxVec(numThreads);
    for (i = 0; i < numThreads; i++)
    {
        eventVec[i] = EventWrapper::Create();
        ctxVec[i] = SSIMcontext(_inname.c_str(), _outname.c_str(), _inst.width, _inst.height, &ssimVec[i], frameIx, frameIx + nFramesVec[i] - 1, eventVec[i]);
        threadVec[i] = ThreadWrapper::CreateThread(SSIMthread, &(ctxVec[i]), kLowPriority);
        unsigned int id;
        threadVec[i]->Start(id);
        frameIx += nFramesVec[i];
    }
    // wait for all events
    for (i = 0; i < numThreads; i++) {
        eventVec[i]->Wait(100000 /* ms*/);
        threadVec[i]->Stop();
        delete threadVec[i];
        delete eventVec[i];
    }
    // Frame-count-weighted average of the per-thread results.
    double avgSsim = 0;
    for (i = 0; i < numThreads; i++)
    {
        avgSsim += (ssimVec[i] * nFramesVec[i]);
    }
    avgSsim /= numFrames;
    return avgSsim;
}
// Returns the actual average bit rate in bits per second:
// total encoded bits divided by the sequence duration
// (nFrames / maxFramerate seconds).
double Test::ActualBitRate(int nFrames)
{
    // Cast before dividing: the previous integer division truncated the
    // duration (e.g. 45 frames at 30 fps counted as 1 s instead of 1.5 s).
    return 8.0 * _sumEncBytes /
        (static_cast<double>(nFrames) / _inst.maxFramerate);
}
// Bernoulli trial: returns true ("packet lost") with probability lossRate.
bool Test::PacketLoss(double lossRate)
{
    return RandUniform() < lossRate;
}
// Fills a RawImage header from a TestVideoBuffer. This is a shallow
// conversion: image._buffer aliases the buffer's storage, no copy is made.
void
Test::VideoBufferToRawImage(TestVideoBuffer& videoBuffer, RawImage &image)
{
    image._buffer = videoBuffer.GetBuffer();
    image._size = videoBuffer.GetSize();
    image._length = videoBuffer.GetLength();
    image._width = videoBuffer.GetWidth();
    image._height = videoBuffer.GetHeight();
    image._timeStamp = videoBuffer.GetTimeStamp();
}
// Fills an EncodedImage header from a TestVideoEncodedBuffer. Shallow
// conversion: image._buffer aliases the buffer's storage. The frame is
// always marked complete here; callers simulating loss clear the flag.
void
Test::VideoEncodedBufferToEncodedImage(TestVideoEncodedBuffer& videoBuffer, EncodedImage &image)
{
    image._buffer = videoBuffer.GetBuffer();
    image._length = videoBuffer.GetLength();
    image._size = videoBuffer.GetSize();
    image._frameType = static_cast<VideoFrameType>(videoBuffer.GetFrameType());
    image._timeStamp = videoBuffer.GetTimeStamp();
    image._encodedWidth = videoBuffer.GetCaptureWidth();
    image._encodedHeight = videoBuffer.GetCaptureHeight();
    image._completeFrame = true;
}
// Returns the size of the given file in bytes, or 0 if the file cannot
// be opened (previously a missing file caused fseek/fclose on a NULL
// stream, which is undefined behavior).
long filesize(const char *filename)
{
    FILE *f = fopen(filename,"rb"); /* open the file in read only */
    if (f == NULL)
    {
        return 0;
    }
    long size = 0;
    if (fseek(f,0,SEEK_END)==0) /* seek was successful */
        size = ftell(f);
    fclose(f);
    return size;
}

View File

@@ -0,0 +1,82 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAWEWORK_TEST_H_
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAWEWORK_TEST_H_
#include "video_codec_interface.h"
#include "video_buffer.h"
#include <string>
#include <fstream>
#include <cstdlib>
// Abstract base class for the codec test framework: owns the codec
// configuration, the input/output buffers, and the PSNR/SSIM quality
// metrics shared by all concrete tests.
class Test
{
public:
    Test(std::string name, std::string description);
    Test(std::string name, std::string description, WebRtc_UWord32 bitRate);
    virtual ~Test() {};
    // Runs the test; implemented by each concrete test.
    virtual void Perform()=0;
    // Logs the summary and computes PSNR/SSIM on the produced files.
    virtual void Print();
    // The encoder/decoder under test; ownership stays with the caller.
    void SetEncoder(webrtc::VideoEncoder *encoder);
    void SetDecoder(webrtc::VideoDecoder *decoder);
    void SetLog(std::fstream* log);
protected:
    virtual void Setup();
    virtual void CodecSettings(int width,
                               int height,
                               WebRtc_UWord32 frameRate=30,
                               WebRtc_UWord32 bitRate=0);
    virtual void Teardown();
    // Average luma PSNR between two I420 files; result in *YPSNRptr.
    static int PSNRfromFiles(const char *refFileName,
                             const char *testFileName,
                             int width,
                             int height,
                             double *YPSNRptr);
    // Average SSIM between two I420 files over an optional inclusive
    // frame range (-1 = unbounded); result in *SSIMptr.
    // Parameters renamed from startByte/endByte: the definition treats
    // them as frame indices, not byte offsets.
    static int SSIMfromFiles(const char *refFileName,
                             const char *testFileName,
                             int width,
                             int height,
                             double *SSIMptr,
                             int startFrame = -1, int endFrame = -1);
    // Multi-threaded SSIM over the whole sequence.
    double SSIMfromFilesMT(int numThreads);
    static bool SSIMthread(void *ctx);
    double ActualBitRate(int nFrames);
    static bool PacketLoss(double lossRate);
    // Uniform draw in (0, 1].
    static double RandUniform() { return (std::rand() + 1.0)/(RAND_MAX + 1.0); }
    static void VideoBufferToRawImage(TestVideoBuffer& videoBuffer,
                                      webrtc::RawImage &image);
    static void VideoEncodedBufferToEncodedImage(TestVideoEncodedBuffer& videoBuffer,
                                                 webrtc::EncodedImage &image);
    webrtc::VideoEncoder* _encoder;     // not owned
    webrtc::VideoDecoder* _decoder;     // not owned
    WebRtc_UWord32 _bitRate;            // target bit rate in kbps; 0 = default
    unsigned int _lengthSourceFrame;    // bytes in one raw I420 frame
    unsigned char* _sourceBuffer;       // allocated in Setup(), freed in Teardown()
    TestVideoBuffer _inputVideoBuffer;
    TestVideoEncodedBuffer _encodedVideoBuffer;
    TestVideoBuffer _decodedVideoBuffer;
    webrtc::VideoCodec _inst;           // codec settings under test
    std::fstream* _log;                 // not owned
    std::string _inname;                // raw input file
    std::string _outname;               // decoded output file
    std::string _encodedName;           // encoded bitstream file
    int _sumEncBytes;                   // total encoded bytes produced
private:
    std::string _name;
    std::string _description;
};
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAWEWORK_TEST_H_

View File

@@ -0,0 +1,61 @@
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
  'includes': [
    '../../../../common_settings.gypi', # Common settings
  ],
  'targets': [
    {
      # Static library bundling the shared video codec test framework.
      'target_name': 'test_framework',
      'type': '<(library)',
      'dependencies': [
        '../../../../system_wrappers/source/system_wrappers.gyp:system_wrappers',
        '../../../../common_video/vplib/main/source/vplib.gyp:webrtc_vplib',
      ],
      'include_dirs': [
        '../interface',
      ],
      # Targets depending on this library also get the interface headers.
      'direct_dependent_settings': {
        'include_dirs': [
          '../interface',
        ],
      },
      'sources': [
        # header files
        'benchmark.h',
        'normal_async_test.h',
        'normal_test.h',
        'packet_loss_test.h',
        'performance_test.h',
        'test.h',
        'unit_test.h',
        'video_buffer.h',
        'video_source.h',
        # source files
        'benchmark.cc',
        'normal_async_test.cc',
        'normal_test.cc',
        'packet_loss_test.cc',
        'performance_test.cc',
        'test.cc',
        'unit_test.cc',
        'video_buffer.cc',
        'video_source.cc',
      ],
    },
  ],
}
# Local Variables:
# tab-width:2
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=2 shiftwidth=2:

View File

@@ -0,0 +1,815 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "unit_test.h"
#include "video_source.h"
#include "tick_util.h"
#include <cassert>
#include <stdlib.h>
#include <string.h>
#include <math.h>
using namespace webrtc;
// Default unit test; all resources are allocated lazily in Setup().
UnitTest::UnitTest()
:
Test("UnitTest", "Unit test"),
_tests(0),
_errors(0),
_source(NULL),
_refFrame(NULL),
_refEncFrame(NULL),
_refDecFrame(NULL),
_refEncFrameLength(0),
_sourceFile(NULL),
_encodeCompleteCallback(NULL),
_decodeCompleteCallback(NULL)
{
}
// Named unit test; all resources are allocated lazily in Setup().
// The base-class initializer is listed first: the base is constructed
// first regardless of list order, and listing it last (as before)
// triggers -Wreorder and contradicts the other constructor.
UnitTest::UnitTest(std::string name, std::string description)
:
Test(name, description),
_tests(0),
_errors(0),
_source(NULL),
_refFrame(NULL),
_refEncFrame(NULL),
_refDecFrame(NULL),
_refEncFrameLength(0),
_sourceFile(NULL),
_encodeCompleteCallback(NULL),
_decodeCompleteCallback(NULL)
{
}
// Frees everything Setup() allocated. The NULL checks allow destruction
// before (or after a partial) Setup().
UnitTest::~UnitTest()
{
    if (_encodeCompleteCallback) {
        delete _encodeCompleteCallback;
    }
    if (_decodeCompleteCallback) {
        delete _decodeCompleteCallback;
    }
    if (_source) {
        delete _source;
    }
    if (_refFrame) {
        delete [] _refFrame;
    }
    if (_refDecFrame) {
        delete [] _refDecFrame;
    }
    // NOTE(review): Test::Teardown() also deletes _sourceBuffer -- confirm
    // Teardown and this destructor never both free it (double delete).
    if (_sourceBuffer) {
        delete [] _sourceBuffer;
    }
    if (_sourceFile) {
        fclose(_sourceFile);
    }
    if (_refEncFrame) {
        delete [] _refEncFrame;
    }
}
// Encode-complete callback: copies the encoded frame and its metadata
// into the test's buffer and raises the flag polled by
// UnitTest::WaitForEncodedFrame(). codecSpecificInfo and fragmentation
// are ignored.
WebRtc_Word32
UnitTestEncodeCompleteCallback::Encoded(EncodedImage& encodedImage,
                                        const void* codecSpecificInfo,
                                        const webrtc::RTPFragmentationHeader*
                                        fragmentation)
{
    _encodedVideoBuffer->VerifyAndAllocate(encodedImage._size);
    _encodedVideoBuffer->CopyBuffer(encodedImage._size, encodedImage._buffer);
    _encodedVideoBuffer->UpdateLength(encodedImage._length);
    _encodedVideoBuffer->SetFrameType(encodedImage._frameType);
    _encodedVideoBuffer->SetCaptureWidth(
        (WebRtc_UWord16)encodedImage._encodedWidth);
    _encodedVideoBuffer->SetCaptureHeight(
        (WebRtc_UWord16)encodedImage._encodedHeight);
    _encodedVideoBuffer->SetTimeStamp(encodedImage._timeStamp);
    _encodeComplete = true;
    _encodedFrameType = encodedImage._frameType;
    return 0;
}
// Decode-complete callback: copies the raw decoded frame into the
// test's buffer and raises the flag polled by
// UnitTest::WaitForDecodedFrame().
WebRtc_Word32 UnitTestDecodeCompleteCallback::Decoded(RawImage& image)
{
    _decodedVideoBuffer->VerifyAndAllocate(image._length);
    _decodedVideoBuffer->CopyBuffer(image._length, image._buffer);
    _decodedVideoBuffer->SetWidth(image._width);
    _decodedVideoBuffer->SetHeight(image._height);
    _decodedVideoBuffer->SetTimeStamp(image._timeStamp);
    _decodeComplete = true;
    return 0;
}
// Polls and clears the encode-complete flag: returns true exactly once
// per finished encode.
bool
UnitTestEncodeCompleteCallback::EncodeComplete()
{
    const bool wasComplete = _encodeComplete;
    _encodeComplete = false;
    return wasComplete;
}
// Frame type (e.g. key/delta) of the most recently encoded frame.
VideoFrameType
UnitTestEncodeCompleteCallback::EncodedFrameType() const
{
    return _encodedFrameType;
}
// Polls and clears the decode-complete flag: returns true exactly once
// per finished decode.
bool
UnitTestDecodeCompleteCallback::DecodeComplete()
{
    const bool wasComplete = _decodeComplete;
    _decodeComplete = false;
    return wasComplete;
}
// Busy-waits up to kMaxWaitEncTimeMs for the encode callback to fire.
// Returns the encoded frame length, or 0 on timeout.
WebRtc_UWord32
UnitTest::WaitForEncodedFrame() const
{
    WebRtc_Word64 startTime = TickTime::MillisecondTimestamp();
    while (TickTime::MillisecondTimestamp() - startTime < kMaxWaitEncTimeMs)
    {
        if (_encodeCompleteCallback->EncodeComplete())
        {
            return _encodedVideoBuffer.GetLength();
        }
    }
    return 0;
}
// Busy-waits up to kMaxWaitDecTimeMs for the decode callback to fire.
// Returns the decoded frame length, or 0 on timeout.
WebRtc_UWord32
UnitTest::WaitForDecodedFrame() const
{
    WebRtc_Word64 startTime = TickTime::MillisecondTimestamp();
    while (TickTime::MillisecondTimestamp() - startTime < kMaxWaitDecTimeMs)
    {
        if (_decodeCompleteCallback->DecodeComplete())
        {
            return _decodedVideoBuffer.GetLength();
        }
    }
    return 0;
}
// Applies a new target bit rate to the encoder, keeping the configured
// max frame rate (the frameRate argument is ignored here).
WebRtc_UWord32
UnitTest::CodecSpecific_SetBitrate(WebRtc_UWord32 bitRate,
                                   WebRtc_UWord32 /* frameRate */)
{
    return _encoder->SetRates(bitRate, _inst.maxFramerate);
}
// One-time test initialization: registers the encode/decode callbacks,
// opens the source YUV file, allocates the reference buffers, and produces
// one reference encoded frame and one reference decoded frame that later
// bit-exactness checks compare against.
void
UnitTest::Setup()
{
    // Use _sourceFile as a check to prevent multiple Setup() calls.
    if (_sourceFile != NULL)
    {
        return;
    }
    if (_encodeCompleteCallback == NULL)
    {
        _encodeCompleteCallback =
            new UnitTestEncodeCompleteCallback(&_encodedVideoBuffer);
    }
    if (_decodeCompleteCallback == NULL)
    {
        _decodeCompleteCallback =
            new UnitTestDecodeCompleteCallback(&_decodedVideoBuffer);
    }
    _encoder->RegisterEncodeCompleteCallback(_encodeCompleteCallback);
    _decoder->RegisterDecodeCompleteCallback(_decodeCompleteCallback);
    _source = new VideoSource("test/testFiles/foreman_cif.yuv", kCIF);
    _lengthSourceFrame = _source->GetFrameLength();
    _refFrame = new unsigned char[_lengthSourceFrame];
    _refDecFrame = new unsigned char[_lengthSourceFrame];
    _sourceBuffer = new unsigned char [_lengthSourceFrame];
    _sourceFile = fopen(_source->GetFileName().c_str(), "rb");
    VIDEO_TEST_EXIT_ON_ERR(_sourceFile != NULL);
    // Configure the codec instance from the source clip.
    _inst.maxFramerate = _source->GetFrameRate();
    _bitRate = 300;
    _inst.startBitrate = 300;
    _inst.maxBitrate = 4000;
    _inst.width = _source->GetWidth();
    _inst.height = _source->GetHeight();
    // Get input frame.
    _inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
    VIDEO_TEST_EXIT_ON_ERR(fread(_refFrame, 1, _lengthSourceFrame, _sourceFile)
        == _lengthSourceFrame);
    _inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _refFrame);
    rewind(_sourceFile);
    // Get a reference encoded frame.
    _encodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
    RawImage image;
    VideoBufferToRawImage(_inputVideoBuffer, image);
    // Ensures our initial parameters are valid.
    VIDEO_TEST(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
    _encoder->Encode(image, NULL);
    _refEncFrameLength = WaitForEncodedFrame();
    VIDEO_TEST_EXIT_ON_ERR(_refEncFrameLength > 0);
    _refEncFrame = new unsigned char[_refEncFrameLength];
    memcpy(_refEncFrame, _encodedVideoBuffer.GetBuffer(), _refEncFrameLength);
    // Get a reference decoded frame.
    _decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
    VIDEO_TEST(_decoder->InitDecode(&_inst, 1) == WEBRTC_VIDEO_CODEC_OK);
    if (SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK)
    {
        exit(EXIT_FAILURE);
    }
    // Keep feeding frames until the decoder produces output (it may have a
    // delay of one or more frames before the first decoded frame appears).
    int frameLength = 0;
    int i = 0;
    while (frameLength == 0)
    {
        if (i > 0)
        {
            // Insert yet another frame
            _inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
            VIDEO_TEST_EXIT_ON_ERR(fread(_refFrame, 1, _lengthSourceFrame,
                _sourceFile) == _lengthSourceFrame);
            _inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _refFrame);
            _inputVideoBuffer.SetWidth(_source->GetWidth());
            _inputVideoBuffer.SetHeight(_source->GetHeight());
            VideoBufferToRawImage(_inputVideoBuffer, image);
            _encoder->Encode(image, NULL);
            VIDEO_TEST_EXIT_ON_ERR(WaitForEncodedFrame() > 0);
        }
        EncodedImage encodedImage;
        VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
        VIDEO_TEST_EXIT_ON_ERR(_decoder->Decode(encodedImage, 0, NULL)
            == WEBRTC_VIDEO_CODEC_OK);
        frameLength = WaitForDecodedFrame();
        _encodedVideoBuffer.Reset();
        _encodedVideoBuffer.UpdateLength(0);
        i++;
    }
    rewind(_sourceFile);
    VIDEO_TEST(frameLength == _lengthSourceFrame);
    memcpy(_refDecFrame, _decodedVideoBuffer.GetBuffer(), _lengthSourceFrame);
}
void
UnitTest::Teardown()
{
    // A NULL _sourceFile means Setup() never ran, or Teardown() already did.
    if (_sourceFile == NULL)
    {
        return;
    }
    _encoder->Release();
    _decoder->Release();
    fclose(_sourceFile);
    _sourceFile = NULL;
    // Release all reference buffers and reset the pointers so a later
    // Teardown() call is harmless.
    delete [] _sourceBuffer;
    _sourceBuffer = NULL;
    delete [] _refDecFrame;
    _refDecFrame = NULL;
    delete [] _refEncFrame;
    _refEncFrame = NULL;
    delete [] _refFrame;
    _refFrame = NULL;
}
void
UnitTest::Print()
{
    // Summarize the run: total number of checks, then pass/fail status.
    printf("Unit Test\n\n%i tests completed\n", _tests);
    if (_errors == 0)
    {
        printf("ALL PASSED\n\n");
    }
    else
    {
        printf("%i FAILED\n\n", _errors);
    }
}
// Feeds the current encoder output to the decoder without recording any
// pass/fail result. Returns the decoded frame length, or the decoder's
// error code on failure.
int
UnitTest::DecodeWithoutAssert()
{
    EncodedImage encodedImage;
    VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
    const int decodeStatus = _decoder->Decode(encodedImage, 0, NULL);
    const int decodedLength = WaitForDecodedFrame();
    _encodedVideoBuffer.Reset();
    _encodedVideoBuffer.UpdateLength(0);
    if (decodeStatus != WEBRTC_VIDEO_CODEC_OK)
    {
        return decodeStatus;
    }
    return decodedLength;
}
// Decodes the contents of _encodedVideoBuffer and checks that the decoded
// frame, if one was produced, has the expected source-frame length.
// Returns the decoded frame length on success, or the decoder error code.
int
UnitTest::Decode()
{
    EncodedImage encodedImage;
    VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
    if (encodedImage._length == 0)
    {
        // Nothing to decode.
        return WEBRTC_VIDEO_CODEC_OK;
    }
    int ret = _decoder->Decode(encodedImage, 0, NULL);
    int frameLength = WaitForDecodedFrame();
    // A zero frameLength is allowed: the decoder may not have produced
    // output yet (decoder delay).
    assert(ret == WEBRTC_VIDEO_CODEC_OK && (frameLength == 0 || frameLength
        == _lengthSourceFrame));
    VIDEO_TEST(ret == WEBRTC_VIDEO_CODEC_OK && (frameLength == 0 || frameLength
        == _lengthSourceFrame));
    _encodedVideoBuffer.Reset();
    _encodedVideoBuffer.UpdateLength(0);
    return ret == WEBRTC_VIDEO_CODEC_OK ? frameLength : ret;
}
// Test pure virtual VideoEncoder and VideoDecoder APIs.
// Exercises parameter validation, stress paths (re-init, reset, release),
// and timestamp propagation; finishes with the rate-control tests and
// Teardown(). Results are accumulated via the VIDEO_TEST macros.
void
UnitTest::Perform()
{
    UnitTest::Setup();
    int frameLength;
    RawImage inputImage;
    EncodedImage encodedImage;
    EventWrapper& sleepEvent = *EventWrapper::Create();
    //----- Encoder parameter tests -----
    //-- Calls before InitEncode() --
    // We want to revert the initialization done in Setup().
    VIDEO_TEST(_encoder->Release() == WEBRTC_VIDEO_CODEC_OK);
    VideoBufferToRawImage(_inputVideoBuffer, inputImage);
    VIDEO_TEST(_encoder->Encode(inputImage, NULL)
               == WEBRTC_VIDEO_CODEC_UNINITIALIZED);
    VIDEO_TEST(_encoder->Reset() == WEBRTC_VIDEO_CODEC_UNINITIALIZED);
    //-- InitEncode() errors --
    // Null pointer.
    VIDEO_TEST(_encoder->InitEncode(NULL, 1, 1440) ==
        WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
    // bit rate exceeds max bit rate
    WebRtc_Word32 tmpBitRate = _inst.startBitrate;
    WebRtc_Word32 tmpMaxBitRate = _inst.maxBitrate;
    _inst.startBitrate = 4000;
    _inst.maxBitrate = 3000;
    VIDEO_TEST(_encoder->InitEncode(&_inst, 1, 1440)  ==
        WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
    _inst.startBitrate = tmpBitRate;
    _inst.maxBitrate = tmpMaxBitRate; //unspecified value
    // Bad framerate.
    _inst.maxFramerate = 0;
    VIDEO_TEST(_encoder->InitEncode(&_inst, 1, 1440) ==
        WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
    // Seems like we should allow any framerate in range [0, 255].
    //_inst.frameRate = 100;
    //VIDEO_TEST(_encoder->InitEncode(&_inst, 1) == -1); // FAILS
    _inst.maxFramerate = 30;
    // Bad bitrate.
    _inst.startBitrate = -1;
    VIDEO_TEST(_encoder->InitEncode(&_inst, 1, 1440) ==
        WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
    _inst.maxBitrate = _inst.startBitrate - 1;
    VIDEO_TEST(_encoder->InitEncode(&_inst, 1, 1440) ==
        WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
    _inst.maxBitrate = 0;
    _inst.startBitrate = 300;
    // Bad maxBitRate.
    _inst.maxBitrate = 200;
    VIDEO_TEST(_encoder->InitEncode(&_inst, 1, 1440) ==
        WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
    _inst.maxBitrate = 4000;
    // Bad width.
    _inst.width = 0;
    VIDEO_TEST(_encoder->InitEncode(&_inst, 1, 1440) < 0);
    // Should there be a width and height cap?
    //_inst.width = 10000;
    //VIDEO_TEST(_encoder->InitEncode(&_inst, 1) == -1);
    _inst.width = _source->GetWidth();
    // Bad height.
    _inst.height = 0;
    VIDEO_TEST(_encoder->InitEncode(&_inst, 1, 1440) < 0);
    _inst.height = _source->GetHeight();
    // Bad number of cores.
    VIDEO_TEST(_encoder->InitEncode(&_inst, -1, 1440) ==
        WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
    VIDEO_TEST(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
    //-- Encode() errors --
    // inputVideoBuffer unallocated.
    _inputVideoBuffer.Free();
    VideoBufferToRawImage(_inputVideoBuffer, inputImage);
    VIDEO_TEST(_encoder->Encode(inputImage, NULL) ==
        WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
    _inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
    _inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _refFrame);
    //----- Encoder stress tests -----
    // Vary frame rate and I-frame request.
    // Alternate key and delta frames on every other iteration.
    VideoBufferToRawImage(_inputVideoBuffer, inputImage);
    for (int i = 1; i <= 60; i++)
    {
        VideoFrameType frameType = !(i % 2) ? kKeyFrame : kDeltaFrame;
        VIDEO_TEST(_encoder->Encode(inputImage, NULL, frameType) ==
            WEBRTC_VIDEO_CODEC_OK);
        VIDEO_TEST(WaitForEncodedFrame() > 0);
        sleepEvent.Wait(10); // Allow the encoder's queue to realize it's empty.
    }
    // Init then encode.
    _encodedVideoBuffer.UpdateLength(0);
    _encodedVideoBuffer.Reset();
    VIDEO_TEST(_encoder->Encode(inputImage, NULL) == WEBRTC_VIDEO_CODEC_OK);
    VIDEO_TEST(WaitForEncodedFrame() > 0);
    VIDEO_TEST(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
    _encoder->Encode(inputImage, NULL);
    frameLength = WaitForEncodedFrame();
    VIDEO_TEST(frameLength > 0);
    // Re-initializing must reproduce the reference frame bit exactly.
    VIDEO_TEST(CheckIfBitExact(_refEncFrame, _refEncFrameLength,
        _encodedVideoBuffer.GetBuffer(), frameLength) == true);
    // Reset then encode.
    _encodedVideoBuffer.UpdateLength(0);
    _encodedVideoBuffer.Reset();
    VIDEO_TEST(_encoder->Encode(inputImage, NULL) == WEBRTC_VIDEO_CODEC_OK);
    WaitForEncodedFrame();
    VIDEO_TEST(_encoder->Reset() == WEBRTC_VIDEO_CODEC_OK);
    VIDEO_TEST(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
    _encoder->Encode(inputImage, NULL);
    frameLength = WaitForEncodedFrame();
    VIDEO_TEST(frameLength > 0);
    VIDEO_TEST(CheckIfBitExact(_refEncFrame, _refEncFrameLength,
        _encodedVideoBuffer.GetBuffer(), frameLength) == true);
    // Release then encode.
    _encodedVideoBuffer.UpdateLength(0);
    _encodedVideoBuffer.Reset();
    VIDEO_TEST(_encoder->Encode(inputImage, NULL) == WEBRTC_VIDEO_CODEC_OK);
    WaitForEncodedFrame();
    VIDEO_TEST(_encoder->Release() == WEBRTC_VIDEO_CODEC_OK);
    VIDEO_TEST(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
    _encoder->Encode(inputImage, NULL);
    frameLength = WaitForEncodedFrame();
    VIDEO_TEST(frameLength > 0);
    VIDEO_TEST(CheckIfBitExact(_refEncFrame, _refEncFrameLength,
        _encodedVideoBuffer.GetBuffer(), frameLength) == true);
    //----- Decoder parameter tests -----
    //-- Calls before InitDecode() --
    // We want to revert the initialization done in Setup().
    VIDEO_TEST(_decoder->Release() == WEBRTC_VIDEO_CODEC_OK);
    VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
    VIDEO_TEST(_decoder->Decode(encodedImage, false, NULL) ==
        WEBRTC_VIDEO_CODEC_UNINITIALIZED);
    WaitForDecodedFrame();
    VIDEO_TEST(_decoder->Reset() == WEBRTC_VIDEO_CODEC_UNINITIALIZED);
    VIDEO_TEST(_decoder->InitDecode(&_inst, 1) == WEBRTC_VIDEO_CODEC_OK);
    if (SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK)
    {
        exit(EXIT_FAILURE);
    }
    //-- Decode() errors --
    // Unallocated encodedVideoBuffer.
    _encodedVideoBuffer.Free();
    //_encodedVideoBuffer.UpdateLength(10); // Buffer NULL but length > 0
    VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
    VIDEO_TEST(_decoder->Decode(encodedImage, false, NULL) ==
        WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
    _encodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
    //----- Decoder stress tests -----
    unsigned char* tmpBuf = new unsigned char[_lengthSourceFrame];
    // "Random" and zero data.
    // We either expect an error, or at the least, no output.
    // This relies on the codec's ability to detect an erroneous bitstream.
    /*
    VIDEO_TEST(_decoder->Reset() == WEBRTC_VIDEO_CODEC_OK);
    VIDEO_TEST(_decoder->InitDecode(&_inst, 1) == WEBRTC_VIDEO_CODEC_OK);
    if (SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK)
    {
        exit(EXIT_FAILURE);
    }
    for (int i = 0; i < 100; i++)
    {
        VIDEO_TEST_EXIT_ON_ERR(fread(tmpBuf, 1, _refEncFrameLength, _sourceFile)
            == _refEncFrameLength);
        _encodedVideoBuffer.CopyBuffer(_refEncFrameLength, tmpBuf);
        VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
        FillDecoderSpecificInfo(encodedImage);
        int ret = _decoder->Decode(encodedImage, false, _decoderSpecificInfo);
        VIDEO_TEST(ret <= 0);
        if (ret == 0)
        {
            VIDEO_TEST(WaitForDecodedFrame() == 0);
        }
        memset(tmpBuf, 0, _refEncFrameLength);
        _encodedVideoBuffer.CopyBuffer(_refEncFrameLength, tmpBuf);
        VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
        FillDecoderSpecificInfo(encodedImage);
        ret = _decoder->Decode(encodedImage, false, _decoderSpecificInfo);
        VIDEO_TEST(ret <= 0);
        if (ret == 0)
        {
            VIDEO_TEST(WaitForDecodedFrame() == 0);
        }
    }
    */
    rewind(_sourceFile);
    _encodedVideoBuffer.UpdateLength(_refEncFrameLength);
    _encodedVideoBuffer.CopyBuffer(_refEncFrameLength, _refEncFrame);
    // Init then decode.
    VIDEO_TEST(_decoder->InitDecode(&_inst, 1) == WEBRTC_VIDEO_CODEC_OK);
    if (SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK)
    {
        exit(EXIT_FAILURE);
    }
    frameLength = 0;
    VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
    // Loop because the decoder may need several inputs before producing
    // output (decoder delay).
    while (frameLength == 0)
    {
        _decoder->Decode(encodedImage, false, NULL);
        frameLength = WaitForDecodedFrame();
    }
    VIDEO_TEST(CheckIfBitExact(_decodedVideoBuffer.GetBuffer(), frameLength,
        _refDecFrame, _lengthSourceFrame) == true);
    // Reset then decode.
    VIDEO_TEST(_decoder->Reset() == WEBRTC_VIDEO_CODEC_OK);
    frameLength = 0;
    VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
    while (frameLength == 0)
    {
        _decoder->Decode(encodedImage, false, NULL);
        frameLength = WaitForDecodedFrame();
    }
    VIDEO_TEST(CheckIfBitExact(_decodedVideoBuffer.GetBuffer(), frameLength,
        _refDecFrame, _lengthSourceFrame) == true);
    // Decode with other size, reset, then decode with original size again
    // to verify that decoder is reset to a "fresh" state upon Reset().
    {
        // assert that input frame size is a factor of two, so that we can use
        // quarter size below
        VIDEO_TEST((_inst.width % 2 == 0) && (_inst.height % 2 == 0));
        VideoCodec tempInst;
        memcpy(&tempInst, &_inst, sizeof(VideoCodec));
        tempInst.width /= 2;
        tempInst.height /= 2;
        // Encode reduced (quarter) frame size
        VIDEO_TEST(_encoder->Release() == WEBRTC_VIDEO_CODEC_OK);
        VIDEO_TEST(_encoder->InitEncode(&tempInst, 1, 1440) ==
            WEBRTC_VIDEO_CODEC_OK);
        RawImage tempInput(inputImage._buffer, inputImage._length/4,
            inputImage._size/4);
        _encoder->Encode(tempInput, NULL);
        frameLength = WaitForEncodedFrame();
        VIDEO_TEST(frameLength > 0);
        // Reset then decode.
        VIDEO_TEST(_decoder->Reset() == WEBRTC_VIDEO_CODEC_OK);
        frameLength = 0;
        VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
        while (frameLength == 0)
        {
            _decoder->Decode(encodedImage, false, NULL);
            frameLength = WaitForDecodedFrame();
        }
        // Encode original frame again
        VIDEO_TEST(_encoder->Release() == WEBRTC_VIDEO_CODEC_OK);
        VIDEO_TEST(_encoder->InitEncode(&_inst, 1, 1440) ==
            WEBRTC_VIDEO_CODEC_OK);
        _encoder->Encode(inputImage, NULL);
        frameLength = WaitForEncodedFrame();
        VIDEO_TEST(frameLength > 0);
        // Reset then decode original frame again.
        VIDEO_TEST(_decoder->Reset() == WEBRTC_VIDEO_CODEC_OK);
        frameLength = 0;
        VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
        while (frameLength == 0)
        {
            _decoder->Decode(encodedImage, false, NULL);
            frameLength = WaitForDecodedFrame();
        }
        // check that decoded frame matches with reference
        VIDEO_TEST(CheckIfBitExact(_decodedVideoBuffer.GetBuffer(), frameLength,
            _refDecFrame, _lengthSourceFrame) == true);
    }
    // Release then decode.
    VIDEO_TEST(_decoder->Release() == WEBRTC_VIDEO_CODEC_OK);
    VIDEO_TEST(_decoder->InitDecode(&_inst, 1) == WEBRTC_VIDEO_CODEC_OK);
    if (SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK)
    {
        exit(EXIT_FAILURE);
    }
    frameLength = 0;
    VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
    while (frameLength == 0)
    {
        _decoder->Decode(encodedImage, false, NULL);
        frameLength = WaitForDecodedFrame();
    }
    VIDEO_TEST(CheckIfBitExact(_decodedVideoBuffer.GetBuffer(), frameLength,
        _refDecFrame, _lengthSourceFrame) == true);
    _encodedVideoBuffer.UpdateLength(0);
    _encodedVideoBuffer.Reset();
    delete [] tmpBuf;
    //----- Function tests -----
    int frames = 0;
    // Do not specify maxBitRate (as in ViE).
    _inst.maxBitrate = 0;
    //-- Timestamp propagation --
    VIDEO_TEST(_encoder->Reset() == WEBRTC_VIDEO_CODEC_OK);
    VIDEO_TEST(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
    VIDEO_TEST(_decoder->Reset() == WEBRTC_VIDEO_CODEC_OK);
    VIDEO_TEST(_decoder->InitDecode(&_inst, 1) == WEBRTC_VIDEO_CODEC_OK);
    if (SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK)
    {
        exit(EXIT_FAILURE);
    }
    printf("\nTimestamp propagation test...\n");
    frames = 0;
    int frameDelay = 0;
    int encTimeStamp;
    _decodedVideoBuffer.SetTimeStamp(0);
    // Stream the whole clip, checking that the timestamp given to Encode()
    // comes back unchanged on the encoded frame and (delay-adjusted) on the
    // decoded frame.
    while (fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile) ==
        _lengthSourceFrame)
    {
        _inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _sourceBuffer);
        _inputVideoBuffer.SetTimeStamp(frames);
        VideoBufferToRawImage(_inputVideoBuffer, inputImage);
        VIDEO_TEST_EXIT_ON_ERR(_encoder->Encode(inputImage, NULL) ==
            WEBRTC_VIDEO_CODEC_OK);
        frameLength = WaitForEncodedFrame();
        //VIDEO_TEST_EXIT_ON_ERR(frameLength);
        VIDEO_TEST(frameLength > 0);
        encTimeStamp = _encodedVideoBuffer.GetTimeStamp();
        VIDEO_TEST(_inputVideoBuffer.GetTimeStamp() == encTimeStamp);
        frameLength = Decode();
        if (frameLength == 0)
        {
            // No output yet: the decoder delay grew by one frame.
            frameDelay++;
        }
        encTimeStamp -= frameDelay;
        if (encTimeStamp < 0)
        {
            encTimeStamp = 0;
        }
        VIDEO_TEST(_decodedVideoBuffer.GetTimeStamp() == encTimeStamp);
        frames++;
        sleepEvent.Wait(33);
    }
    delete &sleepEvent;
    VIDEO_TEST_EXIT_ON_ERR(feof(_sourceFile) != 0);
    rewind(_sourceFile);
    RateControlTests();
    Teardown();
}
// Encodes the source sequence at a range of target bit rates and checks
// that the codec's actual output rate lands close to each target.
void
UnitTest::RateControlTests()
{
    int frames = 0;
    RawImage inputImage;
    WebRtc_UWord32 frameLength;
    EventWrapper& sleepEvent = *EventWrapper::Create();
    // Do not specify maxBitRate (as in ViE).
    _inst.maxBitrate = 0;
    //-- Verify rate control --
    VIDEO_TEST(_encoder->Reset() == WEBRTC_VIDEO_CODEC_OK);
    VIDEO_TEST(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
    VIDEO_TEST(_decoder->Reset() == WEBRTC_VIDEO_CODEC_OK);
    VIDEO_TEST(_decoder->InitDecode(&_inst, 1) == WEBRTC_VIDEO_CODEC_OK);
    // add: should also be 0, and 1
    const int bitRate[] =
        {100, 200, 300, 400, 500, 600, 800, 1000, 2000, 3000, 4000, 10000};
    const int nBitrates = sizeof(bitRate)/sizeof(*bitRate);
    printf("\nRate control test\n");
    for (int i = 0; i < nBitrates; i++)
    {
        _bitRate = bitRate[i];
        int totalBytes = 0;
        _encoder->Reset();
        _inst.startBitrate = _bitRate;
        _encoder->InitEncode(&_inst, 4, 1440);
        _decoder->Reset();
        _decoder->InitDecode(&_inst, 1);
        frames = 0;
        // Apply the new target rate. (The previous code had an if/else here
        // whose branches made the identical call; one call is equivalent.)
        CodecSpecific_SetBitrate(_bitRate, _inst.maxFramerate);
        while (fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile) ==
            _lengthSourceFrame)
        {
            _inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _sourceBuffer);
            // Advance the timestamp by one frame interval (90 kHz clock).
            _inputVideoBuffer.SetTimeStamp(_inputVideoBuffer.GetTimeStamp() +
                static_cast<WebRtc_UWord32>(9e4 /
                    static_cast<float>(_inst.maxFramerate)));
            VideoBufferToRawImage(_inputVideoBuffer, inputImage);
            VIDEO_TEST_EXIT_ON_ERR(_encoder->Encode(inputImage, NULL) ==
                WEBRTC_VIDEO_CODEC_OK);
            frameLength = WaitForEncodedFrame();
            VIDEO_TEST_EXIT_ON_ERR(frameLength > 0);
            totalBytes += frameLength;
            frames++;
            _encodedVideoBuffer.UpdateLength(0);
            _encodedVideoBuffer.Reset();
            sleepEvent.Wait(10); // Allow the encoder's queue to drain.
        }
        // Guard the average computation against division by zero if the
        // source file yielded no complete frames.
        VIDEO_TEST_EXIT_ON_ERR(frames > 0);
        WebRtc_UWord32 actualBitrate =
            (totalBytes / frames * _inst.maxFramerate * 8)/1000;
        printf("Target bitrate: %d kbps, actual bitrate: %d kbps\n", _bitRate,
            actualBitrate);
        // Test for close match over reasonable range.
        if (_bitRate >= 100 && _bitRate <= 4000)
        {
            // Allow up to 10% deviation from the target (suitable for VP8).
            VIDEO_TEST(abs(WebRtc_Word32(actualBitrate - _bitRate)) <
                0.1 * _bitRate);
        }
        VIDEO_TEST_EXIT_ON_ERR(feof(_sourceFile) != 0);
        rewind(_sourceFile);
    }
    // The previous code leaked the event: EventWrapper::Create() returns a
    // heap object that must be deleted (as Perform() does).
    delete &sleepEvent;
}
// Two buffers are bit exact iff they have the same length and identical
// contents.
bool
UnitTest::CheckIfBitExact(const void* ptrA, unsigned int aLengthBytes,
                          const void* ptrB, unsigned int bLengthBytes)
{
    return (aLengthBytes == bLengthBytes) &&
           (memcmp(ptrA, ptrB, aLengthBytes) == 0);
}

View File

@@ -0,0 +1,133 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_UNIT_TEST_H_
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_UNIT_TEST_H_
#include "test.h"
#include "event_wrapper.h"
// Disable "conditional expression is constant" warnings on the perfectly
// acceptable
// do { ... } while (0) constructions below.
// Refer to http://stackoverflow.com/questions/1946445/
// is-there-better-way-to-write-do-while0-construct-to-avoid-compiler-warnings
// for some discussion of the issue.
#pragma warning(disable : 4127)
// Non-fatal check: counts the test in _tests; on failure prints the failing
// expression with its location and increments _errors, but execution
// continues. (No // comments inside the macro bodies: a line comment would
// swallow the backslash continuation.)
#define VIDEO_TEST(expr) \
    do \
    { \
        _tests++; \
        if (!(expr)) \
        { \
            fprintf(stderr, "Error at line %i of %s\nAssertion failed: %s\n\n",\
                    __LINE__, __FILE__, #expr); \
            _errors++; \
        } \
    } while (0)

// Fatal check: on failure prints the failing expression with its location
// and terminates the whole test program.
#define VIDEO_TEST_EXIT_ON_ERR(expr) \
    do \
    { \
        if (!(expr)) \
        { \
            fprintf(stderr, "Error at line %i of %s\nAssertion failed: %s\n", \
                    __LINE__, __FILE__, #expr); \
            fprintf(stderr, "Exiting...\n\n"); \
            exit(EXIT_FAILURE); \
        } \
    } while (0)
class VideoSource;
class UnitTestEncodeCompleteCallback;
class UnitTestDecodeCompleteCallback;
// Generic encoder/decoder API test. Codec-specific suites derive from this
// class and override the CodecSpecific_* / SetCodecSpecificParameters hooks.
class UnitTest : public Test
{
public:
    UnitTest();
    virtual ~UnitTest();
    // Runs the full encoder/decoder API test suite.
    virtual void Perform();
    // Prints a pass/fail summary of the executed checks.
    virtual void Print();
protected:
    UnitTest(std::string name, std::string description);
    // Forwards a new target bit rate to the encoder; subclasses may supply
    // codec-specific behavior.
    virtual WebRtc_UWord32 CodecSpecific_SetBitrate(
        WebRtc_UWord32 bitRate,
        WebRtc_UWord32 /* frameRate */);
    virtual void Setup();
    virtual void Teardown();
    virtual void RateControlTests();
    // Decodes _encodedVideoBuffer; returns the decoded frame length or an
    // error code. Decode() records pass/fail, DecodeWithoutAssert() doesn't.
    virtual int Decode();
    virtual int DecodeWithoutAssert();
    // Hook for codec-specific decoder configuration; returns 0 on success.
    virtual int SetCodecSpecificParameters() {return 0;};
    virtual bool CheckIfBitExact(const void *ptrA, unsigned int aLengthBytes,
                                 const void *ptrB, unsigned int bLengthBytes);
    // Busy-wait until a frame is ready; return its length in bytes, or 0 on
    // timeout.
    WebRtc_UWord32 WaitForEncodedFrame() const;
    WebRtc_UWord32 WaitForDecodedFrame() const;
    int _tests;   // Number of checks executed.
    int _errors;  // Number of failed checks.
    VideoSource* _source;
    unsigned char* _refFrame;     // First raw source frame.
    unsigned char* _refEncFrame;  // Reference encoded frame.
    unsigned char* _refDecFrame;  // Reference decoded frame.
    int _refEncFrameLength;
    FILE* _sourceFile;
    UnitTestEncodeCompleteCallback* _encodeCompleteCallback;
    UnitTestDecodeCompleteCallback* _decodeCompleteCallback;
    enum { kMaxWaitEncTimeMs = 100 };  // Encode wait timeout.
    enum { kMaxWaitDecTimeMs = 25 };   // Decode wait timeout.
};
// Receives encoded frames from the encoder under test and copies them into
// the supplied TestVideoEncodedBuffer.
class UnitTestEncodeCompleteCallback : public webrtc::EncodedImageCallback
{
public:
    // The initializer list below follows the member declaration order
    // (members are initialized in declaration order regardless of the list
    // order); the previous code listed _decoderSpecificSize before
    // _decoderSpecificInfo, triggering -Wreorder warnings.
    UnitTestEncodeCompleteCallback(TestVideoEncodedBuffer* buffer,
                                   WebRtc_UWord32 decoderSpecificSize = 0,
                                   void* decoderSpecificInfo = NULL) :
      _encodedVideoBuffer(buffer),
      _decoderSpecificInfo(decoderSpecificInfo),
      _decoderSpecificSize(decoderSpecificSize),
      _encodeComplete(false) {}
    WebRtc_Word32 Encoded(webrtc::EncodedImage& encodedImage,
                          const void* codecSpecificInfo,
                          const webrtc::RTPFragmentationHeader*
                          fragmentation = NULL);
    // One-shot: returns true once per completed encode, then resets.
    bool EncodeComplete();
    // Note that this only makes sense if an encode has been completed
    webrtc::VideoFrameType EncodedFrameType() const;
private:
    TestVideoEncodedBuffer* _encodedVideoBuffer;
    void* _decoderSpecificInfo;
    WebRtc_UWord32 _decoderSpecificSize;
    bool _encodeComplete;
    webrtc::VideoFrameType _encodedFrameType;
};
// Receives decoded frames from the decoder under test and copies them into
// the supplied TestVideoBuffer.
class UnitTestDecodeCompleteCallback : public webrtc::DecodedImageCallback
{
public:
    UnitTestDecodeCompleteCallback(TestVideoBuffer* buffer) :
        _decodedVideoBuffer(buffer), _decodeComplete(false) {}
    WebRtc_Word32 Decoded(webrtc::RawImage& image);
    // One-shot: returns true once per completed decode, then resets.
    bool DecodeComplete();
private:
    TestVideoBuffer* _decodedVideoBuffer;
    bool _decodeComplete;
};
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_UNIT_TEST_H_

View File

@@ -0,0 +1,319 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <assert.h>
#include <string.h>
#include "video_buffer.h"
using namespace webrtc;
// Constructs an empty buffer: no storage allocated, all metadata zeroed.
TestVideoBuffer::TestVideoBuffer():
    _buffer(0),
    _bufferSize(0),
    _bufferLength(0),
    _startOffset(0),
    _timeStamp(0),
    _width(0),
    _height(0)
{
    //
}
TestVideoBuffer::~TestVideoBuffer()
{
    // Reset the bookkeeping and release the storage; delete [] is a no-op
    // on a null pointer, so no guard is needed.
    _timeStamp = 0;
    _startOffset = 0;
    _bufferLength = 0;
    _bufferSize = 0;
    delete [] _buffer;
    _buffer = 0;
}
// Deep copy: allocates storage of the same size and duplicates the payload
// at the same start offset.
TestVideoBuffer::TestVideoBuffer(const TestVideoBuffer& rhs)
:
_buffer(0),
_bufferSize(rhs._bufferSize),
_bufferLength(rhs._bufferLength),
_startOffset(rhs._startOffset),
_timeStamp(rhs._timeStamp),
_width(rhs._width),
_height(rhs._height)
{
    // make sure that our buffer is big enough
    _buffer = new unsigned char[_bufferSize];
    // Copy only the valid payload, reading from the same offset we write to.
    // The previous code read from rhs._buffer without adding the source's
    // start offset, so it copied the wrong bytes whenever rhs had a
    // non-zero offset.
    memcpy(_buffer + _startOffset, rhs._buffer + rhs._startOffset,
           _bufferLength);
}
// Sets the frame timestamp (90 kHz units).
void TestVideoBuffer::SetTimeStamp(unsigned int timeStamp)
{
    _timeStamp = timeStamp;
}

// Returns the frame width in pixels.
unsigned int
TestVideoBuffer::GetWidth() const
{
    return _width;
}

// Returns the frame height in pixels.
unsigned int
TestVideoBuffer::GetHeight() const
{
    return _height;
}

// Sets the frame width in pixels.
void
TestVideoBuffer::SetWidth(unsigned int width)
{
    _width = width;
}

// Sets the frame height in pixels.
void
TestVideoBuffer::SetHeight(unsigned int height)
{
    _height = height;
}
void TestVideoBuffer::Free()
{
    // Release the frame storage and return the object to its
    // default-constructed state. delete [] handles a null pointer.
    delete [] _buffer;
    _buffer = 0;
    _timeStamp = 0;
    _startOffset = 0;
    _bufferLength = 0;
    _bufferSize = 0;
    _height = 0;
    _width = 0;
}
void TestVideoBuffer::VerifyAndAllocate(unsigned int minimumSize)
{
    // Grow the allocation if needed; this never shrinks the buffer.
    if (minimumSize <= _bufferSize)
    {
        return;
    }
    unsigned char* enlargedBuffer = new unsigned char[minimumSize];
    if (_buffer)
    {
        // Preserve the previously stored data.
        memcpy(enlargedBuffer, _buffer, _bufferSize);
        delete [] _buffer;
    }
    _buffer = enlargedBuffer;
    _bufferSize = minimumSize;
}
// Moves the start-of-frame offset within the buffer to 'length' bytes,
// adjusting _bufferLength so that offset + length continues to describe the
// same region end where possible. Returns 0 on success, -1 if the requested
// offset exceeds the allocation or the current payload length.
int TestVideoBuffer::SetOffset(unsigned int length)
{
    if (length > _bufferSize ||
        length > _bufferLength)
    {
        return -1;
    }

    unsigned int oldOffset = _startOffset;

    if (oldOffset > length)
    {
        // Offset moves backwards: the payload grows by the difference.
        unsigned int newLength = _bufferLength + (oldOffset-length);// increase by the diff
        assert(newLength <= _bufferSize);
        _bufferLength = newLength;
    }
    if (oldOffset < length)
    {
        // Offset moves forwards: the payload shrinks by the difference
        // (but is never reduced below zero).
        if (_bufferLength > (length-oldOffset))
        {
            _bufferLength -= (length-oldOffset); // decrease by the diff
        }
    }

    _startOffset = length; // update
    return 0;
}
// Sets the payload length; newLength plus the start offset must fit within
// the allocated buffer.
void TestVideoBuffer::UpdateLength(unsigned int newLength)
{
    assert(newLength +_startOffset <= _bufferSize);
    _bufferLength = newLength;
}

// Copies 'length' bytes from 'buffer' into this frame (at the current start
// offset) and updates the payload length accordingly.
void TestVideoBuffer::CopyBuffer(unsigned int length, const unsigned char* buffer)
{
    assert(length+_startOffset <= _bufferSize);
    memcpy(_buffer+_startOffset, buffer, length);
    _bufferLength = length;
}
// Deep-copies another buffer's payload and metadata into this one. The
// destination must already be allocated large enough to hold the source's
// payload at the source's start offset.
void TestVideoBuffer::CopyBuffer(TestVideoBuffer& fromVideoBuffer)
{
    assert(fromVideoBuffer.GetLength() + fromVideoBuffer.GetStartOffset() <= _bufferSize);
    assert(fromVideoBuffer.GetSize() <= _bufferSize);
    _bufferLength = fromVideoBuffer.GetLength();
    _startOffset = fromVideoBuffer.GetStartOffset();
    _timeStamp = fromVideoBuffer.GetTimeStamp();
    _height = fromVideoBuffer.GetHeight();
    _width = fromVideoBuffer.GetWidth();
    // only copy required length
    memcpy(_buffer+_startOffset, fromVideoBuffer.GetBuffer(), fromVideoBuffer.GetLength()); // GetBuffer() includes _startOffset
}
void TestVideoBuffer::CopyPointer(const TestVideoBuffer& fromVideoBuffer)
{
_bufferSize = fromVideoBuffer.GetSize();
_bufferLength = fromVideoBuffer.GetLength();
_startOffset = fromVideoBuffer.GetStartOffset();
_timeStamp = fromVideoBuffer.GetTimeStamp();
_height = fromVideoBuffer.GetHeight();
_width = fromVideoBuffer.GetWidth();
_buffer = fromVideoBuffer.GetBuffer();
}
// Drops the (non-owned) buffer pointer set by CopyPointer() so that this
// object's destructor/Free() will not delete storage it does not own.
void TestVideoBuffer::ClearPointer()
{
    _buffer = NULL;
}
// Exchanges all state with videoBuffer (pointer swap, no data copying).
void TestVideoBuffer::SwapBuffers(TestVideoBuffer& videoBuffer)
{
    unsigned char* tempBuffer = _buffer;
    unsigned int tempSize = _bufferSize;
    unsigned int tempLength =_bufferLength;
    unsigned int tempOffset = _startOffset;
    unsigned int tempTime = _timeStamp;
    unsigned int tempHeight = _height;
    unsigned int tempWidth = _width;
    // Read the raw allocation pointer directly instead of GetBuffer():
    // GetBuffer() returns _buffer + _startOffset, and the previous code
    // stored that adjusted pointer while also copying _startOffset, which
    // double-applied the offset and left a pointer that is invalid to
    // delete [].
    _buffer = videoBuffer._buffer;
    _bufferSize = videoBuffer._bufferSize;
    _bufferLength = videoBuffer._bufferLength;
    _startOffset = videoBuffer._startOffset;
    _timeStamp = videoBuffer._timeStamp;
    _height = videoBuffer._height;
    _width = videoBuffer._width;
    videoBuffer.Set(tempBuffer, tempSize, tempLength, tempOffset, tempTime);
    videoBuffer.SetHeight(tempHeight);
    videoBuffer.SetWidth(tempWidth);
}
// Bulk-assigns the buffer pointer and metadata (used by SwapBuffers()).
// Ownership of tempBuffer transfers to this object.
void TestVideoBuffer::Set(unsigned char* tempBuffer,unsigned int tempSize,unsigned int tempLength, unsigned int tempOffset,unsigned int timeStamp)
{
    _buffer = tempBuffer;
    _bufferSize = tempSize;
    _bufferLength = tempLength;
    _startOffset = tempOffset;
    _timeStamp = timeStamp;
}

// Returns a pointer to the start of the frame (allocation + start offset).
unsigned char* TestVideoBuffer::GetBuffer() const
{
    return _buffer+_startOffset;
}

// Returns the offset (bytes) from the allocation to the start of the frame.
unsigned int TestVideoBuffer::GetStartOffset() const
{
    return _startOffset;
}

// Returns the allocated buffer size in bytes.
unsigned int TestVideoBuffer::GetSize() const
{
    return _bufferSize;
}

// Returns the frame payload length in bytes.
unsigned int TestVideoBuffer::GetLength() const
{
    return _bufferLength;
}

// Returns the frame timestamp (90 kHz units).
unsigned int TestVideoBuffer::GetTimeStamp() const
{
    return _timeStamp;
}
/**
* TestVideoEncodedBuffer
*
*/
// Constructs an empty encoded-frame buffer: capture dimensions zeroed,
// frame rate unknown (-1), frame type delta.
TestVideoEncodedBuffer::TestVideoEncodedBuffer() :
    _captureWidth(0),
    _captureHeight(0),
    _frameRate(-1)
{
    _frameType = kDeltaFrame;
}

TestVideoEncodedBuffer::~TestVideoEncodedBuffer()
{
}

// Sets the capture width (pixels) of the frame this payload encodes.
void TestVideoEncodedBuffer::SetCaptureWidth(unsigned short width)
{
    _captureWidth = width;
}

// Sets the capture height (pixels) of the frame this payload encodes.
void TestVideoEncodedBuffer::SetCaptureHeight(unsigned short height)
{
    _captureHeight = height;
}

unsigned short TestVideoEncodedBuffer::GetCaptureWidth()
{
    return _captureWidth;
}

unsigned short TestVideoEncodedBuffer::GetCaptureHeight()
{
    return _captureHeight;
}

// Returns the frame type (key/delta) of the encoded payload.
VideoFrameType TestVideoEncodedBuffer::GetFrameType()
{
    return _frameType;
}

void TestVideoEncodedBuffer::SetFrameType(VideoFrameType frametype)
{
    _frameType = frametype;
}

// Resets the encoded-frame metadata to its constructed state. Note: does
// not touch the underlying payload buffer.
void TestVideoEncodedBuffer::Reset()
{
    _captureWidth = 0;
    _captureHeight = 0;
    _frameRate = -1;
    _frameType = kDeltaFrame;
}

void TestVideoEncodedBuffer::SetFrameRate(float frameRate)
{
    _frameRate = frameRate;
}

float TestVideoEncodedBuffer::GetFrameRate()
{
    return _frameRate;
}

View File

@@ -0,0 +1,122 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_VIDEO_BUFFER_H_
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_VIDEO_BUFFER_H_
#include "typedefs.h"
#include "video_image.h"
// Owning byte buffer for a raw video frame plus metadata (timestamp,
// dimensions, start offset). Copying is deep; CopyPointer() offers a
// non-owning shallow alias that must be released with ClearPointer().
class TestVideoBuffer
{
public:
    TestVideoBuffer();

    virtual ~TestVideoBuffer();

    TestVideoBuffer(const TestVideoBuffer& rhs);

    /**
    * Verifies that current allocated buffer size is larger than or equal to the input size.
    * If the current buffer size is smaller, a new allocation is made and the old buffer data is copied to the new buffer.
    */
    void VerifyAndAllocate(unsigned int minimumSize);

    void UpdateLength(unsigned int newLength);

    void SwapBuffers(TestVideoBuffer& videoBuffer);

    void CopyBuffer(unsigned int length, const unsigned char* fromBuffer);

    void CopyBuffer(TestVideoBuffer& fromVideoBuffer);

    // Use with care, and remember to call ClearPointer() when done.
    void CopyPointer(const TestVideoBuffer& fromVideoBuffer);

    void ClearPointer();

    int SetOffset(unsigned int length); // Sets offset to beginning of frame in buffer

    void Free(); // Deletes frame buffer and resets members to zero

    void SetTimeStamp(unsigned int timeStamp); // Sets timestamp of frame (90kHz)

    /**
    * Gets pointer to frame buffer
    */
    unsigned char* GetBuffer() const;

    /**
    * Gets allocated buffer size
    */
    unsigned int GetSize() const;

    /**
    * Gets length of frame
    */
    unsigned int GetLength() const;

    /**
    * Gets timestamp of frame (90kHz)
    */
    unsigned int GetTimeStamp() const;

    unsigned int GetWidth() const;
    unsigned int GetHeight() const;

    void SetWidth(unsigned int width);
    void SetHeight(unsigned int height);

private:
    // Copy assignment is intentionally disabled (declared, not defined).
    TestVideoBuffer& operator=(const TestVideoBuffer& inBuffer);

private:
    void Set(unsigned char* buffer,unsigned int size,unsigned int length,unsigned int offset, unsigned int timeStamp);

    unsigned int GetStartOffset() const;

    unsigned char* _buffer;       // Pointer to frame buffer
    unsigned int _bufferSize;     // Allocated buffer size
    unsigned int _bufferLength;   // Length (in bytes) of frame
    unsigned int _startOffset;    // Offset (in bytes) to beginning of frame in buffer
    unsigned int _timeStamp;      // Timestamp of frame (90kHz)
    unsigned int _width;
    unsigned int _height;
};
// Buffer for an encoded video frame: extends TestVideoBuffer with capture
// dimensions, frame type (key/delta), and frame rate metadata.
class TestVideoEncodedBuffer: public TestVideoBuffer
{
public:
    TestVideoEncodedBuffer();
    ~TestVideoEncodedBuffer();

    void SetCaptureWidth(unsigned short width);
    void SetCaptureHeight(unsigned short height);
    unsigned short GetCaptureWidth();
    unsigned short GetCaptureHeight();

    webrtc::VideoFrameType GetFrameType();
    void SetFrameType(webrtc::VideoFrameType frametype);

    // Resets the encoded-frame metadata (not the payload buffer).
    void Reset();

    void SetFrameRate(float frameRate);
    float GetFrameRate();

private:
    // Copy assignment is intentionally disabled (declared, not defined).
    TestVideoEncodedBuffer& operator=(const TestVideoEncodedBuffer& inBuffer);

private:
    unsigned short _captureWidth;
    unsigned short _captureHeight;
    webrtc::VideoFrameType _frameType;
    float _frameRate;
};
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_VIDEO_BUFFER_H_

View File

@@ -0,0 +1,417 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "video_source.h"
#include "vplib.h"
#include <cassert>
#include <stdio.h>
// Default-constructs a source describing the standard test clip:
// foreman.yuv, CIF (352x288), I420, 30 fps.
VideoSource::VideoSource()
:
_fileName("../../../../testFiles/foreman.yuv"),
_width(352),
_height(288),
_type(webrtc::kI420),
_frameRate(30)
{
}
// Constructs a video source whose pixel dimensions are derived from a
// VideoSize enumerator.
// fileName  - path to the raw video file.
// size      - named resolution; must be a concrete size (not kUndefined
//             or kNumberOfVideoSizes).
// frameRate - frames per second (must be > 0).
// type      - pixel format (must not be webrtc::kUnknown).
VideoSource::VideoSource(std::string fileName, VideoSize size,
    int frameRate /*= 30*/, webrtc::VideoType type /*= webrtc::kI420*/)
:
_fileName(fileName),
_width(0),
_height(0),
_type(type),
_frameRate(frameRate)
{
    assert(size != kUndefined && size != kNumberOfVideoSizes);
    assert(type != webrtc::kUnknown);
    assert(frameRate > 0);
    // Bug fix: the original wrapped this call in assert(), so under NDEBUG
    // the call was compiled out and _width/_height were never set. The
    // lookup must run unconditionally; only the result check is an assert.
    const int ret = GetWidthHeight(size, _width, _height);
    assert(ret == 0);
    (void)ret; // avoid an unused-variable warning when asserts are disabled
}
// Constructs a video source with explicit pixel dimensions.
// fileName  - path to the raw video file.
// width     - frame width in pixels (must be > 0).
// height    - frame height in pixels (must be > 0).
// frameRate - frames per second (must be > 0).
// type      - pixel format (must not be webrtc::kUnknown).
VideoSource::VideoSource(std::string fileName, int width, int height,
int frameRate /*= 30*/, webrtc::VideoType type /*= webrtc::kI420*/)
:
_fileName(fileName),
_width(width),
_height(height),
_type(type),
_frameRate(frameRate)
{
assert(width > 0);
assert(height > 0);
assert(type != webrtc::kUnknown);
assert(frameRate > 0);
}
// Returns the VideoSize enumerator matching this source's dimensions,
// or kUndefined if the dimensions match no known named size.
VideoSize
VideoSource::GetSize() const
{
return GetSize(_width, _height);
}
// Maps a (width, height) pair to the matching VideoSize enumerator.
// Table-driven equivalent of a long if/else-if chain; any resolution
// not listed maps to kUndefined.
VideoSize
VideoSource::GetSize(WebRtc_UWord16 width, WebRtc_UWord16 height)
{
    struct Resolution
    {
        unsigned int width;
        unsigned int height;
        VideoSize size;
    };
    static const Resolution knownSizes[] =
    {
        {  128,   96, kSQCIF   },
        {  160,  120, kQQVGA   },
        {  176,  144, kQCIF    },
        {  320,  240, kQVGA    },
        {  352,  288, kCIF     },
        {  640,  480, kVGA     },
        {  720,  480, kNTSC    },
        {  704,  576, k4CIF    },
        {  800,  600, kSVGA    },
        {  960,  720, kHD      },
        { 1024,  768, kXGA     },
        { 1440, 1080, kFullHD  },
        {  400,  240, kWQVGA   },
        {  800,  480, kWVGA    },
        { 1280,  720, kWHD     },
        { 1920, 1080, kWFullHD }
    };
    const size_t numSizes = sizeof(knownSizes) / sizeof(knownSizes[0]);
    for (size_t i = 0; i < numSizes; i++)
    {
        if (knownSizes[i].width == width && knownSizes[i].height == height)
        {
            return knownSizes[i].size;
        }
    }
    return kUndefined;
}
// Returns the size in bytes of one frame, computed from the source's
// pixel format and dimensions.
unsigned int
VideoSource::GetFrameLength() const
{
return webrtc::CalcBufferSize(_type, _width, _height);
}
// Returns a human-readable string for this source's size (e.g. "CIF").
const char*
VideoSource::GetMySizeString() const
{
return VideoSource::GetSizeString(GetSize());
}
// Returns a human-readable name for a VideoSize enumerator.
// Fix: the original switch omitted several enumerators (kCGA, kSIF,
// kW288p, k448p, k432p, kW432p, k4SIF, kW448p, kFW448p, kWVGA, kW544p,
// kW576p) -- notably kWVGA, which GetSize() can return -- so they all
// printed as "Undefined". All original strings are unchanged.
const char*
VideoSource::GetSizeString(VideoSize size)
{
    switch (size)
    {
        case kSQCIF:
            return "SQCIF";
        case kQQVGA:
            return "QQVGA";
        case kQCIF:
            return "QCIF";
        case kCGA:
            return "CGA";
        case kQVGA:
            return "QVGA";
        case kSIF:
            return "SIF";
        case kWQVGA:
            return "WQVGA";
        case kCIF:
            return "CIF";
        case kW288p:
            return "W288p";
        case k448p:
            return "448p";
        case kVGA:
            return "VGA";
        case k432p:
            return "432p";
        case kW432p:
            return "W432p";
        case k4SIF:
            return "4SIF";
        case kW448p:
            return "W448p";
        case kNTSC:
            return "NTSC";
        case kFW448p:
            return "FW448p";
        case kWVGA:
            return "WVGA";
        case k4CIF:
            return "4CIF";
        case kSVGA:
            return "SVGA";
        case kW544p:
            return "W544p";
        case kW576p:
            return "W576p";
        case kHD:
            return "HD";
        case kXGA:
            return "XGA";
        case kFullHD:
            return "Full_HD";
        case kWHD:
            return "WHD";
        case kWFullHD:
            return "WFull_HD";
        default:
            return "Undefined";
    }
}
// Returns the directory portion of _fileName without a trailing slash,
// or "." when the name contains no path separator.
std::string
VideoSource::GetFilePath() const
{
    const size_t sepPos = _fileName.find_last_of("/\\");
    return (sepPos == std::string::npos) ?
        std::string(".") : _fileName.substr(0, sepPos);
}
// Returns the base name of _fileName: the path, the extension and any
// trailing underscored suffix are removed.
std::string
VideoSource::GetName() const
{
    // Remove path.
    size_t slashPos = _fileName.find_last_of("/\\");
    if (slashPos == std::string::npos)
    {
        slashPos = 0;
    }
    else
    {
        slashPos++;
    }
    // Remove extension and underscored suffix if they exist.
    // Fix: the original searched the whole path for '_' and '.', so a
    // marker inside a directory name (e.g. "my_dir/clip.yuv") truncated
    // the result incorrectly. Ignore markers before the base name.
    size_t underscorePos = _fileName.find_last_of("_");
    if (underscorePos != std::string::npos && underscorePos < slashPos)
    {
        underscorePos = std::string::npos;
    }
    size_t dotPos = _fileName.find_last_of(".");
    if (dotPos != std::string::npos && dotPos < slashPos)
    {
        dotPos = std::string::npos;
    }
    // If neither marker is present, npos - slashPos overflows to a huge
    // count, which substr clamps to "rest of string" (same as original).
    return _fileName.substr(slashPos,
                            std::min(underscorePos, dotPos) - slashPos);
}
// Temporally downsamples this source to the target's frame rate and
// writes the result to the target's file. Only I420 content with
// unchanged pixel dimensions is supported (spatial scaling is not
// implemented). When force is false, nothing is done if the target file
// already exists or if the target is identical to the source.
void
VideoSource::Convert(const VideoSource &target, bool force /* = false */) const
{
    // Ensure target rate is less than or equal to source
    // (i.e. we are only temporally downsampling).
    assert(target.GetFrameRate() <= _frameRate);
    // Only supports YUV420 currently.
    assert(_type == webrtc::kI420 && target.GetType() == webrtc::kI420);
    if (!force && (FileExists(target.GetFileName().c_str()) ||
        (target.GetWidth() == _width && target.GetHeight() == _height &&
         target.GetFrameRate() == _frameRate)))
    {
        // Assume that the filename uniquely defines the content.
        // If the file already exists, it is the correct file.
        return;
    }
    FILE* inFile = fopen(_fileName.c_str(), "rb");
    assert(inFile != NULL);
    FILE* outFile = fopen(target.GetFileName().c_str(), "wb");
    assert(outFile != NULL);
    if (inFile == NULL || outFile == NULL)
    {
        // The asserts above are compiled out under NDEBUG; fail softly
        // instead of dereferencing a NULL FILE* in release builds.
        if (inFile != NULL)
        {
            fclose(inFile);
        }
        if (outFile != NULL)
        {
            fclose(outFile);
        }
        return;
    }
    FrameDropper fd;
    fd.SetFrameRate(target.GetFrameRate(), _frameRate);
    const size_t lengthInFrame = webrtc::CalcBufferSize(_type, _width, _height);
    assert(lengthInFrame > 0);
    unsigned char *inFrame = new unsigned char[lengthInFrame];
    while (fread(inFrame, 1, lengthInFrame, inFile) == lengthInFrame)
    {
        if (!fd.DropFrame())
        {
            assert(target.GetWidth() == _width &&
                target.GetHeight() == _height); // Add video interpolator here!
            // Bug fix: the original wrote a never-initialized scratch
            // buffer (outFrame) here, producing garbage output. Write the
            // frame that was just read instead; dimensions are asserted
            // equal, so the in- and out-frame lengths match.
            fwrite(inFrame, 1, lengthInFrame, outFile);
        }
    }
    // Bug fix: memory from new[] must be released with delete[], not delete.
    delete [] inFrame;
    fclose(inFile);
    fclose(outFile);
}
// Returns true if fileName can be opened for reading.
bool VideoSource::FileExists(const char* fileName)
{
    FILE* file = fopen(fileName, "rb");
    if (file == NULL)
    {
        return false;
    }
    fclose(file);
    return true;
}
// Resolves a VideoSize enumerator to its pixel dimensions, written into
// width and height. Returns 0 on success, -1 if the size is unknown
// (in which case width and height are left untouched).
int
VideoSource::GetWidthHeight( VideoSize size, int & width, int& height)
{
    struct Dimensions
    {
        VideoSize size;
        int width;
        int height;
    };
    static const Dimensions table[] =
    {
        { kSQCIF,    128,   96 },
        { kQQVGA,    160,  120 },
        { kQCIF,     176,  144 },
        { kCGA,      320,  200 },
        { kQVGA,     320,  240 },
        { kSIF,      352,  240 },
        { kWQVGA,    400,  240 },
        { kCIF,      352,  288 },
        { kW288p,    512,  288 },
        { k448p,     576,  448 },
        { kVGA,      640,  480 },
        { k432p,     720,  432 },
        { kW432p,    768,  432 },
        { k4SIF,     704,  480 },
        { kW448p,    768,  448 },
        { kNTSC,     720,  480 },
        { kFW448p,   800,  448 },
        { kWVGA,     800,  480 },
        { k4CIF,     704,  576 },
        { kSVGA,     800,  600 },
        { kW544p,    960,  544 },
        { kW576p,   1024,  576 },
        { kHD,       960,  720 },
        { kXGA,     1024,  768 },
        { kFullHD,  1440, 1080 },
        { kWHD,     1280,  720 },
        { kWFullHD, 1920, 1080 }
    };
    const size_t numEntries = sizeof(table) / sizeof(table[0]);
    for (size_t i = 0; i < numEntries; i++)
    {
        if (table[i].size == size)
        {
            width = table[i].width;
            height = table[i].height;
            return 0;
        }
    }
    return -1;
}
// Initializes the dropper to render every frame (no drops).
FrameDropper::FrameDropper()
:
_dropsBetweenRenders(0),
_frameCounter(0)
{
}
// Returns true if the current frame should be dropped. Exactly every
// (_dropsBetweenRenders + 1)-th call returns false (frame is rendered),
// at which point the counter restarts.
bool
FrameDropper::DropFrame()
{
    if (++_frameCounter > _dropsBetweenRenders)
    {
        _frameCounter = 0;
        return false;
    }
    return true;
}
// Returns the number of frames dropped between two rendered frames.
unsigned int
FrameDropper::DropsBetweenRenders()
{
return _dropsBetweenRenders;
}
// Configures how many frames to drop between rendered frames so that a
// source running at maxFrameRate plays back at approximately frameRate.
// A frameRate below 1 fps (or at/above maxFrameRate) disables dropping.
void
FrameDropper::SetFrameRate(double frameRate, double maxFrameRate)
{
    if (frameRate >= 1.0)
    {
        // Bug fix: when frameRate exceeds maxFrameRate enough that
        // round(maxFrameRate / frameRate) is 0, the original computed
        // 0u - 1, underflowing the unsigned member to UINT_MAX and
        // dropping every frame. Clamp the ratio to at least one render.
        const unsigned int rendersPerRender =
            static_cast<unsigned int>(maxFrameRate / frameRate + 0.5);
        _dropsBetweenRenders =
            (rendersPerRender > 0) ? rendersPerRender - 1 : 0;
    }
    else
    {
        _dropsBetweenRenders = 0;
    }
}

View File

@@ -0,0 +1,110 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_VIDEO_SOURCE_H_
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_VIDEO_SOURCE_H_
#include <string>
#include "vplib.h"
// Named video resolutions, ordered by pixel count. Each comment gives
// width*height = total pixels.
enum VideoSize
{
kUndefined,
kSQCIF, // 128*96 = 12 288
kQQVGA, // 160*120 = 19 200
kQCIF, // 176*144 = 25 344
kCGA, // 320*200 = 64 000
kQVGA, // 320*240 = 76 800
kSIF, // 352*240 = 84 480
kWQVGA, // 400*240 = 96 000
kCIF, // 352*288 = 101 376
kW288p, // 512*288 = 147 456 (WCIF)
k448p, // 576*448 = 281 088
kVGA, // 640*480 = 307 200
k432p, // 720*432 = 311 040
kW432p, // 768*432 = 331 776
k4SIF, // 704*480 = 337 920
kW448p, // 768*448 = 344 064
kNTSC, // 720*480 = 345 600
kFW448p, // 800*448 = 358 400
kWVGA, // 800*480 = 384 000
k4CIF, // 704*576 = 405 504
kSVGA, // 800*600 = 480 000
kW544p, // 960*544 = 522 240
kW576p, // 1024*576 = 589 824 (W4CIF)
kHD, // 960*720 = 691 200
kXGA, // 1024*768 = 786 432
kWHD, // 1280*720 = 921 600
kFullHD, // 1440*1080 = 1 555 200
kWFullHD, // 1920*1080 = 2 073 600
kNumberOfVideoSizes // Sentinel: number of enumerators above (incl. kUndefined).
};
// Describes a raw video file used as test input: its path, pixel
// dimensions, pixel format and frame rate.
class VideoSource
{
public:
// Defaults to foreman.yuv, CIF (352x288), I420, 30 fps.
VideoSource();
// size is translated to pixel dimensions via GetWidthHeight().
VideoSource(std::string fileName, VideoSize size, int frameRate = 30,
webrtc::VideoType type = webrtc::kI420);
VideoSource(std::string fileName, int width, int height, int frameRate = 30,
webrtc::VideoType type = webrtc::kI420);
std::string GetFileName() const { return _fileName; }
int GetWidth() const { return _width; }
int GetHeight() const { return _height; }
webrtc::VideoType GetType() const { return _type; }
int GetFrameRate() const { return _frameRate; }
// Returns the file path without a trailing slash.
std::string GetFilePath() const;
// Returns the filename with the path (including the leading slash) removed.
std::string GetName() const;
// Returns the named size matching this source's dimensions, or kUndefined.
VideoSize GetSize() const;
static VideoSize GetSize(WebRtc_UWord16 width, WebRtc_UWord16 height);
// Returns the size in bytes of one frame.
unsigned int GetFrameLength() const;
// Returns a human-readable size string.
static const char* GetSizeString(VideoSize size);
const char* GetMySizeString() const;
// Opens the video source, converting and writing to the specified target.
// If force is true, the conversion will be done even if the target file
// already exists.
void Convert(const VideoSource& target, bool force = false) const;
// Returns true if fileName can be opened for reading.
static bool FileExists(const char* fileName);
private:
// Writes the pixel dimensions for size into width/height.
// Returns 0 on success, -1 if size is unknown.
static int GetWidthHeight( VideoSize size, int& width, int& height);
std::string _fileName; // Path to the raw video file.
int _width; // Frame width in pixels.
int _height; // Frame height in pixels.
webrtc::VideoType _type; // Pixel format.
int _frameRate; // Frames per second.
};
// Helper for temporal downsampling: decides which frames of a source
// running at one rate to drop so playback approximates a lower rate.
class FrameDropper
{
public:
FrameDropper();
// Returns true if the current frame should be dropped; every
// (DropsBetweenRenders() + 1)-th call returns false.
bool DropFrame();
// Number of frames dropped between two rendered frames.
unsigned int DropsBetweenRenders();
// Configures dropping for a target frameRate given a source running
// at maxFrameRate.
void SetFrameRate(double frameRate, double maxFrameRate);
private:
unsigned int _dropsBetweenRenders; // Frames to drop per rendered frame.
unsigned int _frameCounter; // Frames seen since the last render.
};
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_VIDEO_SOURCE_H_