Make SANITY_CHECK required for all performance tests

Andrey Kamaev 2012-10-08 17:17:42 +04:00
parent 94b97b7a63
commit 6151a6ea0b
2 changed files with 489 additions and 479 deletions
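Editorial illustration (not part of this commit; the test, matrix names and sizes below are made up): after this change every performance test has to reach a SANITY_CHECK before it finishes, because that macro is what marks the running fixture as verified. A minimal conforming test looks roughly like this:

PERF_TEST(ExampleAdd, conforming)
{
    cv::Mat a(::perf::sz720p, CV_8UC1, cv::Scalar(10));
    cv::Mat b(::perf::sz720p, CV_8UC1, cv::Scalar(20));
    cv::Mat c(::perf::sz720p, CV_8UC1, cv::Scalar(0));

    declare.in(a, b).out(c);

    TEST_CYCLE() cv::add(a, b, c);

    SANITY_CHECK(c); // expands to ::perf::Regression::add(this, "c", c) and sets verified = true
}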


@@ -1,478 +1,482 @@
#ifndef __OPENCV_TS_PERF_HPP__
#define __OPENCV_TS_PERF_HPP__

#include "opencv2/core/core.hpp"
#include "ts_gtest.h"

#ifdef HAVE_TBB
#include "tbb/task_scheduler_init.h"
#endif

#if !(defined(LOGD) || defined(LOGI) || defined(LOGW) || defined(LOGE))
# if defined(ANDROID) && defined(USE_ANDROID_LOGGING)
#  include <android/log.h>

#  define PERF_TESTS_LOG_TAG "OpenCV_perf"
#  define LOGD(...) ((void)__android_log_print(ANDROID_LOG_DEBUG, PERF_TESTS_LOG_TAG, __VA_ARGS__))
#  define LOGI(...) ((void)__android_log_print(ANDROID_LOG_INFO, PERF_TESTS_LOG_TAG, __VA_ARGS__))
#  define LOGW(...) ((void)__android_log_print(ANDROID_LOG_WARN, PERF_TESTS_LOG_TAG, __VA_ARGS__))
#  define LOGE(...) ((void)__android_log_print(ANDROID_LOG_ERROR, PERF_TESTS_LOG_TAG, __VA_ARGS__))
# else
#  define LOGD(_str, ...) do{printf(_str , ## __VA_ARGS__); printf("\n");fflush(stdout);} while(0)
#  define LOGI(_str, ...) do{printf(_str , ## __VA_ARGS__); printf("\n");fflush(stdout);} while(0)
#  define LOGW(_str, ...) do{printf(_str , ## __VA_ARGS__); printf("\n");fflush(stdout);} while(0)
#  define LOGE(_str, ...) do{printf(_str , ## __VA_ARGS__); printf("\n");fflush(stdout);} while(0)
# endif
#endif
namespace perf
{
+class TestBase;

/*****************************************************************************************\
*                Predefined typical frame sizes and typical test parameters              *
\*****************************************************************************************/
const cv::Size szQVGA = cv::Size(320, 240);
const cv::Size szVGA = cv::Size(640, 480);
const cv::Size szSVGA = cv::Size(800, 600);
const cv::Size szXGA = cv::Size(1024, 768);
const cv::Size szSXGA = cv::Size(1280, 1024);
const cv::Size szWQHD = cv::Size(2560, 1440);

const cv::Size sznHD = cv::Size(640, 360);
const cv::Size szqHD = cv::Size(960, 540);
const cv::Size sz240p = szQVGA;
const cv::Size sz720p = cv::Size(1280, 720);
const cv::Size sz1080p = cv::Size(1920, 1080);
const cv::Size sz1440p = szWQHD;
const cv::Size sz2160p = cv::Size(3840, 2160);//UHDTV1 4K
const cv::Size sz4320p = cv::Size(7680, 4320);//UHDTV2 8K

const cv::Size sz2K = cv::Size(2048, 2048);

const cv::Size szODD = cv::Size(127, 61);

const cv::Size szSmall24 = cv::Size(24, 24);
const cv::Size szSmall32 = cv::Size(32, 32);
const cv::Size szSmall64 = cv::Size(64, 64);
const cv::Size szSmall128 = cv::Size(128, 128);

#define SZ_ALL_VGA ::testing::Values(::perf::szQVGA, ::perf::szVGA, ::perf::szSVGA)
#define SZ_ALL_GA ::testing::Values(::perf::szQVGA, ::perf::szVGA, ::perf::szSVGA, ::perf::szXGA, ::perf::szSXGA)
#define SZ_ALL_HD ::testing::Values(::perf::sznHD, ::perf::szqHD, ::perf::sz720p, ::perf::sz1080p)
#define SZ_ALL_SMALL ::testing::Values(::perf::szSmall24, ::perf::szSmall32, ::perf::szSmall64, ::perf::szSmall128)
#define SZ_ALL ::testing::Values(::perf::szQVGA, ::perf::szVGA, ::perf::szSVGA, ::perf::szXGA, ::perf::szSXGA, ::perf::sznHD, ::perf::szqHD, ::perf::sz720p, ::perf::sz1080p)
#define SZ_TYPICAL ::testing::Values(::perf::szVGA, ::perf::szqHD, ::perf::sz720p, ::perf::szODD)

#define TYPICAL_MAT_SIZES ::perf::szVGA, ::perf::sz720p, ::perf::sz1080p, ::perf::szODD
#define TYPICAL_MAT_TYPES CV_8UC1, CV_8UC4, CV_32FC1
#define TYPICAL_MATS testing::Combine( testing::Values( TYPICAL_MAT_SIZES ), testing::Values( TYPICAL_MAT_TYPES ) )
#define TYPICAL_MATS_C1 testing::Combine( testing::Values( TYPICAL_MAT_SIZES ), testing::Values( CV_8UC1, CV_32FC1 ) )
#define TYPICAL_MATS_C4 testing::Combine( testing::Values( TYPICAL_MAT_SIZES ), testing::Values( CV_8UC4 ) )
/*****************************************************************************************\
*                MatType - printable wrapper over integer 'type' of Mat                  *
\*****************************************************************************************/
class MatType
{
public:
    MatType(int val=0) : _type(val) {}
    operator int() const {return _type;}

private:
    int _type;
};
/*****************************************************************************************\
*     CV_ENUM and CV_FLAGS - macro to create printable wrappers for defines and enums    *
\*****************************************************************************************/

#define CV_ENUM(class_name, ...) \
class CV_EXPORTS class_name {\
public:\
  class_name(int val = 0) : _val(val) {}\
  operator int() const {return _val;}\
  void PrintTo(std::ostream* os) const {\
    const int vals[] = {__VA_ARGS__};\
    const char* svals = #__VA_ARGS__;\
    for(int i = 0, pos = 0; i < (int)(sizeof(vals)/sizeof(int)); ++i){\
      while(isspace(svals[pos]) || svals[pos] == ',') ++pos;\
      int start = pos;\
      while(!(isspace(svals[pos]) || svals[pos] == ',' || svals[pos] == 0)) ++pos;\
      if (_val == vals[i]) {\
        *os << std::string(svals + start, svals + pos);\
        return;\
      }\
    }\
    *os << "UNKNOWN";\
  }\
  struct Container{\
    typedef class_name value_type;\
    Container(class_name* first, size_t len): _begin(first), _end(first+len){}\
    const class_name* begin() const {return _begin;}\
    const class_name* end() const {return _end;}\
    private: class_name *_begin, *_end;\
  };\
  static Container all(){\
    static class_name vals[] = {__VA_ARGS__};\
    return Container(vals, sizeof(vals)/sizeof(vals[0]));\
  }\
private: int _val;\
};\
inline void PrintTo(const class_name& t, std::ostream* os) { t.PrintTo(os); }

#define CV_FLAGS(class_name, ...) \
class CV_EXPORTS class_name {\
public:\
  class_name(int val = 0) : _val(val) {}\
  operator int() const {return _val;}\
  void PrintTo(std::ostream* os) const {\
    const int vals[] = {__VA_ARGS__};\
    const char* svals = #__VA_ARGS__;\
    int value = _val;\
    bool first = true;\
    for(int i = 0, pos = 0; i < (int)(sizeof(vals)/sizeof(int)); ++i){\
      while(isspace(svals[pos]) || svals[pos] == ',') ++pos;\
      int start = pos;\
      while(!(isspace(svals[pos]) || svals[pos] == ',' || svals[pos] == 0)) ++pos;\
      if ((value & vals[i]) == vals[i]) {\
        value &= ~vals[i]; \
        if (first) first = false; else *os << "|"; \
        *os << std::string(svals + start, svals + pos);\
        if (!value) return;\
      }\
    }\
    if (first) *os << "UNKNOWN";\
  }\
private: int _val;\
};\
inline void PrintTo(const class_name& t, std::ostream* os) { t.PrintTo(os); }

CV_ENUM(MatDepth, CV_8U, CV_8S, CV_16U, CV_16S, CV_32S, CV_32F, CV_64F, CV_USRTYPE1)
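As an aside (illustrative only, not from the diff): CV_ENUM wraps a set of integer constants in a small class whose PrintTo() writes the symbolic name, which keeps googletest parameter listings readable. NormType below is a hypothetical wrapper over the core norm flags:

CV_ENUM(NormType, cv::NORM_INF, cv::NORM_L1, cv::NORM_L2)

inline void printNormTypeExample(std::ostream& os)
{
    NormType n(cv::NORM_L1);
    PrintTo(n, &os); // writes "cv::NORM_L1" instead of the raw value 2
    // NormType::all() returns a container that works with ::testing::ValuesIn()
}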
/*****************************************************************************************\
*                  Regression control utility for performance testing                    *
\*****************************************************************************************/
enum ERROR_TYPE
{
    ERROR_ABSOLUTE = 0,
    ERROR_RELATIVE = 1
};

class CV_EXPORTS Regression
{
public:
-    static Regression& add(const std::string& name, cv::InputArray array, double eps = DBL_EPSILON, ERROR_TYPE err = ERROR_ABSOLUTE);
+    static Regression& add(TestBase* test, const std::string& name, cv::InputArray array, double eps = DBL_EPSILON, ERROR_TYPE err = ERROR_ABSOLUTE);
    static void Init(const std::string& testSuitName, const std::string& ext = ".xml");

    Regression& operator() (const std::string& name, cv::InputArray array, double eps = DBL_EPSILON, ERROR_TYPE err = ERROR_ABSOLUTE);

private:
    static Regression& instance();
    Regression();
    ~Regression();

    Regression(const Regression&);
    Regression& operator=(const Regression&);

    cv::RNG regRNG;//own random numbers generator to make collection and verification work identical
    std::string storageInPath;
    std::string storageOutPath;
    cv::FileStorage storageIn;
    cv::FileStorage storageOut;
    cv::FileNode rootIn;
    std::string currentTestNodeName;
    cv::FileStorage& write();

    static std::string getCurrentTestNodeName();
    static bool isVector(cv::InputArray a);
    static double getElem(cv::Mat& m, int x, int y, int cn = 0);

    void init(const std::string& testSuitName, const std::string& ext);
    void write(cv::InputArray array);
    void write(cv::Mat m);
    void verify(cv::FileNode node, cv::InputArray array, double eps, ERROR_TYPE err);
    void verify(cv::FileNode node, cv::Mat actual, double eps, std::string argname, ERROR_TYPE err);
};

-#define SANITY_CHECK(array, ...) ::perf::Regression::add(#array, array , ## __VA_ARGS__)
+#define SANITY_CHECK(array, ...) ::perf::Regression::add(this, #array, array , ## __VA_ARGS__)
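Illustrative usage (hypothetical test, not part of the commit): the trailing SANITY_CHECK arguments are forwarded to Regression::add(), so floating-point output can be compared with a relative tolerance instead of the default absolute DBL_EPSILON:

PERF_TEST(ExampleLog, relative_tolerance)
{
    cv::Mat src(::perf::sz720p, CV_32FC1, cv::Scalar(2.0));
    cv::Mat dst;

    TEST_CYCLE() cv::log(src, dst);

    SANITY_CHECK(dst, 1e-6, ::perf::ERROR_RELATIVE); // eps and error type go through __VA_ARGS__
}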
/*****************************************************************************************\
*                            Container for performance metrics                           *
\*****************************************************************************************/
typedef struct CV_EXPORTS performance_metrics
{
    size_t bytesIn;
    size_t bytesOut;
    unsigned int samples;
    unsigned int outliers;
    double gmean;
    double gstddev;//stddev for log(time)
    double mean;
    double stddev;
    double median;
    double min;
    double frequency;
    int terminationReason;

    enum
    {
        TERM_ITERATIONS = 0,
        TERM_TIME = 1,
        TERM_INTERRUPT = 2,
        TERM_EXCEPTION = 3,
        TERM_UNKNOWN = -1
    };

    performance_metrics();
} performance_metrics;
/*****************************************************************************************\
*                           Base fixture for performance tests                           *
\*****************************************************************************************/
class CV_EXPORTS TestBase: public ::testing::Test
{
public:
    TestBase();

    static void Init(int argc, const char* const argv[]);
    static std::string getDataPath(const std::string& relativePath);

protected:
    virtual void PerfTestBody() = 0;

    virtual void SetUp();
    virtual void TearDown();

    void startTimer();
    void stopTimer();
    bool next();

    //_declareHelper declare;

    enum
    {
        WARMUP_READ,
        WARMUP_WRITE,
        WARMUP_RNG,
        WARMUP_NONE
    };

    void reportMetrics(bool toJUnitXML = false);
    static void warmup(cv::InputOutputArray a, int wtype = WARMUP_READ);

    performance_metrics& calcMetrics();
    void RunPerfTestBody();
private:
    typedef std::vector<std::pair<int, cv::Size> > SizeVector;
    typedef std::vector<int64> TimeVector;

    SizeVector inputData;
    SizeVector outputData;
    unsigned int getTotalInputSize() const;
    unsigned int getTotalOutputSize() const;

    TimeVector times;
    int64 lastTime;
    int64 totalTime;
    int64 timeLimit;
    static int64 timeLimitDefault;
    static unsigned int iterationsLimitDefault;

    unsigned int nIters;
    unsigned int currentIter;
    unsigned int runsPerIteration;

    performance_metrics metrics;
    void validateMetrics();

    static int64 _timeadjustment;
    static int64 _calibrate();

    static void warmup_impl(cv::Mat m, int wtype);
    static int getSizeInBytes(cv::InputArray a);
    static cv::Size getSize(cv::InputArray a);
    static void declareArray(SizeVector& sizes, cv::InputOutputArray a, int wtype = 0);

    class CV_EXPORTS _declareHelper
    {
    public:
        _declareHelper& in(cv::InputOutputArray a1, int wtype = WARMUP_READ);
        _declareHelper& in(cv::InputOutputArray a1, cv::InputOutputArray a2, int wtype = WARMUP_READ);
        _declareHelper& in(cv::InputOutputArray a1, cv::InputOutputArray a2, cv::InputOutputArray a3, int wtype = WARMUP_READ);
        _declareHelper& in(cv::InputOutputArray a1, cv::InputOutputArray a2, cv::InputOutputArray a3, cv::InputOutputArray a4, int wtype = WARMUP_READ);

        _declareHelper& out(cv::InputOutputArray a1, int wtype = WARMUP_WRITE);
        _declareHelper& out(cv::InputOutputArray a1, cv::InputOutputArray a2, int wtype = WARMUP_WRITE);
        _declareHelper& out(cv::InputOutputArray a1, cv::InputOutputArray a2, cv::InputOutputArray a3, int wtype = WARMUP_WRITE);
        _declareHelper& out(cv::InputOutputArray a1, cv::InputOutputArray a2, cv::InputOutputArray a3, cv::InputOutputArray a4, int wtype = WARMUP_WRITE);

        _declareHelper& iterations(unsigned int n);
        _declareHelper& time(double timeLimitSecs);
        _declareHelper& tbb_threads(int n = -1);
        _declareHelper& runs(unsigned int runsNumber);
    private:
        TestBase* test;
        _declareHelper(TestBase* t);
        _declareHelper(const _declareHelper&);
        _declareHelper& operator=(const _declareHelper&);
        friend class TestBase;
    };
    friend class _declareHelper;
+    friend class Regression;

#ifdef HAVE_TBB
    cv::Ptr<tbb::task_scheduler_init> p_tbb_initializer;
#else
    cv::Ptr<int> fixme;
#endif
+    bool verified;

public:
    _declareHelper declare;
};

template<typename T> class TestBaseWithParam: public TestBase, public ::testing::WithParamInterface<T> {};

typedef std::tr1::tuple<cv::Size, MatType> Size_MatType_t;
typedef TestBaseWithParam<Size_MatType_t> Size_MatType;
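For illustration (hypothetical test; assumes the usual "using namespace perf;" of a perf translation unit): the Size_MatType fixture combines directly with the TYPICAL_MATS generator defined above:

using namespace perf;

PERF_TEST_P(Size_MatType, hypothetical_convertTo, TYPICAL_MATS)
{
    cv::Size sz = std::tr1::get<0>(GetParam());
    int type    = std::tr1::get<1>(GetParam());

    cv::Mat src(sz, type, cv::Scalar::all(1));
    cv::Mat dst(sz, type);

    declare.in(src).out(dst);

    TEST_CYCLE() src.convertTo(dst, type, 2.0);

    SANITY_CHECK(dst);
}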
/*****************************************************************************************\
*                              Print functions for googletest                            *
\*****************************************************************************************/
CV_EXPORTS void PrintTo(const MatType& t, std::ostream* os);

} //namespace perf

namespace cv
{

CV_EXPORTS void PrintTo(const Size& sz, ::std::ostream* os);

} //namespace cv
/*****************************************************************************************\
*                        Macro definitions for performance tests                         *
\*****************************************************************************************/
#define PERF_PROXY_NAMESPACE_NAME_(test_case_name, test_name) \
  test_case_name##_##test_name##_perf_namespace_proxy

// Defines a performance test.
//
// The first parameter is the name of the test case, and the second
// parameter is the name of the test within the test case.
//
// The user should put his test code between braces after using this
// macro. Example:
//
//   PERF_TEST(FooTest, InitializesCorrectly) {
//     Foo foo;
//     EXPECT_TRUE(foo.StatusIsOK());
//   }
#define PERF_TEST(test_case_name, test_name)\
    namespace PERF_PROXY_NAMESPACE_NAME_(test_case_name, test_name) {\
     class TestBase {/*compile error for this class means that you are trying to use perf::TestBase as a fixture*/};\
     class test_case_name : public ::perf::TestBase {\
      public:\
       test_case_name() {}\
      protected:\
       virtual void PerfTestBody();\
     };\
     TEST_F(test_case_name, test_name){ RunPerfTestBody(); }\
    }\
    void PERF_PROXY_NAMESPACE_NAME_(test_case_name, test_name)::test_case_name::PerfTestBody()

// Defines a performance test that uses a test fixture.
//
// The first parameter is the name of the test fixture class, which
// also doubles as the test case name. The second parameter is the
// name of the test within the test case.
//
// A test fixture class must be declared earlier. The user should put
// his test code between braces after using this macro. Example:
//
//   class FooTest : public ::perf::TestBase {
//    protected:
//     virtual void SetUp() { TestBase::SetUp(); b_.AddElement(3); }
//
//     Foo a_;
//     Foo b_;
//   };
//
//   PERF_TEST_F(FooTest, InitializesCorrectly) {
//     EXPECT_TRUE(a_.StatusIsOK());
//   }
//
//   PERF_TEST_F(FooTest, ReturnsElementCountCorrectly) {
//     EXPECT_EQ(0, a_.size());
//     EXPECT_EQ(1, b_.size());
//   }
#define PERF_TEST_F(fixture, testname) \
    namespace PERF_PROXY_NAMESPACE_NAME_(fixture, testname) {\
     class TestBase {/*compile error for this class means that you are trying to use perf::TestBase as a fixture*/};\
     class fixture : public ::fixture {\
      public:\
       fixture() {}\
      protected:\
       virtual void PerfTestBody();\
     };\
     TEST_F(fixture, testname){ RunPerfTestBody(); }\
    }\
    void PERF_PROXY_NAMESPACE_NAME_(fixture, testname)::fixture::PerfTestBody()

// Defines a parametrized performance test.
//
// The first parameter is the name of the test fixture class, which
// also doubles as the test case name. The second parameter is the
// name of the test within the test case.
//
// The user should put his test code between braces after using this
// macro. Example:
//
//   typedef ::perf::TestBaseWithParam<cv::Size> FooTest;
//
//   PERF_TEST_P(FooTest, DoTestingRight, ::testing::Values(::perf::szVGA, ::perf::sz720p)) {
//     cv::Mat b(GetParam(), CV_8U, cv::Scalar(10));
//     cv::Mat a(GetParam(), CV_8U, cv::Scalar(20));
//     cv::Mat c(GetParam(), CV_8U, cv::Scalar(0));
//
//     declare.in(a, b).out(c).time(0.5);
//
//     TEST_CYCLE() cv::add(a, b, c);
//
//     SANITY_CHECK(c);
//   }
#define PERF_TEST_P(fixture, name, params)  \
    class fixture##_##name : public fixture {\
     public:\
      fixture##_##name() {}\
     protected:\
      virtual void PerfTestBody();\
    };\
    TEST_P(fixture##_##name, name /*perf*/){ RunPerfTestBody(); }\
    INSTANTIATE_TEST_CASE_P(/*none*/, fixture##_##name, params);\
    void fixture##_##name::PerfTestBody()


#define CV_PERF_TEST_MAIN(testsuitname) \
int main(int argc, char **argv)\
{\
    ::perf::Regression::Init(#testsuitname);\
    ::perf::TestBase::Init(argc, argv);\
    ::testing::InitGoogleTest(&argc, argv);\
    return RUN_ALL_TESTS();\
}

#define TEST_CYCLE_N(n) for(declare.iterations(n); startTimer(), next(); stopTimer())
#define TEST_CYCLE() for(; startTimer(), next(); stopTimer())
#define TEST_CYCLE_MULTIRUN(runsNum) for(declare.runs(runsNum); startTimer(), next(); stopTimer()) for(int r = 0; r < runsNum; ++r)

//flags
namespace perf
{
//GTEST_DECLARE_int32_(allowed_outliers);
} //namespace perf

#endif //__OPENCV_TS_PERF_HPP__
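A module's perf main file (sketch with an assumed include path and a placeholder suite name) reduces to the CV_PERF_TEST_MAIN invocation:

#include "opencv2/ts/ts_perf.hpp" // assumed location of this header

CV_PERF_TEST_MAIN(example_module) // expands to main(): Regression::Init, TestBase::Init, InitGoogleTest, RUN_ALL_TESTS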


@@ -97,8 +97,9 @@ Regression& Regression::instance()
    return single;
}

-Regression& Regression::add(const std::string& name, cv::InputArray array, double eps, ERROR_TYPE err)
+Regression& Regression::add(TestBase* test, const std::string& name, cv::InputArray array, double eps, ERROR_TYPE err)
{
+    if(test) test->verified = true;
    return instance()(name, array, eps, err);
}
@@ -493,6 +494,7 @@ Regression& Regression::operator() (const std::string& name, cv::InputArray arra
        else
            verify(this_arg, array, eps, err);
    }
+
    return *this;
}
@@ -914,6 +916,7 @@ void TestBase::SetUp()
    if (param_affinity_mask)
        setCurrentThreadAffinityMask(param_affinity_mask);
#endif
+    verified = false;
    lastTime = 0;
    totalTime = 0;
    runsPerIteration = 1;
@@ -926,6 +929,9 @@ void TestBase::SetUp()
void TestBase::TearDown()
{
+    if (!HasFailure() && !verified)
+        ADD_FAILURE() << "The test has no sanity checks. There should be at least one check at the end of performance test.";
+
    validateMetrics();
    if (HasFailure())
        reportMetrics(false);
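Illustrative consequence (hypothetical test, not from the commit): a test body that never reaches a sanity check now fails explicitly instead of passing silently:

PERF_TEST(ExampleNot, missing_sanity_check)
{
    cv::Mat src(::perf::szVGA, CV_8UC1, cv::Scalar(1));
    cv::Mat dst;

    TEST_CYCLE() cv::bitwise_not(src, dst);

    // no SANITY_CHECK(dst): verified stays false, so TearDown() calls ADD_FAILURE()
    // with "The test has no sanity checks. ..."
}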