Perf tests: fixed some issues

- store the image name test parameter as std::string instead of const char*
- fail early when the cascade file or the source image cannot be loaded
- replace TEST_CYCLE(100) with an explicit while(next()) loop bracketed by startTimer()/stopTimer()
- add performance_metrics::TERM_INTERRUPT = 2 and move TERM_UNKNOWN to -1
- move the per-test try/catch out of the PERF_TEST macros into the new TestBase::RunPerfTestBody()
- introduce PerfEarlyExitException so that helpers such as getDataPath() can abort a broken test
- raise the default per-test time limits (Android: 2.0s -> 6.0s, other platforms: 1.0s -> 3.0s)
- print the min/median/gmean/stddev block only when at least one sample was collected

parent 6ceb322a47
commit ccfb3e6a21
@@ -4,24 +4,33 @@ using namespace std;
 using namespace cv;
 using namespace perf;
 
-typedef std::tr1::tuple<const char*, int> ImageName_MinSize_t;
+typedef std::tr1::tuple<std::string, int> ImageName_MinSize_t;
 typedef perf::TestBaseWithParam<ImageName_MinSize_t> ImageName_MinSize;
 
 PERF_TEST_P( ImageName_MinSize, CascadeClassifierLBPFrontalFace, testing::Values( ImageName_MinSize_t("cv/shared/lena.jpg", 10) ) )
 {
-    const char* filename = std::tr1::get<0>(GetParam());
+    const string filename = std::tr1::get<0>(GetParam());
     int min_size = std::tr1::get<1>(GetParam());
     Size minSize(min_size, min_size);
 
     CascadeClassifier cc(getDataPath("cv/cascadeandhog/cascades/lbpcascade_frontalface.xml"));
+    if (cc.empty())
+        FAIL() << "Can't load cascade file";
 
     Mat img=imread(getDataPath(filename));
+    if (img.empty())
+        FAIL() << "Can't load source image";
 
     vector<Rect> res;
 
-    declare.in(img).time(10000);
-    TEST_CYCLE(100)
+    declare.in(img);//.out(res)
+    while(next())
     {
         res.clear();
 
+        startTimer();
         cc.detectMultiScale(img, res, 1.1, 3, 0, minSize);
+        stopTimer();
     }
 }
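The loop above replaces the fixed TEST_CYCLE(100) macro with explicit sampling. A minimal sketch of the resulting pattern, assuming only the TestBase members used in this commit (next() returns false once the iteration or time budget is spent; startTimer()/stopTimer() bracket exactly the code that is measured; the blur test itself is hypothetical):

    PERF_TEST_P( ImageName_MinSize, Blur_Sketch, /* hypothetical test */
                 testing::Values( ImageName_MinSize_t("cv/shared/lena.jpg", 3) ) )
    {
        Mat src = imread(getDataPath(std::tr1::get<0>(GetParam())));
        int k = std::tr1::get<1>(GetParam());
        Mat dst;

        declare.in(src);                  // register the input with the framework

        while(next())                     // one measured sample per pass
        {
            startTimer();
            blur(src, dst, Size(k, k));   // only this call contributes to the sample
            stopTimer();
        }
    }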
@@ -93,8 +93,6 @@ private: int _val;\
 };\
 inline void PrintTo(const class_name& t, std::ostream* os) { t.PrintTo(os); }
 
-CV_ENUM(MatDepth, CV_8U, CV_8S, CV_16U, CV_16S, CV_32S, CV_32F, CV_64F, CV_USRTYPE1)
-
 #define CV_FLAGS(class_name, ...) \
 class CV_EXPORTS class_name {\
 public:\
@@ -122,6 +120,8 @@ private: int _val;\
 };\
 inline void PrintTo(const class_name& t, std::ostream* os) { t.PrintTo(os); }
 
+CV_ENUM(MatDepth, CV_8U, CV_8S, CV_16U, CV_16S, CV_32S, CV_32F, CV_64F, CV_USRTYPE1)
+
 /*****************************************************************************************\
 *                      Regression control utility for performance testing                *
 \*****************************************************************************************/
@@ -186,7 +186,8 @@ typedef struct CV_EXPORTS performance_metrics
 {
     TERM_ITERATIONS = 0,
     TERM_TIME = 1,
-    TERM_UNKNOWN = 2
+    TERM_INTERRUPT = 2,
+    TERM_UNKNOWN = -1
 };
 
 performance_metrics();
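TERM_UNKNOWN moves from 2 to -1 so that 2 can be taken by the new TERM_INTERRUPT code without renumbering the existing values. A sketch of how the four codes map to messages, mirroring the switch added to TestBase::reportMetrics() further down (the TERM_ITERATIONS wording is assumed; that case is not shown in this diff):

    const char* terminationName(int reason)
    {
        switch (reason)
        {
        case performance_metrics::TERM_ITERATIONS: return "reached iteration limit"; // assumed wording
        case performance_metrics::TERM_TIME:       return "reached time limit";
        case performance_metrics::TERM_INTERRUPT:  return "aborted by the performance testing framework";
        case performance_metrics::TERM_UNKNOWN:
        default:                                   return "unknown";
        }
    }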
@@ -224,10 +225,12 @@ protected:
     WARMUP_RNG,
     WARMUP_NONE
 };
 
+void reportMetrics(bool toJUnitXML = false);
 static void warmup(cv::InputOutputArray a, int wtype = WARMUP_READ);
 
 performance_metrics& calcMetrics();
-void reportMetrics(bool toJUnitXML = false);
+void RunPerfTestBody();
 private:
 typedef std::vector<std::pair<int, cv::Size> > SizeVector;
 typedef std::vector<int64> TimeVector;
@@ -332,12 +335,7 @@ CV_EXPORTS void PrintTo(const Size& sz, ::std::ostream* os);
 protected:\
     virtual void PerfTestBody();\
 };\
-TEST_F(test_case_name, test_name){\
-    try {\
-        PerfTestBody();\
-    }catch(cv::Exception e) { FAIL() << "Expected: PerfTestBody() doesn't throw an exception.\n Actual: it throws:\n " << e.what(); }\
-    catch(...) { FAIL() << "Expected: PerfTestBody() doesn't throw an exception.\n Actual: it throws."; }\
-}\
+TEST_F(test_case_name, test_name){ RunPerfTestBody(); }\
 }\
 void PERF_PROXY_NAMESPACE_NAME_(test_case_name, test_name)::test_case_name::PerfTestBody()
 
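Each PERF_TEST macro used to paste its own five-line try/catch into the generated TEST_F body; after this commit the body is a single call into the shared handler. A hand-expanded sketch with illustrative names:

    // generated before this commit (abridged):
    TEST_F(FooTest, bar)
    {
        try { PerfTestBody(); }
        catch(cv::Exception e) { FAIL() << "...it throws:\n " << e.what(); }
        catch(...)             { FAIL() << "...it throws."; }
    }

    // generated after this commit:
    TEST_F(FooTest, bar){ RunPerfTestBody(); }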
@@ -375,12 +373,7 @@ CV_EXPORTS void PrintTo(const Size& sz, ::std::ostream* os);
 protected:\
     virtual void PerfTestBody();\
 };\
-TEST_F(fixture, testname){\
-    try {\
-        PerfTestBody();\
-    }catch(cv::Exception e) { FAIL() << "Expected: PerfTestBody() doesn't throw an exception.\n Actual: it throws:\n " << e.what(); }\
-    catch(...) { FAIL() << "Expected: PerfTestBody() doesn't throw an exception.\n Actual: it throws."; }\
-}\
+TEST_F(fixture, testname){ RunPerfTestBody(); }\
 }\
 void PERF_PROXY_NAMESPACE_NAME_(fixture, testname)::fixture::PerfTestBody()
 
@@ -413,12 +406,7 @@ CV_EXPORTS void PrintTo(const Size& sz, ::std::ostream* os);
 protected:\
     virtual void PerfTestBody();\
 };\
-TEST_P(fixture##_##name, name /*perf*/){\
-    try {\
-        PerfTestBody();\
-    }catch(cv::Exception e) { FAIL() << "Expected: PerfTestBody() doesn't throw an exception.\n Actual: it throws:\n " << e.what(); }\
-    catch(...) { FAIL() << "Expected: PerfTestBody() doesn't throw an exception.\n Actual: it throws."; }\
-}\
+TEST_P(fixture##_##name, name /*perf*/){ RunPerfTestBody(); }\
 INSTANTIATE_TEST_CASE_P(/*none*/, fixture##_##name, params);\
 void fixture##_##name::PerfTestBody()
 
@@ -27,6 +27,11 @@ void randu(cv::Mat& m)
     }
 }
 
+/*****************************************************************************************\
+*                        inner exception class for early termination
+\*****************************************************************************************/
+class PerfEarlyExitException: public cv::Exception {};
+
 /*****************************************************************************************\
 *                                   ::perf::Regression
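PerfEarlyExitException is a tag type: a helper that detects an unusable precondition records the failure with ADD_FAILURE() and throws it, and the per-test wrapper turns the stack unwind into TERM_INTERRUPT instead of letting the test body run on bad data. A self-contained sketch of the same pattern outside the framework (all names illustrative):

    #include <cstdio>
    #include <string>

    struct EarlyExit {};                       // plays the role of PerfEarlyExitException

    std::string getResource(bool exists)
    {
        if (!exists)
        {
            std::printf("failure recorded\n"); // stands in for ADD_FAILURE()
            throw EarlyExit();                 // leave the test body immediately
        }
        return "path/to/resource";
    }

    void runBody()
    {
        try
        {
            getResource(false);                // never continues past the throw
        }
        catch (EarlyExit&)
        {
            std::printf("termination reason: interrupt\n"); // maps to TERM_INTERRUPT
        }
    }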
@@ -381,9 +386,9 @@ const char *command_line_keys =
 "{!!bugbugbugbug!! |perf_min_samples |10 |minimal required numer of samples}"
 "{!!bugbugbugbug!! |perf_seed |809564 |seed for random numbers generator}"
 #if ANDROID
-"{!!bugbugbugbug!! |perf_time_limit |2.0 |default time limit for a single test (in seconds)}"
+"{!!bugbugbugbug!! |perf_time_limit |6.0 |default time limit for a single test (in seconds)}"
 #else
-"{!!bugbugbugbug!! |perf_time_limit |1.0 |default time limit for a single test (in seconds)}"
+"{!!bugbugbugbug!! |perf_time_limit |3.0 |default time limit for a single test (in seconds)}"
 #endif
 "{!!bugbugbugbug!! |perf_max_deviation |1.0 |}"
 "{h |help |false |}"
@@ -411,8 +416,6 @@ void TestBase::Init(int argc, const char* const argv[])
         return;
     }
 
-    //LOGD("!!!!!!!!!!!! %f !!!!!!", param_time_limit);
-
     timeLimitDefault = param_time_limit == 0.0 ? 1 : (int64)(param_time_limit * cv::getTickFrequency());
     _timeadjustment = _calibrate();
 }
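As the timeLimitDefault line above shows, the time limit is stored in clock ticks: the seconds value from the command line is multiplied by cv::getTickFrequency(). The reportMetrics() output later in this diff performs the inverse, dividing tick counts by m.frequency to print milliseconds. Both conversions in a minimal sketch (assumes only the OpenCV core header):

    #include <opencv2/core/core.hpp>

    long long secondsToTicks(double seconds)
    {
        return (long long)(seconds * cv::getTickFrequency());  // as in TestBase::Init()
    }

    double ticksToMilliseconds(long long ticks)
    {
        return ticks * 1e3 / cv::getTickFrequency();           // as in reportMetrics()
    }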
@@ -567,12 +570,15 @@ performance_metrics& TestBase::calcMetrics()
     metrics.samples = (unsigned int)times.size();
     metrics.outliers = 0;
 
+    if (metrics.terminationReason != performance_metrics::TERM_INTERRUPT)
+    {
     if (currentIter == nIters)
         metrics.terminationReason = performance_metrics::TERM_ITERATIONS;
     else if (totalTime >= timeLimit)
         metrics.terminationReason = performance_metrics::TERM_TIME;
     else
         metrics.terminationReason = performance_metrics::TERM_UNKNOWN;
+    }
 
     std::sort(times.begin(), times.end());
 
@@ -697,7 +703,7 @@ void TestBase::reportMetrics(bool toJUnitXML)
 #endif
 
     if (type_param) LOGD("type = %11s", type_param);
-    if (value_param) LOGD("param = %11s", value_param);
+    if (value_param) LOGD("params = %11s", value_param);
 
     switch (m.terminationReason)
     {
@@ -707,6 +713,9 @@ void TestBase::reportMetrics(bool toJUnitXML)
     case performance_metrics::TERM_TIME:
         LOGD("termination reason: reached time limit");
         break;
+    case performance_metrics::TERM_INTERRUPT:
+        LOGD("termination reason: aborted by the performance testing framework");
+        break;
     case performance_metrics::TERM_UNKNOWN:
     default:
         LOGD("termination reason: unknown");
@@ -721,6 +730,8 @@ void TestBase::reportMetrics(bool toJUnitXML)
     LOGD("samples =%11u of %u", m.samples, nIters);
     LOGD("outliers =%11u", m.outliers);
     LOGD("frequency =%11.0f", m.frequency);
+    if (m.samples > 0)
+    {
     LOGD("min =%11.0f = %.2fms", m.min, m.min * 1e3 / m.frequency);
     LOGD("median =%11.0f = %.2fms", m.median, m.median * 1e3 / m.frequency);
     LOGD("gmean =%11.0f = %.2fms", m.gmean, m.gmean * 1e3 / m.frequency);
@@ -729,6 +740,7 @@ void TestBase::reportMetrics(bool toJUnitXML)
     LOGD("stddev =%11.0f = %.2fms", m.stddev, m.stddev * 1e3 / m.frequency);
     }
 }
+}
 
 void TestBase::SetUp()
 {
@@ -762,7 +774,7 @@ std::string TestBase::getDataPath(const std::string& relativePath)
     if (relativePath.empty())
     {
         ADD_FAILURE() << " Bad path to test resource";
-        return std::string();
+        throw PerfEarlyExitException();
     }
 
     const char *data_path_dir = getenv("OPENCV_TEST_DATA_PATH");
@@ -791,10 +803,34 @@ std::string TestBase::getDataPath(const std::string& relativePath)
     if (fp)
         fclose(fp);
     else
+    {
         ADD_FAILURE() << " Requested file \"" << path << "\" does not exist.";
+        throw PerfEarlyExitException();
+    }
     return path;
 }
 
+void TestBase::RunPerfTestBody()
+{
+    try
+    {
+        this->PerfTestBody();
+    }
+    catch(PerfEarlyExitException)
+    {
+        metrics.terminationReason = performance_metrics::TERM_INTERRUPT;
+        return;//no additional failure logging
+    }
+    catch(cv::Exception e)
+    {
+        FAIL() << "Expected: PerfTestBody() doesn't throw an exception.\n Actual: it throws:\n " << e.what();
+    }
+    catch(...)
+    {
+        FAIL() << "Expected: PerfTestBody() doesn't throw an exception.\n Actual: it throws.";
+    }
+}
+
 /*****************************************************************************************\
 *                            ::perf::TestBase::_declareHelper
 \*****************************************************************************************/