perf tests: allow skipping performance tests

Alexander Alekhin 2013-11-12 18:15:50 +04:00
parent 376993be4c
commit f1873bbca1
2 changed files with 37 additions and 13 deletions

modules/ts/include/opencv2/ts/ts_perf.hpp

@@ -243,6 +243,7 @@ typedef struct CV_EXPORTS performance_metrics
         TERM_TIME = 1,
         TERM_INTERRUPT = 2,
         TERM_EXCEPTION = 3,
+        TERM_SKIP_TEST = 4, // there are some limitations and the test should be skipped
         TERM_UNKNOWN = -1
     };
@@ -279,6 +280,8 @@ public:
     static enum PERF_STRATEGY getPerformanceStrategy();
     static enum PERF_STRATEGY setPerformanceStrategy(enum PERF_STRATEGY strategy);
 
+    class PerfSkipTestException: public cv::Exception {};
+
 protected:
     virtual void PerfTestBody() = 0;
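
Taken together, the two header changes give a test a clean way to bail out. Below is a minimal sketch of how a perf test body might use the new exception; it assumes the standard OpenCV perf macros (PERF_TEST, TEST_CYCLE, SANITY_CHECK), and the SSE2 guard is only an example precondition. None of this usage code is part of the commit itself.

    #include "perf_precomp.hpp"  // the usual perf-module precompiled header

    using namespace perf;

    // Hypothetical test: skip the measurement when a requirement is unmet.
    PERF_TEST(blur, skip_without_sse2)
    {
        // Example guard only; any unmet limitation works the same way.
        if (!cv::checkHardwareSupport(CV_CPU_SSE2))
            throw PerfSkipTestException();  // RunPerfTestBody() records TERM_SKIP_TEST

        cv::Mat src(720, 1280, CV_8UC1), dst;
        declare.in(src, WARMUP_RNG);

        TEST_CYCLE() cv::blur(src, dst, cv::Size(3, 3));

        SANITY_CHECK(dst);
    }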

modules/ts/src/ts_perf.cpp

@@ -1175,7 +1175,14 @@ void TestBase::reportMetrics(bool toJUnitXML)
 {
     performance_metrics& m = calcMetrics();
-    if (toJUnitXML)
+    if (m.terminationReason == performance_metrics::TERM_SKIP_TEST)
+    {
+        if (toJUnitXML)
+        {
+            RecordProperty("custom_status", "skipped");
+        }
+    }
+    else if (toJUnitXML)
     {
         RecordProperty("bytesIn", (int)m.bytesIn);
         RecordProperty("bytesOut", (int)m.bytesOut);
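
RecordProperty() is googletest's hook for attaching extra key/value pairs to the current test case in the generated XML report, so tools that consume the JUnit output can tell a skipped run apart from an ordinary pass. Note that custom_status is a property name introduced by this commit, not a standard JUnit attribute, so report consumers must look for it explicitly.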
@@ -1267,21 +1274,30 @@ void TestBase::SetUp()
 
 void TestBase::TearDown()
 {
-    if (!HasFailure() && !verified)
-        ADD_FAILURE() << "The test has no sanity checks. There should be at least one check at the end of a performance test.";
-
-    validateMetrics();
-    if (HasFailure())
-        reportMetrics(false);
+    if (metrics.terminationReason == performance_metrics::TERM_SKIP_TEST)
+    {
+        LOGI("\tTest was skipped");
+        GTEST_SUCCEED() << "Test was skipped";
+    }
     else
     {
-        const ::testing::TestInfo* const test_info = ::testing::UnitTest::GetInstance()->current_test_info();
-        const char* type_param = test_info->type_param();
-        const char* value_param = test_info->value_param();
-        if (value_param) printf("[ VALUE ] \t%s\n", value_param), fflush(stdout);
-        if (type_param) printf("[ TYPE ] \t%s\n", type_param), fflush(stdout);
-        reportMetrics(true);
+        if (!HasFailure() && !verified)
+            ADD_FAILURE() << "The test has no sanity checks. There should be at least one check at the end of a performance test.";
+
+        validateMetrics();
+        if (HasFailure())
+        {
+            reportMetrics(false);
+            return;
+        }
     }
+
+    const ::testing::TestInfo* const test_info = ::testing::UnitTest::GetInstance()->current_test_info();
+    const char* type_param = test_info->type_param();
+    const char* value_param = test_info->value_param();
+    if (value_param) printf("[ VALUE ] \t%s\n", value_param), fflush(stdout);
+    if (type_param) printf("[ TYPE ] \t%s\n", type_param), fflush(stdout);
+    reportMetrics(true);
 }
 
 std::string TestBase::getDataPath(const std::string& relativePath)
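
The googletest bundled at this time has no native skipped state (GTEST_SKIP() only appeared in much later releases), so the skip branch reports a success with an explanatory message and then falls through to the common tail, where reportMetrics(true) emits the custom_status property shown above.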
@@ -1331,6 +1347,11 @@ void TestBase::RunPerfTestBody()
     {
         this->PerfTestBody();
     }
+    catch(PerfSkipTestException&)
+    {
+        metrics.terminationReason = performance_metrics::TERM_SKIP_TEST;
+        return;
+    }
     catch(PerfEarlyExitException&)
     {
         metrics.terminationReason = performance_metrics::TERM_INTERRUPT;
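
Placement of the new handler matters: PerfSkipTestException derives from cv::Exception (see the header change above), and further down RunPerfTestBody ends in more general handlers, including one for cv::Exception (outside this hunk) that converts exceptions into test failures, so the skip case must be caught first.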