perf tests: allow skipping performance tests
commit f1873bbca1 (parent 376993be4c)
@@ -243,6 +243,7 @@ typedef struct CV_EXPORTS performance_metrics
         TERM_TIME = 1,
         TERM_INTERRUPT = 2,
         TERM_EXCEPTION = 3,
+        TERM_SKIP_TEST = 4, // there are some limitations and the test should be skipped
         TERM_UNKNOWN = -1
     };
@@ -279,6 +280,8 @@ public:
     static enum PERF_STRATEGY getPerformanceStrategy();
     static enum PERF_STRATEGY setPerformanceStrategy(enum PERF_STRATEGY strategy);
 
+    class PerfSkipTestException: public cv::Exception {};
+
 protected:
     virtual void PerfTestBody() = 0;
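With PerfSkipTestException declared on the test fixture, a perf test body can bail out cleanly when its runtime preconditions are not met; the new catch in RunPerfTestBody() (last hunk below) turns the throw into TERM_SKIP_TEST. A minimal usage sketch — the test name and the OpenCL availability check are illustrative assumptions, not part of this commit:

    PERF_TEST(SkipDemo, heavyKernel)        // hypothetical perf test, for illustration only
    {
        if (!cv::ocl::haveOpenCL())         // assumed precondition; any capability check works
            throw PerfSkipTestException();  // caught by TestBase::RunPerfTestBody()

        // ... usual measurement loop and SANITY_CHECK ...
    }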
@@ -1175,7 +1175,14 @@ void TestBase::reportMetrics(bool toJUnitXML)
 {
     performance_metrics& m = calcMetrics();
 
+    if (m.terminationReason == performance_metrics::TERM_SKIP_TEST)
+    {
         if (toJUnitXML)
+        {
+            RecordProperty("custom_status", "skipped");
+        }
+    }
+    else if (toJUnitXML)
     {
         RecordProperty("bytesIn", (int)m.bytesIn);
         RecordProperty("bytesOut", (int)m.bytesOut);
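RecordProperty() is Google Test's hook for attaching key/value pairs to the current test case in the JUnit-style XML report, so CI tooling can tell a skipped run apart from a measured one. The resulting report entry would look roughly like this (all attributes except custom_status are illustrative):

    <testcase name="heavyKernel" classname="SkipDemo" time="0.002" custom_status="skipped" />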
@@ -1267,21 +1274,30 @@ void TestBase::SetUp()
 
 void TestBase::TearDown()
 {
+    if (metrics.terminationReason == performance_metrics::TERM_SKIP_TEST)
+    {
+        LOGI("\tTest was skipped");
+        GTEST_SUCCEED() << "Test was skipped";
+    }
+    else
+    {
     if (!HasFailure() && !verified)
         ADD_FAILURE() << "The test has no sanity checks. There should be at least one check at the end of performance test.";
 
     validateMetrics();
     if (HasFailure())
-        reportMetrics(false);
-    else
     {
+        reportMetrics(false);
+        return;
+    }
+    }
 
     const ::testing::TestInfo* const test_info = ::testing::UnitTest::GetInstance()->current_test_info();
     const char* type_param = test_info->type_param();
     const char* value_param = test_info->value_param();
     if (value_param) printf("[ VALUE ] \t%s\n", value_param), fflush(stdout);
     if (type_param) printf("[ TYPE ] \t%s\n", type_param), fflush(stdout);
     reportMetrics(true);
-    }
 }
 
 std::string TestBase::getDataPath(const std::string& relativePath)
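Because the patch avoids re-indenting the surviving lines, the resulting shape of TearDown() is easier to read with whitespace normalized. This is a reconstruction of the hunk above, not additional code:

    void TestBase::TearDown()
    {
        if (metrics.terminationReason == performance_metrics::TERM_SKIP_TEST)
        {
            LOGI("\tTest was skipped");
            GTEST_SUCCEED() << "Test was skipped";  // a skipped test counts as passing
        }
        else
        {
            if (!HasFailure() && !verified)
                ADD_FAILURE() << "The test has no sanity checks. ...";
            validateMetrics();
            if (HasFailure())
            {
                reportMetrics(false);  // console report only
                return;                // failed runs skip the JUnit reporting below
            }
        }
        // both the skipped and the successful path fall through to reportMetrics(true),
        // where TERM_SKIP_TEST is mapped to custom_status="skipped" (reportMetrics() hunk above)
        ...  // print [ VALUE ]/[ TYPE ] parameters, then reportMetrics(true)
    }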
@@ -1331,6 +1347,11 @@ void TestBase::RunPerfTestBody()
     {
         this->PerfTestBody();
     }
+    catch(PerfSkipTestException&)
+    {
+        metrics.terminationReason = performance_metrics::TERM_SKIP_TEST;
+        return;
+    }
     catch(PerfEarlyExitException&)
     {
         metrics.terminationReason = performance_metrics::TERM_INTERRUPT;
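The ordering of the catch clauses matters here: PerfSkipTestException derives from cv::Exception, so the new arm has to precede any generic catch(cv::Exception&) handler (the hunk is truncated before the remaining arms, but TERM_EXCEPTION in the enum suggests one exists), otherwise a skip would be misreported as an exception failure. Schematically, with the assumed later arm marked:

    try { this->PerfTestBody(); }
    catch(PerfSkipTestException&)   { metrics.terminationReason = performance_metrics::TERM_SKIP_TEST; return; }
    catch(PerfEarlyExitException&)  { metrics.terminationReason = performance_metrics::TERM_INTERRUPT; /* ... */ }
    // catch(cv::Exception&)        // presumably maps to TERM_EXCEPTION (outside this hunk)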