#include "precomp.hpp"

#ifdef HAVE_CUDA
#include "opencv2/core/gpumat.hpp"
#endif

#ifdef ANDROID
# include <sys/time.h>
#endif

using namespace perf;

int64 TestBase::timeLimitDefault = 0;
unsigned int TestBase::iterationsLimitDefault = (unsigned int)(-1);
int64 TestBase::_timeadjustment = 0;

// Item [0] will be considered the default implementation.
static std::vector<std::string> available_impls;

static std::string param_impl;

static enum PERF_STRATEGY param_strategy = PERF_STRATEGY_BASE;

static double       param_max_outliers;
static double       param_max_deviation;
static unsigned int param_min_samples;
static unsigned int param_force_samples;
static uint64       param_seed;
static double       param_time_limit;
static int          param_threads;
static bool         param_write_sanity;
static bool         param_verify_sanity;
#ifdef HAVE_CUDA
static int          param_cuda_device;
#endif

#ifdef ANDROID
static int  param_affinity_mask;
static bool log_power_checkpoints;

#include <sys/syscall.h>
#include <pthread.h>
static void setCurrentThreadAffinityMask(int mask)
{
    pid_t pid = gettid();
    int syscallres = syscall(__NR_sched_setaffinity, pid, sizeof(mask), &mask);
    if (syscallres)
    {
        int err = errno;
        (void)err; // suppress an unused-variable warning when LOGE compiles to nothing
        LOGE("Error in the syscall setaffinity: mask=%d=0x%x err=%d=0x%x", mask, mask, err, err);
    }
}
#endif
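
// Usage note (a sketch, not part of the framework API): the affinity mask has
// one bit per logical CPU (bit i = CPU i), so a mask of 0x3 pins the
// measurement thread to cores 0 and 1. Normally this value comes from the
// --perf_affinity_mask command-line option rather than a direct call:
//
//     setCurrentThreadAffinityMask(0x3); // hypothetical direct call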

namespace {

class PerfEnvironment: public ::testing::Environment
{
public:
    void TearDown()
    {
        cv::setNumThreads(-1);
    }
};

} // namespace

static void randu(cv::Mat& m)
{
    const int bigValue = 0x00000FFF;
    if (m.depth() < CV_32F)
    {
        int minmax[] = {0, 256};
        cv::Mat mr = cv::Mat(m.rows, (int)(m.cols * m.elemSize()), CV_8U, m.ptr(), m.step[0]);
        cv::randu(mr, cv::Mat(1, 1, CV_32S, minmax), cv::Mat(1, 1, CV_32S, minmax + 1));
    }
    else if (m.depth() == CV_32F)
    {
        //float minmax[] = {-FLT_MAX, FLT_MAX};
        float minmax[] = {-bigValue, bigValue};
        cv::Mat mr = m.reshape(1);
        cv::randu(mr, cv::Mat(1, 1, CV_32F, minmax), cv::Mat(1, 1, CV_32F, minmax + 1));
    }
    else
    {
        //double minmax[] = {-DBL_MAX, DBL_MAX};
        double minmax[] = {-bigValue, bigValue};
        cv::Mat mr = m.reshape(1);
        cv::randu(mr, cv::Mat(1, 1, CV_64F, minmax), cv::Mat(1, 1, CV_64F, minmax + 1));
    }
}
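
// Note on the integer branch above: the matrix is reinterpreted as a CV_8U view
// over its raw bytes, so a single cv::randu call covers any integer
// depth/channel layout; float and double mats are instead reshaped to one
// channel and filled within [-bigValue, bigValue].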

/*****************************************************************************************\
*                       inner exception class for early termination
\*****************************************************************************************/

class PerfEarlyExitException: public cv::Exception {};

/*****************************************************************************************\
*                                   ::perf::Regression
\*****************************************************************************************/

Regression& Regression::instance()
{
    static Regression single;
    return single;
}

Regression& Regression::add(TestBase* test, const std::string& name, cv::InputArray array, double eps, ERROR_TYPE err)
{
    if(test) test->setVerified();
    return instance()(name, array, eps, err);
}

Regression& Regression::addKeypoints(TestBase* test, const std::string& name, const std::vector<cv::KeyPoint>& array, double eps, ERROR_TYPE err)
{
    int len = (int)array.size();
    cv::Mat pt      (len, 1, CV_32FC2, len ? (void*)&array[0].pt : 0,       sizeof(cv::KeyPoint));
    cv::Mat size    (len, 1, CV_32FC1, len ? (void*)&array[0].size : 0,     sizeof(cv::KeyPoint));
    cv::Mat angle   (len, 1, CV_32FC1, len ? (void*)&array[0].angle : 0,    sizeof(cv::KeyPoint));
    cv::Mat response(len, 1, CV_32FC1, len ? (void*)&array[0].response : 0, sizeof(cv::KeyPoint));
    cv::Mat octave  (len, 1, CV_32SC1, len ? (void*)&array[0].octave : 0,   sizeof(cv::KeyPoint));
    cv::Mat class_id(len, 1, CV_32SC1, len ? (void*)&array[0].class_id : 0, sizeof(cv::KeyPoint));

    return Regression::add(test, name + "-pt", pt, eps, ERROR_ABSOLUTE)
                                (name + "-size", size, eps, ERROR_ABSOLUTE)
                                (name + "-angle", angle, eps, ERROR_ABSOLUTE)
                                (name + "-response", response, eps, err)
                                (name + "-octave", octave, eps, ERROR_ABSOLUTE)
                                (name + "-class_id", class_id, eps, ERROR_ABSOLUTE);
}
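
// Each of the six Mats above is a strided view into the same
// std::vector<cv::KeyPoint> buffer: the data pointer targets one KeyPoint field
// and the row step is sizeof(cv::KeyPoint), so every field is checked without
// copying the keypoints into per-field arrays first.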

Regression& Regression::addMatches(TestBase* test, const std::string& name, const std::vector<cv::DMatch>& array, double eps, ERROR_TYPE err)
{
    int len = (int)array.size();
    cv::Mat queryIdx(len, 1, CV_32SC1, len ? (void*)&array[0].queryIdx : 0, sizeof(cv::DMatch));
    cv::Mat trainIdx(len, 1, CV_32SC1, len ? (void*)&array[0].trainIdx : 0, sizeof(cv::DMatch));
    cv::Mat imgIdx  (len, 1, CV_32SC1, len ? (void*)&array[0].imgIdx : 0,   sizeof(cv::DMatch));
    cv::Mat distance(len, 1, CV_32FC1, len ? (void*)&array[0].distance : 0, sizeof(cv::DMatch));

    return Regression::add(test, name + "-queryIdx", queryIdx, DBL_EPSILON, ERROR_ABSOLUTE)
                                (name + "-trainIdx", trainIdx, DBL_EPSILON, ERROR_ABSOLUTE)
                                (name + "-imgIdx", imgIdx, DBL_EPSILON, ERROR_ABSOLUTE)
                                (name + "-distance", distance, eps, err);
}

void Regression::Init(const std::string& testSuitName, const std::string& ext)
{
    instance().init(testSuitName, ext);
}

void Regression::init(const std::string& testSuitName, const std::string& ext)
{
    if (!storageInPath.empty())
    {
        LOGE("Subsequent initialization of Regression utility is not allowed.");
        return;
    }

    const char *data_path_dir = getenv("OPENCV_TEST_DATA_PATH");
    const char *path_separator = "/";

    if (data_path_dir)
    {
        int len = (int)strlen(data_path_dir)-1;
        if (len < 0) len = 0;
        std::string path_base = (data_path_dir[0] == 0 ? std::string(".") : std::string(data_path_dir))
                + (data_path_dir[len] == '/' || data_path_dir[len] == '\\' ? "" : path_separator)
                + "perf"
                + path_separator;

        storageInPath = path_base + testSuitName + ext;
        storageOutPath = path_base + testSuitName;
    }
    else
    {
        storageInPath = testSuitName + ext;
        storageOutPath = testSuitName;
    }

    suiteName = testSuitName;

    try
    {
        if (storageIn.open(storageInPath, cv::FileStorage::READ))
        {
            rootIn = storageIn.root();
            if (storageInPath.length() > 3 && storageInPath.substr(storageInPath.length()-3) == ".gz")
                storageOutPath += "_new";
            storageOutPath += ext;
        }
    }
    catch(cv::Exception&)
    {
        LOGE("Failed to open sanity data for reading: %s", storageInPath.c_str());
    }

    if(!storageIn.isOpened())
        storageOutPath = storageInPath;
}
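
// Worked example of the path logic above (hypothetical arguments): with
// OPENCV_TEST_DATA_PATH=/opt/testdata, a call like Regression::Init("core", ".xml")
// reads sanity data from /opt/testdata/perf/core.xml and writes new records next
// to it; without the environment variable, ./core.xml is used instead.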

Regression::Regression() : regRNG(cv::getTickCount())//this rng should be really random
{
}

Regression::~Regression()
{
    if (storageIn.isOpened())
        storageIn.release();
    if (storageOut.isOpened())
    {
        if (!currentTestNodeName.empty())
            storageOut << "}";
        storageOut.release();
    }
}

cv::FileStorage& Regression::write()
{
    if (!storageOut.isOpened() && !storageOutPath.empty())
    {
        int mode = (storageIn.isOpened() && storageInPath == storageOutPath)
                ? cv::FileStorage::APPEND : cv::FileStorage::WRITE;
        storageOut.open(storageOutPath, mode);
        if (!storageOut.isOpened())
        {
            LOGE("Could not open \"%s\" file for writing", storageOutPath.c_str());
            storageOutPath.clear();
        }
        else if (mode == cv::FileStorage::WRITE && !rootIn.empty())
        {
            //TODO: write content of rootIn node into the storageOut
        }
    }
    return storageOut;
}

std::string Regression::getCurrentTestNodeName()
{
    const ::testing::TestInfo* const test_info =
        ::testing::UnitTest::GetInstance()->current_test_info();

    if (test_info == 0)
        return "undefined";

    std::string nodename = std::string(test_info->test_case_name()) + "--" + test_info->name();
    size_t idx = nodename.find_first_of('/');
    if (idx != std::string::npos)
        nodename.erase(idx);

    const char* type_param = test_info->type_param();
    if (type_param != 0)
        (nodename += "--") += type_param;

    const char* value_param = test_info->value_param();
    if (value_param != 0)
        (nodename += "--") += value_param;

    for(size_t i = 0; i < nodename.length(); ++i)
        if (!isalnum(nodename[i]) && '_' != nodename[i])
            nodename[i] = '-';

    return nodename;
}

bool Regression::isVector(cv::InputArray a)
{
    return a.kind() == cv::_InputArray::STD_VECTOR_MAT || a.kind() == cv::_InputArray::STD_VECTOR_VECTOR;
}

double Regression::getElem(cv::Mat& m, int y, int x, int cn)
{
    switch (m.depth())
    {
    case CV_8U: return *(m.ptr<unsigned char>(y, x) + cn);
    case CV_8S: return *(m.ptr<signed char>(y, x) + cn);
    case CV_16U: return *(m.ptr<unsigned short>(y, x) + cn);
    case CV_16S: return *(m.ptr<signed short>(y, x) + cn);
    case CV_32S: return *(m.ptr<signed int>(y, x) + cn);
    case CV_32F: return *(m.ptr<float>(y, x) + cn);
    case CV_64F: return *(m.ptr<double>(y, x) + cn);
    default: return 0;
    }
}

void Regression::write(cv::Mat m)
{
    if (!m.empty() && m.dims < 2) return;

    double min, max;
    cv::minMaxIdx(m, &min, &max);
    write() << "min" << min << "max" << max;

    write() << "last" << "{" << "x" << m.size.p[1] - 1 << "y" << m.size.p[0] - 1
        << "val" << getElem(m, m.size.p[0] - 1, m.size.p[1] - 1, m.channels() - 1) << "}";

    int x, y, cn;
    x = regRNG.uniform(0, m.size.p[1]);
    y = regRNG.uniform(0, m.size.p[0]);
    cn = regRNG.uniform(0, m.channels());
    write() << "rng1" << "{" << "x" << x << "y" << y;
    if(cn > 0) write() << "cn" << cn;
    write() << "val" << getElem(m, y, x, cn) << "}";

    x = regRNG.uniform(0, m.size.p[1]);
    y = regRNG.uniform(0, m.size.p[0]);
    cn = regRNG.uniform(0, m.channels());
    write() << "rng2" << "{" << "x" << x << "y" << y;
    if (cn > 0) write() << "cn" << cn;
    write() << "val" << getElem(m, y, x, cn) << "}";
}

void Regression::verify(cv::FileNode node, cv::Mat actual, double eps, std::string argname, ERROR_TYPE err)
{
    if (!actual.empty() && actual.dims < 2) return;

    double expect_min = (double)node["min"];
    double expect_max = (double)node["max"];

    if (err == ERROR_RELATIVE)
        eps *= std::max(std::abs(expect_min), std::abs(expect_max));

    double actual_min, actual_max;
    cv::minMaxIdx(actual, &actual_min, &actual_max);

    ASSERT_NEAR(expect_min, actual_min, eps)
            << argname << " has unexpected minimal value" << std::endl;
    ASSERT_NEAR(expect_max, actual_max, eps)
            << argname << " has unexpected maximal value" << std::endl;

    cv::FileNode last = node["last"];
    double actual_last = getElem(actual, actual.size.p[0] - 1, actual.size.p[1] - 1, actual.channels() - 1);
    int expect_cols = (int)last["x"] + 1;
    int expect_rows = (int)last["y"] + 1;
    ASSERT_EQ(expect_cols, actual.size.p[1])
            << argname << " has unexpected number of columns" << std::endl;
    ASSERT_EQ(expect_rows, actual.size.p[0])
            << argname << " has unexpected number of rows" << std::endl;

    double expect_last = (double)last["val"];
    ASSERT_NEAR(expect_last, actual_last, eps)
            << argname << " has unexpected value of the last element" << std::endl;

    cv::FileNode rng1 = node["rng1"];
    int x1 = rng1["x"];
    int y1 = rng1["y"];
    int cn1 = rng1["cn"];

    double expect_rng1 = (double)rng1["val"];
    // it is safe to use x1 and y1 without checks here because we have already
    // verified that mat size is the same as recorded
    double actual_rng1 = getElem(actual, y1, x1, cn1);

    ASSERT_NEAR(expect_rng1, actual_rng1, eps)
            << argname << " has unexpected value of the ["<< x1 << ":" << y1 << ":" << cn1 <<"] element" << std::endl;

    cv::FileNode rng2 = node["rng2"];
    int x2 = rng2["x"];
    int y2 = rng2["y"];
    int cn2 = rng2["cn"];

    double expect_rng2 = (double)rng2["val"];
    double actual_rng2 = getElem(actual, y2, x2, cn2);

    ASSERT_NEAR(expect_rng2, actual_rng2, eps)
            << argname << " has unexpected value of the ["<< x2 << ":" << y2 << ":" << cn2 <<"] element" << std::endl;
}

void Regression::write(cv::InputArray array)
{
    write() << "kind" << array.kind();
    write() << "type" << array.type();
    if (isVector(array))
    {
        int total = (int)array.total();
        int idx = regRNG.uniform(0, total);
        write() << "len" << total;
        write() << "idx" << idx;

        cv::Mat m = array.getMat(idx);

        if (m.total() * m.channels() < 26) //5x5 or smaller
            write() << "val" << m;
        else
            write(m);
    }
    else
    {
        if (array.total() * array.channels() < 26) //5x5 or smaller
            write() << "val" << array.getMat();
        else
            write(array.getMat());
    }
}

static int countViolations(const cv::Mat& expected, const cv::Mat& actual, const cv::Mat& diff, double eps, double* max_violation = 0, double* max_allowed = 0)
{
    cv::Mat diff64f;
    diff.reshape(1).convertTo(diff64f, CV_64F);

    cv::Mat expected_abs = cv::abs(expected.reshape(1));
    cv::Mat actual_abs = cv::abs(actual.reshape(1));
    cv::Mat maximum, mask;
    cv::max(expected_abs, actual_abs, maximum);
    cv::multiply(maximum, cv::Vec<double, 1>(eps), maximum, CV_64F);
    cv::compare(diff64f, maximum, mask, cv::CMP_GT);

    int v = cv::countNonZero(mask);

    if (v > 0 && max_violation != 0 && max_allowed != 0)
    {
        int loc[10];
        cv::minMaxIdx(maximum, 0, max_allowed, 0, loc, mask);
        *max_violation = diff64f.at<double>(loc[1], loc[0]);
    }

    return v;
}
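
// Worked example of the relative criterion above: with eps = 0.01, the element
// pair (expected = 100.0, actual = 102.0) gives diff = 2.0 against an allowance
// of 0.01 * max(|100.0|, |102.0|) = 1.02, so it is counted as a violation.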

void Regression::verify(cv::FileNode node, cv::InputArray array, double eps, ERROR_TYPE err)
{
    int expected_kind = (int)node["kind"];
    int expected_type = (int)node["type"];
    ASSERT_EQ(expected_kind, array.kind()) << " Argument \"" << node.name() << "\" has unexpected kind";
    ASSERT_EQ(expected_type, array.type()) << " Argument \"" << node.name() << "\" has unexpected type";

    cv::FileNode valnode = node["val"];
    if (isVector(array))
    {
        int expected_length = (int)node["len"];
        ASSERT_EQ(expected_length, (int)array.total()) << " Vector \"" << node.name() << "\" has unexpected length";
        int idx = node["idx"];

        cv::Mat actual = array.getMat(idx);

        if (valnode.isNone())
        {
            ASSERT_LE((size_t)26, actual.total() * (size_t)actual.channels())
                    << " \"" << node.name() << "[" << idx << "]\" has unexpected number of elements";
            verify(node, actual, eps, cv::format("%s[%d]", node.name().c_str(), idx), err);
        }
        else
        {
            cv::Mat expected;
            valnode >> expected;

            if(expected.empty())
            {
                ASSERT_TRUE(actual.empty())
                    << " expected empty " << node.name() << "[" << idx<< "]";
            }
            else
            {
                ASSERT_EQ(expected.size(), actual.size())
                        << " " << node.name() << "[" << idx<< "] has unexpected size";

                cv::Mat diff;
                cv::absdiff(expected, actual, diff);

                if (err == ERROR_ABSOLUTE)
                {
                    if (!cv::checkRange(diff, true, 0, 0, eps))
                    {
                        if(expected.total() * expected.channels() < 12)
                            std::cout << " Expected: " << std::endl << expected << std::endl << " Actual:" << std::endl << actual << std::endl;

                        double max;
                        cv::minMaxIdx(diff.reshape(1), 0, &max);

                        FAIL() << " Absolute difference (=" << max << ") between argument \""
                               << node.name() << "[" << idx << "]\" and expected value is greater than " << eps;
                    }
                }
                else if (err == ERROR_RELATIVE)
                {
                    double maxv, maxa;
                    int violations = countViolations(expected, actual, diff, eps, &maxv, &maxa);
                    if (violations > 0)
                    {
                        if(expected.total() * expected.channels() < 12)
                            std::cout << " Expected: " << std::endl << expected << std::endl << " Actual:" << std::endl << actual << std::endl;

                        FAIL() << " Relative difference (" << maxv << " of " << maxa << " allowed) between argument \""
                               << node.name() << "[" << idx << "]\" and expected value is greater than " << eps << " in " << violations << " points";
                    }
                }
            }
        }
    }
    else
    {
        if (valnode.isNone())
        {
            ASSERT_LE((size_t)26, array.total() * (size_t)array.channels())
                    << " Argument \"" << node.name() << "\" has unexpected number of elements";
            verify(node, array.getMat(), eps, "Argument \"" + node.name() + "\"", err);
        }
        else
        {
            cv::Mat expected;
            valnode >> expected;
            cv::Mat actual = array.getMat();

            if(expected.empty())
            {
                ASSERT_TRUE(actual.empty())
                    << " expected empty " << node.name();
            }
            else
            {
                ASSERT_EQ(expected.size(), actual.size())
                        << " Argument \"" << node.name() << "\" has unexpected size";

                cv::Mat diff;
                cv::absdiff(expected, actual, diff);

                if (err == ERROR_ABSOLUTE)
                {
                    if (!cv::checkRange(diff, true, 0, 0, eps))
                    {
                        if(expected.total() * expected.channels() < 12)
                            std::cout << " Expected: " << std::endl << expected << std::endl << " Actual:" << std::endl << actual << std::endl;

                        double max;
                        cv::minMaxIdx(diff.reshape(1), 0, &max);

                        FAIL() << " Difference (=" << max << ") between argument \"" << node.name()
                               << "\" and expected value is greater than " << eps;
                    }
                }
                else if (err == ERROR_RELATIVE)
                {
                    double maxv, maxa;
                    int violations = countViolations(expected, actual, diff, eps, &maxv, &maxa);
                    if (violations > 0)
                    {
                        if(expected.total() * expected.channels() < 12)
                            std::cout << " Expected: " << std::endl << expected << std::endl << " Actual:" << std::endl << actual << std::endl;

                        FAIL() << " Relative difference (" << maxv << " of " << maxa << " allowed) between argument \"" << node.name()
                               << "\" and expected value is greater than " << eps << " in " << violations << " points";
                    }
                }
            }
        }
    }
}

Regression& Regression::operator() (const std::string& name, cv::InputArray array, double eps, ERROR_TYPE err)
{
    // exit if current test is already failed
    if(::testing::UnitTest::GetInstance()->current_test_info()->result()->Failed()) return *this;

    if(!array.empty() && array.depth() == CV_USRTYPE1)
    {
        ADD_FAILURE() << " Can not check regression for CV_USRTYPE1 data type for " << name;
        return *this;
    }

    std::string nodename = getCurrentTestNodeName();

    cv::FileNode n = rootIn[nodename];
    if(n.isNone())
    {
        if(param_write_sanity)
        {
            if (nodename != currentTestNodeName)
            {
                if (!currentTestNodeName.empty())
                    write() << "}";
                currentTestNodeName = nodename;

                write() << nodename << "{";
            }
            // TODO: verify that name is alphanumeric, current error message is useless
            write() << name << "{";
            write(array);
            write() << "}";
        }
        else if(param_verify_sanity)
        {
            ADD_FAILURE() << " No regression data for " << name << " argument";
        }
    }
    else
    {
        cv::FileNode this_arg = n[name];
        if (!this_arg.isMap())
            ADD_FAILURE() << " No regression data for " << name << " argument";
        else
            verify(this_arg, array, eps, err);
    }

    return *this;
}
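
// Typical call site in a performance test (a sketch; in practice this operator
// is usually reached through the SANITY_CHECK macros in ts_perf.hpp, which
// forward to Regression::add and therefore end up here):
//
//     cv::Mat dst = runMeasuredCode(src);        // hypothetical helper under test
//     SANITY_CHECK(dst, 1e-6, ERROR_RELATIVE);   // records or verifies "dst"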

/*****************************************************************************************\
*                               ::perf::performance_metrics
\*****************************************************************************************/
performance_metrics::performance_metrics()
{
    clear();
}

void performance_metrics::clear()
{
    bytesIn = 0;
    bytesOut = 0;
    samples = 0;
    outliers = 0;
    gmean = 0;
    gstddev = 0;
    mean = 0;
    stddev = 0;
    median = 0;
    min = 0;
    frequency = 0;
    terminationReason = TERM_UNKNOWN;
}

/*****************************************************************************************\
*                                   ::perf::TestBase
\*****************************************************************************************/

void TestBase::Init(int argc, const char* const argv[])
{
    std::vector<std::string> plain_only;
    plain_only.push_back("plain");
    TestBase::Init(plain_only, argc, argv);
}

void TestBase::Init(const std::vector<std::string> & availableImpls,
                    int argc, const char* const argv[])
{
    available_impls = availableImpls;

    const std::string command_line_keys =
        "{   |perf_max_outliers   |8        |percent of allowed outliers}"
        "{   |perf_min_samples    |10       |minimal required number of samples}"
        "{   |perf_force_samples  |100      |force set maximum number of samples for all tests}"
        "{   |perf_seed           |809564   |seed for random numbers generator}"
        "{   |perf_threads        |-1       |the number of worker threads, if parallel execution is enabled}"
        "{   |perf_write_sanity   |false    |create new records for sanity checks}"
        "{   |perf_verify_sanity  |false    |fail tests having no regression data for sanity checks}"
        "{   |perf_impl           |" + available_impls[0] +
                                        "|the implementation variant of functions under test}"
        "{   |perf_list_impls     |false    |list available implementation variants and exit}"
        "{   |perf_run_cpu        |false    |deprecated, equivalent to --perf_impl=plain}"
        "{   |perf_strategy       |default  |specifies performance measuring strategy: default, base or simple (weak restrictions)}"
#ifdef ANDROID
        "{   |perf_time_limit     |6.0      |default time limit for a single test (in seconds)}"
        "{   |perf_affinity_mask  |0        |set affinity mask for the main thread}"
        "{   |perf_log_power_checkpoints | |additional xml logging for power measurement}"
#else
        "{   |perf_time_limit     |3.0      |default time limit for a single test (in seconds)}"
#endif
        "{   |perf_max_deviation  |1.0      |}"
        "{h  |help                |false    |print help info}"
#ifdef HAVE_CUDA
        "{   |perf_cuda_device    |0        |run GPU test suite onto specific CUDA capable device}"
        "{   |perf_cuda_info_only |false    |print information about the system and available CUDA devices, then exit}"
#endif
    ;

    cv::CommandLineParser args(argc, argv, command_line_keys.c_str());
    if (args.get<bool>("help"))
    {
        args.printParams();
        printf("\n\n");
        return;
    }

    ::testing::AddGlobalTestEnvironment(new PerfEnvironment);

    param_impl = args.get<bool>("perf_run_cpu") ? "plain" : args.get<std::string>("perf_impl");
    std::string perf_strategy = args.get<std::string>("perf_strategy");
    if (perf_strategy == "default")
    {
        // nothing: keep the compiled-in default (PERF_STRATEGY_BASE)
    }
    else if (perf_strategy == "base")
    {
        param_strategy = PERF_STRATEGY_BASE;
    }
    else if (perf_strategy == "simple")
    {
        param_strategy = PERF_STRATEGY_SIMPLE;
    }
    else
    {
        printf("No such strategy: %s\n", perf_strategy.c_str());
        exit(1);
    }
    param_max_outliers  = std::min(100., std::max(0., args.get<double>("perf_max_outliers")));
    param_min_samples   = std::max(1u, args.get<unsigned int>("perf_min_samples"));
    param_max_deviation = std::max(0., args.get<double>("perf_max_deviation"));
    param_seed          = args.get<uint64>("perf_seed");
    param_time_limit    = std::max(0., args.get<double>("perf_time_limit"));
    param_force_samples = args.get<unsigned int>("perf_force_samples");
    param_write_sanity  = args.get<bool>("perf_write_sanity");
    param_verify_sanity = args.get<bool>("perf_verify_sanity");
    param_threads       = args.get<int>("perf_threads");
#ifdef ANDROID
    param_affinity_mask   = args.get<int>("perf_affinity_mask");
    log_power_checkpoints = args.get<bool>("perf_log_power_checkpoints");
#endif

    bool param_list_impls = args.get<bool>("perf_list_impls");

    if (param_list_impls)
    {
        fputs("Available implementation variants:", stdout);
        for (size_t i = 0; i < available_impls.size(); ++i) {
            putchar(' ');
            fputs(available_impls[i].c_str(), stdout);
        }
        putchar('\n');
        exit(0);
    }

    if (std::find(available_impls.begin(), available_impls.end(), param_impl) == available_impls.end())
    {
        printf("No such implementation: %s\n", param_impl.c_str());
        exit(1);
    }

#ifdef HAVE_CUDA
    bool printOnly = args.get<bool>("perf_cuda_info_only");

    if (printOnly)
        exit(0);
#endif

    if (available_impls.size() > 1)
        printf("[----------]\n[   INFO   ] \tImplementation variant: %s.\n[----------]\n", param_impl.c_str()), fflush(stdout);

#ifdef HAVE_CUDA
    param_cuda_device = std::max(0, std::min(cv::gpu::getCudaEnabledDeviceCount(), args.get<int>("perf_cuda_device")));

    if (param_impl == "cuda")
    {
        cv::gpu::DeviceInfo info(param_cuda_device);
        if (!info.isCompatible())
        {
            printf("[----------]\n[ FAILURE ] \tDevice %s is NOT compatible with current GPU module build.\n[----------]\n", info.name().c_str()), fflush(stdout);
            exit(-1);
        }

        cv::gpu::setDevice(param_cuda_device);

        printf("[----------]\n[ GPU INFO ] \tRun test suite on %s GPU.\n[----------]\n", info.name().c_str()), fflush(stdout);
    }
#endif

//    if (!args.check())
//    {
//        args.printErrors();
//        return;
//    }

    timeLimitDefault = param_time_limit == 0.0 ? 1 : (int64)(param_time_limit * cv::getTickFrequency());
    iterationsLimitDefault = param_force_samples == 0 ? (unsigned)(-1) : param_force_samples;
    _timeadjustment = _calibrate();
}
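
// Typical entry-point wiring (a sketch of what the CV_PERF_TEST_MAIN macro from
// ts_perf.hpp expands to, give or take module-specific setup):
//
//     int main(int argc, char** argv)
//     {
//         ::perf::TestBase::Init(argc, argv);      // parse --perf_* options, calibrate
//         ::testing::InitGoogleTest(&argc, argv);  // hand remaining flags to gtest
//         return RUN_ALL_TESTS();
//     }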

void TestBase::RecordRunParameters()
{
    ::testing::Test::RecordProperty("cv_implementation", param_impl);
    ::testing::Test::RecordProperty("cv_num_threads", param_threads);

#ifdef HAVE_CUDA
    if (param_impl == "cuda")
    {
        cv::gpu::DeviceInfo info(param_cuda_device);
        ::testing::Test::RecordProperty("cv_cuda_gpu", info.name());
    }
#endif
}

std::string TestBase::getSelectedImpl()
{
    return param_impl;
}

enum PERF_STRATEGY TestBase::getPerformanceStrategy()
{
    return param_strategy;
}

enum PERF_STRATEGY TestBase::setPerformanceStrategy(enum PERF_STRATEGY strategy)
{
    enum PERF_STRATEGY ret = param_strategy;
    param_strategy = strategy;
    return ret;
}

int64 TestBase::_calibrate()
{
    class _helper : public ::perf::TestBase
    {
    public:
        performance_metrics& getMetrics() { return calcMetrics(); }
        virtual void TestBody() {}
        virtual void PerfTestBody()
        {
            //the whole system warmup
            SetUp();
            cv::Mat a(2048, 2048, CV_32S, cv::Scalar(1));
            cv::Mat b(2048, 2048, CV_32S, cv::Scalar(2));
            declare.time(30);
            double s = 0;
            for(declare.iterations(20); startTimer(), next(); stopTimer())
                s+=a.dot(b);
            declare.time(s);

            //self calibration
            SetUp();
            for(declare.iterations(1000); startTimer(), next(); stopTimer()){}
        }
    };

    _timeadjustment = 0;
    _helper h;
    h.PerfTestBody();
    double compensation = h.getMetrics().min;
    if (param_strategy == PERF_STRATEGY_SIMPLE)
    {
        CV_Assert(compensation < 0.01 * cv::getTickFrequency());
        compensation = 0.0; // simple strategy doesn't require any compensation
    }
    LOGD("Time compensation is %.0f", compensation);
    return (int64)compensation;
}
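
// The value returned above is the minimum measured time of an empty timing
// iteration (startTimer()/next()/stopTimer() with no body); stopTimer()
// subtracts it as _timeadjustment from every sample, so reported times
// approximate the measured code alone rather than timer overhead.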

#ifdef _MSC_VER
# pragma warning(push)
# pragma warning(disable:4355)  // 'this' : used in base member initializer list
#endif
TestBase::TestBase(): declare(this)
{
}
#ifdef _MSC_VER
# pragma warning(pop)
#endif

void TestBase::declareArray(SizeVector& sizes, cv::InputOutputArray a, int wtype)
{
    if (!a.empty())
    {
        sizes.push_back(std::pair<int, cv::Size>(getSizeInBytes(a), getSize(a)));
        warmup(a, wtype);
    }
    else if (a.kind() != cv::_InputArray::NONE)
        ADD_FAILURE() << " Uninitialized input/output parameters are not allowed for performance tests";
}

void TestBase::warmup(cv::InputOutputArray a, int wtype)
{
    if (a.empty()) return;
    if (a.kind() != cv::_InputArray::STD_VECTOR_MAT && a.kind() != cv::_InputArray::STD_VECTOR_VECTOR)
        warmup_impl(a.getMat(), wtype);
    else
    {
        size_t total = a.total();
        for (size_t i = 0; i < total; ++i)
            warmup_impl(a.getMat((int)i), wtype);
    }
}

int TestBase::getSizeInBytes(cv::InputArray a)
{
    if (a.empty()) return 0;
    int total = (int)a.total();
    if (a.kind() != cv::_InputArray::STD_VECTOR_MAT && a.kind() != cv::_InputArray::STD_VECTOR_VECTOR)
        return total * CV_ELEM_SIZE(a.type());

    int size = 0;
    for (int i = 0; i < total; ++i)
        size += (int)a.total(i) * CV_ELEM_SIZE(a.type(i));

    return size;
}

cv::Size TestBase::getSize(cv::InputArray a)
{
    if (a.kind() != cv::_InputArray::STD_VECTOR_MAT && a.kind() != cv::_InputArray::STD_VECTOR_VECTOR)
        return a.size();
    return cv::Size();
}

bool TestBase::next()
{
    static int64 lastActivityPrintTime = 0;

    if (currentIter != (unsigned int)-1)
    {
        if (currentIter + 1 != times.size())
            ADD_FAILURE() << " next() is called before stopTimer()";
    }
    else
    {
        lastActivityPrintTime = 0;
        metrics.clear();
    }

    cv::theRNG().state = param_seed; //this rng should generate same numbers for each run
    ++currentIter;

    bool has_next = false;

    do {
        assert(currentIter == times.size());
        if (currentIter == 0)
        {
            has_next = true;
            break;
        }

        if (param_strategy == PERF_STRATEGY_BASE)
        {
            has_next = currentIter < nIters && totalTime < timeLimit;
        }
        else
        {
            assert(param_strategy == PERF_STRATEGY_SIMPLE);
            if (totalTime - lastActivityPrintTime >= cv::getTickFrequency() * 10)
            {
                std::cout << '.' << std::endl;
                lastActivityPrintTime = totalTime;
            }
            if (currentIter >= nIters)
            {
                has_next = false;
                break;
            }
            if (currentIter < param_min_samples)
            {
                has_next = true;
                break;
            }

            calcMetrics();

            double criteria = 0.03; // 3%
            if (fabs(metrics.mean) > 1e-6)
                has_next = metrics.stddev > criteria * fabs(metrics.mean);
            else
                has_next = true;
        }
    } while (false);

#ifdef ANDROID
    if (log_power_checkpoints)
    {
        timeval tim;
        gettimeofday(&tim, NULL);
        unsigned long long t1 = tim.tv_sec * 1000LLU + (unsigned long long)(tim.tv_usec / 1000.f);

        if (currentIter == 1) RecordProperty("test_start", cv::format("%llu",t1).c_str());
        if (!has_next) RecordProperty("test_complete", cv::format("%llu",t1).c_str());
    }
#endif

    if (has_next)
        startTimer(); // really we should measure activity from this moment, so reset start time
    return has_next;
}
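
// Canonical measurement loop driven by next() (the same pattern _calibrate()
// uses above; the TEST_CYCLE macro in ts_perf.hpp wraps a variant of it):
//
//     for(declare.iterations(100); startTimer(), next(); stopTimer())
//         codeUnderTest();   // hypothetical function being measured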

void TestBase::warmup_impl(cv::Mat m, int wtype)
{
    switch(wtype)
    {
    case WARMUP_READ:
        cv::sum(m.reshape(1));
        return;
    case WARMUP_WRITE:
        m.reshape(1).setTo(cv::Scalar::all(0));
        return;
    case WARMUP_RNG:
        randu(m);
        return;
    default:
        return;
    }
}

unsigned int TestBase::getTotalInputSize() const
{
    unsigned int res = 0;
    for (SizeVector::const_iterator i = inputData.begin(); i != inputData.end(); ++i)
        res += i->first;
    return res;
}

unsigned int TestBase::getTotalOutputSize() const
{
    unsigned int res = 0;
    for (SizeVector::const_iterator i = outputData.begin(); i != outputData.end(); ++i)
        res += i->first;
    return res;
}

void TestBase::startTimer()
{
    lastTime = cv::getTickCount();
}

void TestBase::stopTimer()
{
    int64 time = cv::getTickCount();
    if (lastTime == 0)
        ADD_FAILURE() << " stopTimer() is called before startTimer()/next()";
    lastTime = time - lastTime;
    totalTime += lastTime;
    lastTime -= _timeadjustment;
    if (lastTime < 0) lastTime = 0;
    times.push_back(lastTime);
    lastTime = 0;
}

performance_metrics& TestBase::calcMetrics()
{
    CV_Assert(metrics.samples <= (unsigned int)currentIter);
    if ((metrics.samples == (unsigned int)currentIter) || times.size() == 0)
        return metrics;

    metrics.bytesIn = getTotalInputSize();
    metrics.bytesOut = getTotalOutputSize();
    metrics.frequency = cv::getTickFrequency();
    metrics.samples = (unsigned int)times.size();
    metrics.outliers = 0;

    if (metrics.terminationReason != performance_metrics::TERM_INTERRUPT && metrics.terminationReason != performance_metrics::TERM_EXCEPTION)
    {
        if (currentIter == nIters)
            metrics.terminationReason = performance_metrics::TERM_ITERATIONS;
        else if (totalTime >= timeLimit)
            metrics.terminationReason = performance_metrics::TERM_TIME;
        else
            metrics.terminationReason = performance_metrics::TERM_UNKNOWN;
    }

    std::sort(times.begin(), times.end());

    TimeVector::const_iterator start = times.begin();
    TimeVector::const_iterator end = times.end();

    if (param_strategy == PERF_STRATEGY_BASE)
    {
        //estimate mean and stddev for log(time)
        double gmean = 0;
        double gstddev = 0;
        int n = 0;
        for(TimeVector::const_iterator i = times.begin(); i != times.end(); ++i)
        {
            double x = static_cast<double>(*i)/runsPerIteration;
            if (x < DBL_EPSILON) continue;
            double lx = log(x);

            ++n;
            double delta = lx - gmean;
            gmean += delta / n;
            gstddev += delta * (lx - gmean);
        }

        gstddev = n > 1 ? sqrt(gstddev / (n - 1)) : 0;

        //filter outliers assuming log-normal distribution
        //http://stackoverflow.com/questions/1867426/modeling-distribution-of-performance-measurements
        if (gstddev > DBL_EPSILON)
        {
            double minout = exp(gmean - 3 * gstddev) * runsPerIteration;
            double maxout = exp(gmean + 3 * gstddev) * runsPerIteration;
            while(*start < minout) ++start, ++metrics.outliers;
            do --end, ++metrics.outliers; while(*end > maxout);
            ++end, --metrics.outliers;
        }
    }
    else if (param_strategy == PERF_STRATEGY_SIMPLE)
    {
        metrics.outliers = static_cast<int>(times.size() * param_max_outliers / 100);
        for (unsigned int i = 0; i < metrics.outliers; i++)
            --end;
    }
    else
    {
        assert(false);
    }

    int offset = static_cast<int>(start - times.begin());

    metrics.min = static_cast<double>(*start)/runsPerIteration;
    //calc final metrics
    unsigned int n = 0;
    double gmean = 0;
    double gstddev = 0;
    double mean = 0;
    double stddev = 0;
    unsigned int m = 0;
    for(; start != end; ++start)
    {
        double x = static_cast<double>(*start)/runsPerIteration;
        if (x > DBL_EPSILON)
        {
            double lx = log(x);
            ++m;
            double gdelta = lx - gmean;
            gmean += gdelta / m;
            gstddev += gdelta * (lx - gmean);
        }
        ++n;
        double delta = x - mean;
        mean += delta / n;
        stddev += delta * (x - mean);
    }

    metrics.mean = mean;
    metrics.gmean = exp(gmean);
    metrics.gstddev = m > 1 ? sqrt(gstddev / (m - 1)) : 0;
    metrics.stddev = n > 1 ? sqrt(stddev / (n - 1)) : 0;
    metrics.median = (n % 2
            ? (double)times[offset + n / 2]
            : 0.5 * (times[offset + n / 2] + times[offset + n / 2 - 1])
            ) / runsPerIteration;

    return metrics;
}
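
// The running mean/variance updates above are Welford's online algorithm; as a
// worked example, samples {2, 4, 6} give mean = 4 and
// stddev = sqrt(((2-4)^2 + (4-4)^2 + (6-4)^2) / (3-1)) = 2. All metrics are in
// tick units; divide by metrics.frequency for seconds, as reportMetrics() below does.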

void TestBase::validateMetrics()
{
    performance_metrics& m = calcMetrics();

    if (HasFailure()) return;

    ASSERT_GE(m.samples, 1u)
      << " No time measurements were performed.\nstartTimer() and stopTimer() commands are required for performance tests.";

    if (param_strategy == PERF_STRATEGY_BASE)
    {
        EXPECT_GE(m.samples, param_min_samples)
          << " Only a few samples are collected.\nPlease increase the number of iterations and/or the time limit to get reliable performance measurements.";

        if (m.gstddev > DBL_EPSILON)
        {
            EXPECT_GT(/*m.gmean * */1., /*m.gmean * */ 2 * sinh(m.gstddev * param_max_deviation))
              << " Test results are not reliable ((mean-sigma,mean+sigma) deviation interval is greater than measured time interval).";
        }

        EXPECT_LE(m.outliers, std::max((unsigned int)cvCeil(m.samples * param_max_outliers / 100.), 1u))
          << " Test results are not reliable (too many outliers).";
    }
    else if (param_strategy == PERF_STRATEGY_SIMPLE)
    {
        double mean = metrics.mean * 1000.0f / metrics.frequency;
        double stddev = metrics.stddev * 1000.0f / metrics.frequency;
        double percents = stddev / mean * 100.f;
        printf(" samples = %d, mean = %.2f, stddev = %.2f (%.1f%%)\n", (int)metrics.samples, mean, stddev, percents);
    }
    else
    {
        assert(false);
    }
}

void TestBase::reportMetrics(bool toJUnitXML)
{
    performance_metrics& m = calcMetrics();

    if (m.terminationReason == performance_metrics::TERM_SKIP_TEST)
    {
        if (toJUnitXML)
        {
            RecordProperty("custom_status", "skipped");
        }
    }
    else if (toJUnitXML)
    {
        RecordProperty("bytesIn", (int)m.bytesIn);
        RecordProperty("bytesOut", (int)m.bytesOut);
        RecordProperty("term", m.terminationReason);
        RecordProperty("samples", (int)m.samples);
        RecordProperty("outliers", (int)m.outliers);
        RecordProperty("frequency", cv::format("%.0f", m.frequency).c_str());
        RecordProperty("min", cv::format("%.0f", m.min).c_str());
        RecordProperty("median", cv::format("%.0f", m.median).c_str());
        RecordProperty("gmean", cv::format("%.0f", m.gmean).c_str());
        RecordProperty("gstddev", cv::format("%.6f", m.gstddev).c_str());
        RecordProperty("mean", cv::format("%.0f", m.mean).c_str());
        RecordProperty("stddev", cv::format("%.0f", m.stddev).c_str());
    }
    else
    {
        const ::testing::TestInfo* const test_info = ::testing::UnitTest::GetInstance()->current_test_info();
        const char* type_param = test_info->type_param();
        const char* value_param = test_info->value_param();

#if defined(ANDROID) && defined(USE_ANDROID_LOGGING)
        LOGD("[ FAILED ] %s.%s", test_info->test_case_name(), test_info->name());
#endif

        if (type_param)  LOGD("type      = %11s", type_param);
        if (value_param) LOGD("params    = %11s", value_param);

        switch (m.terminationReason)
        {
        case performance_metrics::TERM_ITERATIONS:
            LOGD("termination reason: reached maximum number of iterations");
            break;
        case performance_metrics::TERM_TIME:
            LOGD("termination reason: reached time limit");
            break;
        case performance_metrics::TERM_INTERRUPT:
            LOGD("termination reason: aborted by the performance testing framework");
            break;
        case performance_metrics::TERM_EXCEPTION:
            LOGD("termination reason: unhandled exception");
            break;
        case performance_metrics::TERM_UNKNOWN:
        default:
            LOGD("termination reason: unknown");
            break;
        }

        LOGD("bytesIn   =%11lu", (unsigned long)m.bytesIn);
        LOGD("bytesOut  =%11lu", (unsigned long)m.bytesOut);
        if (nIters == (unsigned int)-1 || m.terminationReason == performance_metrics::TERM_ITERATIONS)
            LOGD("samples   =%11u", m.samples);
        else
            LOGD("samples   =%11u of %u", m.samples, nIters);
        LOGD("outliers  =%11u", m.outliers);
        LOGD("frequency =%11.0f", m.frequency);
        if (m.samples > 0)
        {
            LOGD("min       =%11.0f = %.2fms", m.min, m.min * 1e3 / m.frequency);
            LOGD("median    =%11.0f = %.2fms", m.median, m.median * 1e3 / m.frequency);
            LOGD("gmean     =%11.0f = %.2fms", m.gmean, m.gmean * 1e3 / m.frequency);
            LOGD("gstddev   =%11.8f = %.2fms for 97%% dispersion interval", m.gstddev, m.gmean * 2 * sinh(m.gstddev * 3) * 1e3 / m.frequency);
            LOGD("mean      =%11.0f = %.2fms", m.mean, m.mean * 1e3 / m.frequency);
            LOGD("stddev    =%11.0f = %.2fms", m.stddev, m.stddev * 1e3 / m.frequency);
        }
    }
}

void TestBase::SetUp()
{
    cv::theRNG().state = param_seed; // this rng should generate same numbers for each run

    if (param_threads >= 0)
        cv::setNumThreads(param_threads);

#ifdef ANDROID
    if (param_affinity_mask)
        setCurrentThreadAffinityMask(param_affinity_mask);
#endif

    verified = false;
    lastTime = 0;
    totalTime = 0;
    runsPerIteration = 1;
    nIters = iterationsLimitDefault;
    currentIter = (unsigned int)-1;
    timeLimit = timeLimitDefault;
    times.clear();
}

void TestBase::TearDown()
{
    if (metrics.terminationReason == performance_metrics::TERM_SKIP_TEST)
    {
        LOGI("\tTest was skipped");
        GTEST_SUCCEED() << "Test was skipped";
    }
    else
    {
        if (!HasFailure() && !verified)
            ADD_FAILURE() << "The test has no sanity checks. There should be at least one check at the end of performance test.";

        validateMetrics();
        if (HasFailure())
        {
            reportMetrics(false);
            return;
        }
    }

    const ::testing::TestInfo* const test_info = ::testing::UnitTest::GetInstance()->current_test_info();
    const char* type_param = test_info->type_param();
    const char* value_param = test_info->value_param();
    if (value_param) printf("[ VALUE ] \t%s\n", value_param), fflush(stdout);
    if (type_param)  printf("[ TYPE ] \t%s\n", type_param), fflush(stdout);
    reportMetrics(true);
}

std::string TestBase::getDataPath(const std::string& relativePath)
{
    if (relativePath.empty())
    {
        ADD_FAILURE() << " Bad path to test resource";
        throw PerfEarlyExitException();
    }

    const char *data_path_dir = getenv("OPENCV_TEST_DATA_PATH");
    const char *path_separator = "/";

    std::string path;
    if (data_path_dir)
    {
        int len = (int)strlen(data_path_dir) - 1;
        if (len < 0) len = 0;
        path = (data_path_dir[0] == 0 ? std::string(".") : std::string(data_path_dir))
                + (data_path_dir[len] == '/' || data_path_dir[len] == '\\' ? "" : path_separator);
    }
    else
    {
        path = ".";
        path += path_separator;
    }

    if (relativePath[0] == '/' || relativePath[0] == '\\')
        path += relativePath.substr(1);
    else
        path += relativePath;

    FILE* fp = fopen(path.c_str(), "r");
    if (fp)
        fclose(fp);
    else
    {
        ADD_FAILURE() << " Requested file \"" << path << "\" does not exist.";
        throw PerfEarlyExitException();
    }
    return path;
}

void TestBase::RunPerfTestBody()
{
    try
    {
        this->PerfTestBody();
    }
    catch(PerfSkipTestException&)
    {
        metrics.terminationReason = performance_metrics::TERM_SKIP_TEST;
        return;
    }
    catch(PerfEarlyExitException&)
    {
        metrics.terminationReason = performance_metrics::TERM_INTERRUPT;
        return;//no additional failure logging
    }
    catch(cv::Exception& e)
    {
        metrics.terminationReason = performance_metrics::TERM_EXCEPTION;
#ifdef HAVE_CUDA
        if (e.code == CV_GpuApiCallError)
            cv::gpu::resetDevice();
#endif
        FAIL() << "Expected: PerfTestBody() doesn't throw an exception.\n Actual: it throws cv::Exception:\n " << e.what();
    }
    catch(std::exception& e)
    {
        metrics.terminationReason = performance_metrics::TERM_EXCEPTION;
        FAIL() << "Expected: PerfTestBody() doesn't throw an exception.\n Actual: it throws std::exception:\n " << e.what();
    }
    catch(...)
    {
        metrics.terminationReason = performance_metrics::TERM_EXCEPTION;
        FAIL() << "Expected: PerfTestBody() doesn't throw an exception.\n Actual: it throws...";
    }
}

/*****************************************************************************************\
*                          ::perf::TestBase::_declareHelper
\*****************************************************************************************/
TestBase::_declareHelper& TestBase::_declareHelper::iterations(unsigned int n)
{
    test->times.clear();
    test->times.reserve(n);
    test->nIters = std::min(n, TestBase::iterationsLimitDefault);
    test->currentIter = (unsigned int)-1;
    test->metrics.clear();
    return *this;
}

TestBase::_declareHelper& TestBase::_declareHelper::time(double timeLimitSecs)
{
    test->times.clear();
    test->currentIter = (unsigned int)-1;
    test->timeLimit = (int64)(timeLimitSecs * cv::getTickFrequency());
    test->metrics.clear();
    return *this;
}

TestBase::_declareHelper& TestBase::_declareHelper::tbb_threads(int n)
{
    cv::setNumThreads(n);
    return *this;
}

TestBase::_declareHelper& TestBase::_declareHelper::runs(unsigned int runsNumber)
{
    test->runsPerIteration = runsNumber;
    return *this;
}

TestBase::_declareHelper& TestBase::_declareHelper::in(cv::InputOutputArray a1, int wtype)
{
    if (!test->times.empty()) return *this;
    TestBase::declareArray(test->inputData, a1, wtype);
    return *this;
}

TestBase::_declareHelper& TestBase::_declareHelper::in(cv::InputOutputArray a1, cv::InputOutputArray a2, int wtype)
{
    if (!test->times.empty()) return *this;
    TestBase::declareArray(test->inputData, a1, wtype);
    TestBase::declareArray(test->inputData, a2, wtype);
    return *this;
}

TestBase::_declareHelper& TestBase::_declareHelper::in(cv::InputOutputArray a1, cv::InputOutputArray a2, cv::InputOutputArray a3, int wtype)
{
    if (!test->times.empty()) return *this;
    TestBase::declareArray(test->inputData, a1, wtype);
    TestBase::declareArray(test->inputData, a2, wtype);
    TestBase::declareArray(test->inputData, a3, wtype);
    return *this;
}

TestBase::_declareHelper& TestBase::_declareHelper::in(cv::InputOutputArray a1, cv::InputOutputArray a2, cv::InputOutputArray a3, cv::InputOutputArray a4, int wtype)
{
    if (!test->times.empty()) return *this;
    TestBase::declareArray(test->inputData, a1, wtype);
    TestBase::declareArray(test->inputData, a2, wtype);
    TestBase::declareArray(test->inputData, a3, wtype);
    TestBase::declareArray(test->inputData, a4, wtype);
    return *this;
}

TestBase::_declareHelper& TestBase::_declareHelper::out(cv::InputOutputArray a1, int wtype)
{
    if (!test->times.empty()) return *this;
    TestBase::declareArray(test->outputData, a1, wtype);
    return *this;
}

TestBase::_declareHelper& TestBase::_declareHelper::out(cv::InputOutputArray a1, cv::InputOutputArray a2, int wtype)
{
    if (!test->times.empty()) return *this;
    TestBase::declareArray(test->outputData, a1, wtype);
    TestBase::declareArray(test->outputData, a2, wtype);
    return *this;
}

TestBase::_declareHelper& TestBase::_declareHelper::out(cv::InputOutputArray a1, cv::InputOutputArray a2, cv::InputOutputArray a3, int wtype)
{
    if (!test->times.empty()) return *this;
    TestBase::declareArray(test->outputData, a1, wtype);
    TestBase::declareArray(test->outputData, a2, wtype);
    TestBase::declareArray(test->outputData, a3, wtype);
    return *this;
}

TestBase::_declareHelper& TestBase::_declareHelper::out(cv::InputOutputArray a1, cv::InputOutputArray a2, cv::InputOutputArray a3, cv::InputOutputArray a4, int wtype)
{
    if (!test->times.empty()) return *this;
    TestBase::declareArray(test->outputData, a1, wtype);
    TestBase::declareArray(test->outputData, a2, wtype);
    TestBase::declareArray(test->outputData, a3, wtype);
    TestBase::declareArray(test->outputData, a4, wtype);
    return *this;
}

TestBase::_declareHelper::_declareHelper(TestBase* t) : test(t)
{
}
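
// Usage sketch of the fluent declare interface from a test body (the variable
// names src/dst are hypothetical):
//
//     declare.in(src, WARMUP_RNG)   // fill input with random data before timing
//            .out(dst)              // register output for size accounting and warmup
//            .time(0.5)             // per-test time limit in seconds
//            .iterations(100);      // cap on measured samples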

/*****************************************************************************************\
*                                      miscellaneous
\*****************************************************************************************/

namespace {
struct KeypointComparator
{
    std::vector<cv::KeyPoint>& pts_;
    comparators::KeypointGreater cmp;

    KeypointComparator(std::vector<cv::KeyPoint>& pts) : pts_(pts), cmp() {}

    bool operator()(int idx1, int idx2) const
    {
        return cmp(pts_[idx1], pts_[idx2]);
    }
private:
    const KeypointComparator& operator=(const KeypointComparator&); // quiet MSVC
};
}//namespace

void perf::sort(std::vector<cv::KeyPoint>& pts, cv::InputOutputArray descriptors)
{
    cv::Mat desc = descriptors.getMat();

    CV_Assert(pts.size() == (size_t)desc.rows);
    cv::AutoBuffer<int> idxs(desc.rows);

    for (int i = 0; i < desc.rows; ++i)
        idxs[i] = i;

    // sort an index permutation instead of the pairs themselves, so keypoints
    // and their descriptor rows stay in sync
    std::sort((int*)idxs, (int*)idxs + desc.rows, KeypointComparator(pts));

    std::vector<cv::KeyPoint> spts(pts.size());
    cv::Mat sdesc(desc.size(), desc.type());

    for(int j = 0; j < desc.rows; ++j)
    {
        spts[j] = pts[idxs[j]];
        cv::Mat row = sdesc.row(j);
        desc.row(idxs[j]).copyTo(row);
    }

    spts.swap(pts);
    sdesc.copyTo(desc);
}

/*****************************************************************************************\
*                                   ::perf::GpuPerf
\*****************************************************************************************/
bool perf::GpuPerf::targetDevice()
{
    return param_impl == "cuda";
}

/*****************************************************************************************\
*                                   ::perf::PrintTo
\*****************************************************************************************/
namespace perf
{

void PrintTo(const MatType& t, ::std::ostream* os)
{
    switch( CV_MAT_DEPTH((int)t) )
    {
        case CV_8U:  *os << "8U";  break;
        case CV_8S:  *os << "8S";  break;
        case CV_16U: *os << "16U"; break;
        case CV_16S: *os << "16S"; break;
        case CV_32S: *os << "32S"; break;
        case CV_32F: *os << "32F"; break;
        case CV_64F: *os << "64F"; break;
        case CV_USRTYPE1: *os << "USRTYPE1"; break;
        default: *os << "INVALID_TYPE"; break;
    }
    *os << 'C' << CV_MAT_CN((int)t);
}

} //namespace perf

/*****************************************************************************************\
*                                    ::cv::PrintTo
\*****************************************************************************************/
namespace cv {

void PrintTo(const Size& sz, ::std::ostream* os)
{
    *os << /*"Size:" << */ sz.width << "x" << sz.height;
}

} // namespace cv