Merge pull request #12061 from alalek:dnn_test_skip_checks_only

Commit: 2f0fc920dd
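Summary of the change, as read from the diff below: the Halide layer tests previously carried backend and target IDs around as plain int; they now use the typed cv::dnn::Backend and cv::dnn::Target enums throughout. In addition, the test() helpers gain a skipCheck flag so that a known-unstable convolution configuration still executes its forward pass on the backend under test, while only the accuracy comparison against the reference output is suppressed; the test is then reported as skipped via SkipTestException.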
@@ -16,7 +16,7 @@ using namespace cv;
 using namespace cv::dnn;
 using namespace testing;
 
-static void test(Mat& input, Net& net, int backendId, int targetId)
+static void test(Mat& input, Net& net, Backend backendId, Target targetId, bool skipCheck = false)
 {
     DNNTestLayer::checkBackend(backendId, targetId);
     randu(input, -1.0f, 1.0f);
@@ -29,16 +29,19 @@ static void test(Mat& input, Net& net, int backendId, int targetId)
     net.setPreferableTarget(targetId);
     Mat outputHalide = net.forward().clone();
 
+    if (skipCheck)
+        return;
+
     double l1, lInf;
     DNNTestLayer::getDefaultThresholds(backendId, targetId, &l1, &lInf);
     normAssert(outputDefault, outputHalide, "", l1, lInf);
 }
 
-static void test(LayerParams& params, Mat& input, int backendId, int targetId)
+static void test(LayerParams& params, Mat& input, Backend backendId, Target targetId, bool skipCheck = false)
 {
     Net net;
     net.addLayerToPrev(params.name, params.type, params);
-    test(input, net, backendId, targetId);
+    test(input, net, backendId, targetId, skipCheck);
 }
 
 static testing::internal::ParamGenerator<tuple<Backend, Target> > dnnBackendsAndTargetsWithHalide()
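To make the new control flow concrete, here is a minimal, self-contained sketch of the skip-checks-only pattern using only the public cv::dnn API. The helper name runAndMaybeCheck(), the single-layer ReLU net, and the 1e-5 tolerance are illustrative assumptions; the real tests use the DNNTestLayer/normAssert harness shown in the diff.

#include <opencv2/dnn.hpp>
#include <opencv2/core.hpp>
#include <iostream>

using namespace cv;
using namespace cv::dnn;

// Illustrative analogue of the patched test() helper: always run the forward
// pass, but compare against the reference only when skipCheck is false.
static bool runAndMaybeCheck(const Mat& input, Net& net, Backend backendId,
                             Target targetId, const Mat& reference,
                             bool skipCheck = false)
{
    net.setPreferableBackend(backendId);
    net.setPreferableTarget(targetId);
    net.setInput(input);
    Mat out = net.forward().clone();   // the backend is exercised regardless
    if (skipCheck)
        return true;                   // deliberately no accuracy assertion
    return norm(out, reference, NORM_INF) <= 1e-5;  // max absolute difference
}

int main()
{
    // One-layer ReLU net, built the same way the tests use addLayerToPrev().
    LayerParams lp;
    lp.type = "ReLU";
    lp.name = "testLayer";
    Net net;
    net.addLayerToPrev(lp.name, lp.type, lp);

    // Random NCHW input blob, as in the tests.
    int sz[] = {1, 3, 4, 4};
    Mat input(4, sz, CV_32F);
    randu(input, -1.0f, 1.0f);

    // Reference output from the default CPU path.
    net.setPreferableBackend(DNN_BACKEND_OPENCV);
    net.setPreferableTarget(DNN_TARGET_CPU);
    net.setInput(input);
    Mat reference = net.forward().clone();

    // "Unstable" configuration: run it, skip the check, report accordingly.
    bool ok = runAndMaybeCheck(input, net, DNN_BACKEND_OPENCV, DNN_TARGET_CPU,
                               reference, /*skipCheck=*/true);
    std::cout << (ok ? "forward pass ran; check skipped" : "mismatch") << "\n";
    return 0;
}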
@@ -101,16 +104,17 @@ TEST_P(Convolution, Accuracy)
     Size pad = get<4>(GetParam());
     Size dilation = get<5>(GetParam());
     bool hasBias = get<6>(GetParam());
-    int backendId = get<0>(get<7>(GetParam()));
-    int targetId = get<1>(get<7>(GetParam()));
+    Backend backendId = get<0>(get<7>(GetParam()));
+    Target targetId = get<1>(get<7>(GetParam()));
 
     if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_MYRIAD)
         throw SkipTestException("");
 
+    bool skipCheck = false;
     if (cvtest::skipUnstableTests && backendId == DNN_BACKEND_OPENCV &&
         (targetId == DNN_TARGET_OPENCL || targetId == DNN_TARGET_OPENCL_FP16) &&
         kernel == Size(3, 1) && stride == Size(1, 1) && pad == Size(0, 1))
-        throw SkipTestException("Skip unstable test");
+        skipCheck = true;
 
     int sz[] = {outChannels, inChannels / group, kernel.height, kernel.width};
     Mat weights(4, &sz[0], CV_32F);
@@ -139,7 +143,9 @@ TEST_P(Convolution, Accuracy)
     }
     int inpSz[] = {1, inChannels, inSize.height, inSize.width};
     Mat input(4, &inpSz[0], CV_32F);
-    test(lp, input, backendId, targetId);
+    test(lp, input, backendId, targetId, skipCheck);
+    if (skipCheck)
+        throw SkipTestException("Skip checks in unstable test");
 }
 
 INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, Convolution, Combine(
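Note the ordering in the Convolution hunks above: the old code threw SkipTestException before building anything, so the unstable OpenCL configuration was never executed at all. With the change, test() still performs the forward pass, so outright failures in the backend would still surface; only the flaky accuracy comparison is suppressed. SkipTestException is thrown afterwards so GTest continues to report the case as skipped rather than passed.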
@@ -171,8 +177,8 @@ TEST_P(Deconvolution, Accuracy)
     Size stride = Size(get<5>(GetParam())[0], get<5>(GetParam())[1]);
     Size adjPad = Size(get<5>(GetParam())[2], get<5>(GetParam())[3]);
     bool hasBias = get<6>(GetParam());
-    int backendId = get<0>(get<7>(GetParam()));
-    int targetId = get<1>(get<7>(GetParam()));
+    Backend backendId = get<0>(get<7>(GetParam()));
+    Target targetId = get<1>(get<7>(GetParam()));
     if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_CPU &&
         dilation.width == 2 && dilation.height == 2)
         throw SkipTestException("");
@@ -235,8 +241,8 @@ TEST_P(LRN, Accuracy)
     float bias = get<2>(GetParam())[2];
     bool normBySize = get<3>(GetParam());
     std::string nrmType = get<4>(GetParam());
-    int backendId = get<0>(get<5>(GetParam()));
-    int targetId = get<1>(get<5>(GetParam()));
+    Backend backendId = get<0>(get<5>(GetParam()));
+    Target targetId = get<1>(get<5>(GetParam()));
     if (backendId == DNN_BACKEND_INFERENCE_ENGINE)
         throw SkipTestException("");
 
@@ -276,8 +282,8 @@ TEST_P(AvePooling, Accuracy)
     Size outSize = get<1>(GetParam());; // Input size will be computed from parameters.
     Size kernel = get<2>(GetParam());
     Size stride = get<3>(GetParam());
-    int backendId = get<0>(get<4>(GetParam()));
-    int targetId = get<1>(get<4>(GetParam()));
+    Backend backendId = get<0>(get<4>(GetParam()));
+    Target targetId = get<1>(get<4>(GetParam()));
     if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_MYRIAD)
         throw SkipTestException("");
 
@@ -317,8 +323,8 @@ TEST_P(MaxPooling, Accuracy)
     Size kernel = get<2>(GetParam());
     Size stride = get<3>(GetParam());
     Size pad = get<4>(GetParam());
-    int backendId = get<0>(get<5>(GetParam()));
-    int targetId = get<1>(get<5>(GetParam()));
+    Backend backendId = get<0>(get<5>(GetParam()));
+    Target targetId = get<1>(get<5>(GetParam()));
 
     LayerParams lp;
     lp.set("pool", "max");
@@ -355,8 +361,8 @@ TEST_P(FullyConnected, Accuracy)
     Size inSize = get<1>(GetParam());
     int outChannels = get<2>(GetParam());
     bool hasBias = get<3>(GetParam());
-    int backendId = get<0>(get<4>(GetParam()));
-    int targetId = get<1>(get<4>(GetParam()));
+    Backend backendId = get<0>(get<4>(GetParam()));
+    Target targetId = get<1>(get<4>(GetParam()));
     if (backendId == DNN_BACKEND_INFERENCE_ENGINE)
         throw SkipTestException("");
 
@@ -394,8 +400,8 @@ typedef TestWithParam<tuple<int, tuple<Backend, Target> > > SoftMax;
 TEST_P(SoftMax, Accuracy)
 {
     int inChannels = get<0>(GetParam());
-    int backendId = get<0>(get<1>(GetParam()));
-    int targetId = get<1>(get<1>(GetParam()));
+    Backend backendId = get<0>(get<1>(GetParam()));
+    Target targetId = get<1>(get<1>(GetParam()));
     LayerParams lp;
     lp.type = "SoftMax";
     lp.name = "testLayer";
@@ -457,7 +463,7 @@ TEST_P(Test_Halide_layers, MaxPoolUnpool)
 ////////////////////////////////////////////////////////////////////////////////
 static const int kNumChannels = 3;
 
-void testInPlaceActivation(LayerParams& lp, int backendId, int targetId)
+void testInPlaceActivation(LayerParams& lp, Backend backendId, Target targetId)
 {
     EXPECT_FALSE(lp.name.empty());
 
@@ -485,8 +491,8 @@ TEST_P(BatchNorm, Accuracy)
     bool hasWeights = get<0>(GetParam());
     bool hasBias = get<1>(GetParam());
     float epsilon = get<2>(GetParam());
-    int backendId = get<0>(get<3>(GetParam()));
-    int targetId = get<1>(get<3>(GetParam()));
+    Backend backendId = get<0>(get<3>(GetParam()));
+    Target targetId = get<1>(get<3>(GetParam()));
 
     LayerParams lp;
     lp.set("has_weight", hasWeights);
@@ -518,8 +524,8 @@ typedef TestWithParam<tuple<float, tuple<Backend, Target> > > ReLU;
 TEST_P(ReLU, Accuracy)
 {
     float negativeSlope = get<0>(GetParam());
-    int backendId = get<0>(get<1>(GetParam()));
-    int targetId = get<1>(get<1>(GetParam()));
+    Backend backendId = get<0>(get<1>(GetParam()));
+    Target targetId = get<1>(get<1>(GetParam()));
 
     LayerParams lp;
     lp.set("negative_slope", negativeSlope);
@@ -536,8 +542,8 @@ INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, ReLU, Combine(
 typedef TestWithParam<tuple<std::string, tuple<Backend, Target> > > NoParamActivation;
 TEST_P(NoParamActivation, Accuracy)
 {
-    int backendId = get<0>(get<1>(GetParam()));
-    int targetId = get<1>(get<1>(GetParam()));
+    Backend backendId = get<0>(get<1>(GetParam()));
+    Target targetId = get<1>(get<1>(GetParam()));
 
     LayerParams lp;
     lp.type = get<0>(GetParam());
@@ -555,8 +561,8 @@ TEST_P(Power, Accuracy)
     float power = get<0>(GetParam())[0];
     float scale = get<0>(GetParam())[1];
     float shift = get<0>(GetParam())[2];
-    int backendId = get<0>(get<1>(GetParam()));
-    int targetId = get<1>(get<1>(GetParam()));
+    Backend backendId = get<0>(get<1>(GetParam()));
+    Target targetId = get<1>(get<1>(GetParam()));
 
     LayerParams lp;
     lp.set("power", power);
@@ -589,8 +595,8 @@ typedef TestWithParam<tuple<bool, tuple<Backend, Target> > > Scale;
 TEST_P(Scale, Accuracy)
 {
     bool hasBias = get<0>(GetParam());
-    int backendId = get<0>(get<1>(GetParam()));
-    int targetId = get<1>(get<1>(GetParam()));
+    Backend backendId = get<0>(get<1>(GetParam()));
+    Target targetId = get<1>(get<1>(GetParam()));
 
     LayerParams lp;
     lp.set("bias_term", hasBias);
@@ -624,8 +630,8 @@ TEST_P(Concat, Accuracy)
 {
     Vec3i inSize = get<0>(GetParam());
     Vec3i numChannels = get<1>(GetParam());
-    int backendId = get<0>(get<2>(GetParam()));
-    int targetId = get<1>(get<2>(GetParam()));
+    Backend backendId = get<0>(get<2>(GetParam()));
+    Target targetId = get<1>(get<2>(GetParam()));
 
     Net net;
 
@@ -692,8 +698,8 @@ TEST_P(Eltwise, Accuracy)
     std::string op = get<1>(GetParam());
     int numConv = get<2>(GetParam());
     bool weighted = get<3>(GetParam());
-    int backendId = get<0>(get<4>(GetParam()));
-    int targetId = get<1>(get<4>(GetParam()));
+    Backend backendId = get<0>(get<4>(GetParam()));
+    Target targetId = get<1>(get<4>(GetParam()));
 
     Net net;
 
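The remaining hunks apply the same mechanical change to each parameterized test: the backend/target pair is read out of the GTest parameter tuple as Backend and Target instead of int. A small hypothetical illustration of what the typed form buys (only the public cv::dnn enums are real; the bare tuple is a stand-in for the test parameter):

#include <opencv2/dnn.hpp>
#include <tuple>

using namespace cv::dnn;

int main()
{
    // Stand-in for the tests' tuple<Backend, Target> parameter.
    std::tuple<Backend, Target> param(DNN_BACKEND_OPENCV, DNN_TARGET_OPENCL);

    Backend backendId = std::get<0>(param);
    Target targetId = std::get<1>(param);
    // Backend oops = std::get<1>(param);  // no longer compiles: Target is not Backend

    return (backendId == DNN_BACKEND_OPENCV && targetId == DNN_TARGET_OPENCL) ? 0 : 1;
}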