From 74cf48b5d77bc98084b8e809ec95eb80a1e1b1c2 Mon Sep 17 00:00:00 2001
From: Alexander Alekhin
Date: Wed, 25 Jul 2018 16:51:11 +0300
Subject: [PATCH 1/2] dnn(test): use Backend/Target enums instead of 'int'

---
 modules/dnn/test/test_halide_layers.cpp | 62 ++++++++++++-------------
 1 file changed, 31 insertions(+), 31 deletions(-)

diff --git a/modules/dnn/test/test_halide_layers.cpp b/modules/dnn/test/test_halide_layers.cpp
index eda414551f..637b1f00b5 100644
--- a/modules/dnn/test/test_halide_layers.cpp
+++ b/modules/dnn/test/test_halide_layers.cpp
@@ -16,7 +16,7 @@ using namespace cv;
 using namespace cv::dnn;
 using namespace testing;
 
-static void test(Mat& input, Net& net, int backendId, int targetId)
+static void test(Mat& input, Net& net, Backend backendId, Target targetId)
 {
     DNNTestLayer::checkBackend(backendId, targetId);
     randu(input, -1.0f, 1.0f);
@@ -34,7 +34,7 @@ static void test(Mat& input, Net& net, int backendId, int targetId)
     normAssert(outputDefault, outputHalide, "", l1, lInf);
 }
 
-static void test(LayerParams& params, Mat& input, int backendId, int targetId)
+static void test(LayerParams& params, Mat& input, Backend backendId, Target targetId)
 {
     Net net;
     net.addLayerToPrev(params.name, params.type, params);
@@ -101,8 +101,8 @@ TEST_P(Convolution, Accuracy)
     Size pad = get<4>(GetParam());
     Size dilation = get<5>(GetParam());
     bool hasBias = get<6>(GetParam());
-    int backendId = get<0>(get<7>(GetParam()));
-    int targetId = get<1>(get<7>(GetParam()));
+    Backend backendId = get<0>(get<7>(GetParam()));
+    Target targetId = get<1>(get<7>(GetParam()));
 
     if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_MYRIAD)
         throw SkipTestException("");
@@ -171,8 +171,8 @@ TEST_P(Deconvolution, Accuracy)
     Size stride = Size(get<5>(GetParam())[0], get<5>(GetParam())[1]);
     Size adjPad = Size(get<5>(GetParam())[2], get<5>(GetParam())[3]);
     bool hasBias = get<6>(GetParam());
-    int backendId = get<0>(get<7>(GetParam()));
-    int targetId = get<1>(get<7>(GetParam()));
+    Backend backendId = get<0>(get<7>(GetParam()));
+    Target targetId = get<1>(get<7>(GetParam()));
     if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_CPU
         && dilation.width == 2 && dilation.height == 2)
         throw SkipTestException("");
@@ -235,8 +235,8 @@ TEST_P(LRN, Accuracy)
     float bias = get<2>(GetParam())[2];
     bool normBySize = get<3>(GetParam());
     std::string nrmType = get<4>(GetParam());
-    int backendId = get<0>(get<5>(GetParam()));
-    int targetId = get<1>(get<5>(GetParam()));
+    Backend backendId = get<0>(get<5>(GetParam()));
+    Target targetId = get<1>(get<5>(GetParam()));
     if (backendId == DNN_BACKEND_INFERENCE_ENGINE)
         throw SkipTestException("");
 
@@ -276,8 +276,8 @@ TEST_P(AvePooling, Accuracy)
 {
     Size outSize = get<1>(GetParam());;  // Input size will be computed from parameters.
     Size kernel = get<2>(GetParam());
     Size stride = get<3>(GetParam());
-    int backendId = get<0>(get<4>(GetParam()));
-    int targetId = get<1>(get<4>(GetParam()));
+    Backend backendId = get<0>(get<4>(GetParam()));
+    Target targetId = get<1>(get<4>(GetParam()));
     if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_MYRIAD)
         throw SkipTestException("");
@@ -317,8 +317,8 @@ TEST_P(MaxPooling, Accuracy)
     Size kernel = get<2>(GetParam());
     Size stride = get<3>(GetParam());
     Size pad = get<4>(GetParam());
-    int backendId = get<0>(get<5>(GetParam()));
-    int targetId = get<1>(get<5>(GetParam()));
+    Backend backendId = get<0>(get<5>(GetParam()));
+    Target targetId = get<1>(get<5>(GetParam()));
 
     LayerParams lp;
     lp.set("pool", "max");
@@ -355,8 +355,8 @@ TEST_P(FullyConnected, Accuracy)
     Size inSize = get<1>(GetParam());
     int outChannels = get<2>(GetParam());
     bool hasBias = get<3>(GetParam());
-    int backendId = get<0>(get<4>(GetParam()));
-    int targetId = get<1>(get<4>(GetParam()));
+    Backend backendId = get<0>(get<4>(GetParam()));
+    Target targetId = get<1>(get<4>(GetParam()));
     if (backendId == DNN_BACKEND_INFERENCE_ENGINE)
         throw SkipTestException("");
 
@@ -394,8 +394,8 @@ typedef TestWithParam<tuple<int, tuple<Backend, Target> > > SoftMax;
 TEST_P(SoftMax, Accuracy)
 {
     int inChannels = get<0>(GetParam());
-    int backendId = get<0>(get<1>(GetParam()));
-    int targetId = get<1>(get<1>(GetParam()));
+    Backend backendId = get<0>(get<1>(GetParam()));
+    Target targetId = get<1>(get<1>(GetParam()));
     LayerParams lp;
     lp.type = "SoftMax";
     lp.name = "testLayer";
@@ -457,7 +457,7 @@ TEST_P(Test_Halide_layers, MaxPoolUnpool)
 ////////////////////////////////////////////////////////////////////////////////
 static const int kNumChannels = 3;
 
-void testInPlaceActivation(LayerParams& lp, int backendId, int targetId)
+void testInPlaceActivation(LayerParams& lp, Backend backendId, Target targetId)
 {
     EXPECT_FALSE(lp.name.empty());
 
@@ -485,8 +485,8 @@ TEST_P(BatchNorm, Accuracy)
     bool hasWeights = get<0>(GetParam());
     bool hasBias = get<1>(GetParam());
     float epsilon = get<2>(GetParam());
-    int backendId = get<0>(get<3>(GetParam()));
-    int targetId = get<1>(get<3>(GetParam()));
+    Backend backendId = get<0>(get<3>(GetParam()));
+    Target targetId = get<1>(get<3>(GetParam()));
 
     LayerParams lp;
     lp.set("has_weight", hasWeights);
@@ -518,8 +518,8 @@ typedef TestWithParam<tuple<float, tuple<Backend, Target> > > ReLU;
 TEST_P(ReLU, Accuracy)
 {
     float negativeSlope = get<0>(GetParam());
-    int backendId = get<0>(get<1>(GetParam()));
-    int targetId = get<1>(get<1>(GetParam()));
+    Backend backendId = get<0>(get<1>(GetParam()));
+    Target targetId = get<1>(get<1>(GetParam()));
 
     LayerParams lp;
     lp.set("negative_slope", negativeSlope);
@@ -536,8 +536,8 @@ INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, ReLU, Combine(
 typedef TestWithParam<tuple<std::string, tuple<Backend, Target> > > NoParamActivation;
 TEST_P(NoParamActivation, Accuracy)
 {
-    int backendId = get<0>(get<1>(GetParam()));
-    int targetId = get<1>(get<1>(GetParam()));
+    Backend backendId = get<0>(get<1>(GetParam()));
+    Target targetId = get<1>(get<1>(GetParam()));
     LayerParams lp;
     lp.type = get<0>(GetParam());
 
@@ -555,8 +555,8 @@ TEST_P(Power, Accuracy)
     float power = get<0>(GetParam())[0];
     float scale = get<0>(GetParam())[1];
     float shift = get<0>(GetParam())[2];
-    int backendId = get<0>(get<1>(GetParam()));
-    int targetId = get<1>(get<1>(GetParam()));
+    Backend backendId = get<0>(get<1>(GetParam()));
+    Target targetId = get<1>(get<1>(GetParam()));
 
     LayerParams lp;
     lp.set("power", power);
@@ -589,8 +589,8 @@ typedef TestWithParam<tuple<bool, tuple<Backend, Target> > > Scale;
 TEST_P(Scale, Accuracy)
 {
     bool hasBias = get<0>(GetParam());
-    int backendId = get<0>(get<1>(GetParam()));
-    int targetId = get<1>(get<1>(GetParam()));
+    Backend backendId = get<0>(get<1>(GetParam()));
+    Target targetId = get<1>(get<1>(GetParam()));
 
     LayerParams lp;
     lp.set("bias_term", hasBias);
@@ -624,8 +624,8 @@ TEST_P(Concat, Accuracy)
 {
     Vec3i inSize = get<0>(GetParam());
     Vec3i numChannels = get<1>(GetParam());
-    int backendId = get<0>(get<2>(GetParam()));
-    int targetId = get<1>(get<2>(GetParam()));
+    Backend backendId = get<0>(get<2>(GetParam()));
+    Target targetId = get<1>(get<2>(GetParam()));
 
     Net net;
 
@@ -692,8 +692,8 @@ TEST_P(Eltwise, Accuracy)
     std::string op = get<1>(GetParam());
     int numConv = get<2>(GetParam());
     bool weighted = get<3>(GetParam());
-    int backendId = get<0>(get<4>(GetParam()));
-    int targetId = get<1>(get<4>(GetParam()));
+    Backend backendId = get<0>(get<4>(GetParam()));
+    Target targetId = get<1>(get<4>(GetParam()));
 
     Net net;
 

From dd8701c1a031a3db99d0f026ba19484e4256b3e2 Mon Sep 17 00:00:00 2001
From: Alexander Alekhin
Date: Wed, 25 Jul 2018 16:53:37 +0300
Subject: [PATCH 2/2] dnn(test): skip checks only for unstable tests but
 execute tested functions in Layer_Test_Halide/Convolution.Accuracy

---
 modules/dnn/test/test_halide_layers.cpp | 16 +++++++++++-----
 1 file changed, 11 insertions(+), 5 deletions(-)

diff --git a/modules/dnn/test/test_halide_layers.cpp b/modules/dnn/test/test_halide_layers.cpp
index 637b1f00b5..788f237b22 100644
--- a/modules/dnn/test/test_halide_layers.cpp
+++ b/modules/dnn/test/test_halide_layers.cpp
@@ -16,7 +16,7 @@ using namespace cv;
 using namespace cv::dnn;
 using namespace testing;
 
-static void test(Mat& input, Net& net, Backend backendId, Target targetId)
+static void test(Mat& input, Net& net, Backend backendId, Target targetId, bool skipCheck = false)
 {
     DNNTestLayer::checkBackend(backendId, targetId);
     randu(input, -1.0f, 1.0f);
@@ -29,16 +29,19 @@ static void test(Mat& input, Net& net, Backend backendId, Target targetId)
     net.setPreferableTarget(targetId);
     Mat outputHalide = net.forward().clone();
 
+    if (skipCheck)
+        return;
+
     double l1, lInf;
     DNNTestLayer::getDefaultThresholds(backendId, targetId, &l1, &lInf);
     normAssert(outputDefault, outputHalide, "", l1, lInf);
 }
 
-static void test(LayerParams& params, Mat& input, Backend backendId, Target targetId)
+static void test(LayerParams& params, Mat& input, Backend backendId, Target targetId, bool skipCheck = false)
 {
     Net net;
     net.addLayerToPrev(params.name, params.type, params);
-    test(input, net, backendId, targetId);
+    test(input, net, backendId, targetId, skipCheck);
 }
 
 static testing::internal::ParamGenerator<tuple<Backend, Target> > dnnBackendsAndTargetsWithHalide()
@@ -107,10 +110,11 @@ TEST_P(Convolution, Accuracy)
     if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_MYRIAD)
         throw SkipTestException("");
 
+    bool skipCheck = false;
     if (cvtest::skipUnstableTests && backendId == DNN_BACKEND_OPENCV &&
         (targetId == DNN_TARGET_OPENCL || targetId == DNN_TARGET_OPENCL_FP16) &&
         kernel == Size(3, 1) && stride == Size(1, 1) && pad == Size(0, 1))
-        throw SkipTestException("Skip unstable test");
+        skipCheck = true;
 
     int sz[] = {outChannels, inChannels / group, kernel.height, kernel.width};
     Mat weights(4, &sz[0], CV_32F);
@@ -139,7 +143,9 @@ TEST_P(Convolution, Accuracy)
     }
     int inpSz[] = {1, inChannels, inSize.height, inSize.width};
     Mat input(4, &inpSz[0], CV_32F);
-    test(lp, input, backendId, targetId);
+    test(lp, input, backendId, targetId, skipCheck);
+    if (skipCheck)
+        throw SkipTestException("Skip checks in unstable test");
 }
 
 INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, Convolution, Combine(
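
Note on the pattern in patch 2/2: instead of throwing SkipTestException on sight, the test now runs both the reference path and the backend under test and only skips the numeric comparison, so crashes or asserts inside the tested code still surface. A minimal standalone sketch of that pattern follows; it is plain C++ with hypothetical helpers (runReference, runBackendUnderTest, runAndMaybeCompare), not OpenCV API:

    #include <cassert>
    #include <cstdio>
    #include <vector>

    typedef std::vector<float> Result;

    // Stand-ins for the two execution paths being compared.
    static Result runReference()       { return Result(4, 1.0f); }
    static Result runBackendUnderTest() { return Result(4, 1.0f); }

    // Mirrors the patched test(): both paths always execute, so failures
    // inside the tested code are still caught; only the accuracy
    // assertions are optional for known-unstable configurations.
    static void runAndMaybeCompare(bool skipCheck)
    {
        Result expected = runReference();
        Result actual = runBackendUnderTest();  // still exercised when skipping
        if (skipCheck)
            return;                             // checks skipped, code still ran
        assert(expected.size() == actual.size());
        for (std::size_t i = 0; i < expected.size(); ++i)
            assert(expected[i] == actual[i]);
    }

    int main()
    {
        runAndMaybeCompare(false);  // normal run: execute and verify
        runAndMaybeCompare(true);   // unstable configuration: execute only
        std::printf("done\n");
        return 0;
    }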