Mirror of https://github.com/opencv/opencv.git, synced 2025-01-19 06:53:50 +08:00
Merge pull request #13280 from dkurt:enable_dnn_ie_r4_tests
Commit 5bd1cc44ec
@@ -700,7 +700,8 @@ struct AbsValFunctor
 
     bool supportBackend(int backendId, int)
     {
-        return backendId == DNN_BACKEND_OPENCV || backendId == DNN_BACKEND_HALIDE;
+        return backendId == DNN_BACKEND_OPENCV || backendId == DNN_BACKEND_HALIDE ||
+               backendId == DNN_BACKEND_INFERENCE_ENGINE;
     }
 
     void apply(const float* srcptr, float* dstptr, int len, size_t planeSize, int cn0, int cn1) const
@@ -754,8 +755,11 @@ struct AbsValFunctor
 #ifdef HAVE_INF_ENGINE
     InferenceEngine::CNNLayerPtr initInfEngine(InferenceEngine::LayerParams& lp)
     {
-        CV_Error(Error::StsNotImplemented, "Abs");
-        return InferenceEngine::CNNLayerPtr();
+        lp.type = "ReLU";
+        std::shared_ptr<InferenceEngine::ReLULayer> ieLayer(new InferenceEngine::ReLULayer(lp));
+        ieLayer->negative_slope = -1;
+        ieLayer->params["negative_slope"] = "-1.0";
+        return ieLayer;
     }
 #endif // HAVE_INF_ENGINE
 
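Side note, not part of the patch: the Abs-to-ReLU mapping above works because a leaky ReLU with negative_slope = -1 returns x for x >= 0 and -x otherwise, i.e. |x|. A minimal standalone sketch of that identity (all names here are illustrative only):

    // Illustrative sketch: leaky ReLU with slope -1 reproduces abs(x).
    #include <cassert>
    #include <cmath>
    #include <initializer_list>

    static float leakyRelu(float x, float negativeSlope)
    {
        return x >= 0.f ? x : negativeSlope * x;  // leaky-ReLU formula
    }

    int main()
    {
        for (float x : {-3.5f, -1.f, 0.f, 2.25f})
            assert(leakyRelu(x, -1.f) == std::fabs(x));  // holds for either sign of x
        return 0;
    }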
@@ -832,7 +836,7 @@ struct PowerFunctor
     bool supportBackend(int backendId, int targetId)
     {
         if (backendId == DNN_BACKEND_INFERENCE_ENGINE)
-            return (targetId != DNN_TARGET_OPENCL && targetId != DNN_TARGET_OPENCL_FP16) || power == 1.0;
+            return (targetId != DNN_TARGET_OPENCL && targetId != DNN_TARGET_OPENCL_FP16) || power == 1.0 || power == 0.5;
         else
             return backendId == DNN_BACKEND_OPENCV || backendId == DNN_BACKEND_HALIDE;
     }
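For context, a hedged aside rather than something stated in this diff: the Power layer is conventionally defined as y = (shift + scale * x)^power, so allowing power == 0.5 on the Inference Engine OpenCL targets amounts to permitting an element-wise square root there. A tiny illustrative sketch (names are not from the patch):

    // Illustrative sketch of the assumed Power-layer formula.
    #include <cmath>
    #include <cstdio>

    static float powerLayer(float x, float power, float scale = 1.f, float shift = 0.f)
    {
        return std::pow(shift + scale * x, power);  // y = (shift + scale*x)^power
    }

    int main()
    {
        std::printf("%g %g\n", powerLayer(9.f, 0.5f), powerLayer(3.f, 1.f));  // prints: 3 3
        return 0;
    }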
@@ -978,7 +982,8 @@ struct ChannelsPReLUFunctor
 
     bool supportBackend(int backendId, int)
     {
-        return backendId == DNN_BACKEND_OPENCV || backendId == DNN_BACKEND_HALIDE;
+        return backendId == DNN_BACKEND_OPENCV || backendId == DNN_BACKEND_HALIDE ||
+               backendId == DNN_BACKEND_INFERENCE_ENGINE;
     }
 
     void apply(const float* srcptr, float* dstptr, int len, size_t planeSize, int cn0, int cn1) const
@@ -1064,8 +1069,11 @@ struct ChannelsPReLUFunctor
 #ifdef HAVE_INF_ENGINE
     InferenceEngine::CNNLayerPtr initInfEngine(InferenceEngine::LayerParams& lp)
    {
-        CV_Error(Error::StsNotImplemented, "PReLU");
-        return InferenceEngine::CNNLayerPtr();
+        lp.type = "PReLU";
+        std::shared_ptr<InferenceEngine::PReLULayer> ieLayer(new InferenceEngine::PReLULayer(lp));
+        const size_t numChannels = scale.total();
+        ieLayer->_weights = wrapToInfEngineBlob(scale, {numChannels}, InferenceEngine::Layout::C);
+        return ieLayer;
     }
 #endif // HAVE_INF_ENGINE
 
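Another hedged aside, not part of the patch: channel-wise PReLU keeps positive inputs unchanged and scales negative inputs by a per-channel slope, which is what the _weights blob wrapped from scale supplies to the Inference Engine layer above. A minimal sketch of the element-wise rule (hypothetical helper names):

    // Illustrative sketch: per-channel PReLU, one slope w[c] per channel.
    #include <cstddef>

    static void preluApply(const float* src, float* dst, std::size_t channels,
                           std::size_t planeSize, const float* w)
    {
        for (std::size_t c = 0; c < channels; ++c)
            for (std::size_t i = 0; i < planeSize; ++i)
            {
                const float x = src[c * planeSize + i];
                dst[c * planeSize + i] = x >= 0.f ? x : w[c] * x;  // negative side scaled by w[c]
            }
    }

    int main()
    {
        const float w[2] = {0.25f, -1.f};
        const float src[4] = {-4.f, 4.f, -4.f, 4.f};  // 2 channels x 2 elements
        float dst[4];
        preluApply(src, dst, 2, 2, w);                // dst = {-1, 4, 4, 4}
        return 0;
    }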
@@ -128,10 +128,16 @@ TEST_P(DNNTestNetwork, GoogLeNet)
 
 TEST_P(DNNTestNetwork, Inception_5h)
 {
-    if (backend == DNN_BACKEND_INFERENCE_ENGINE) throw SkipTestException("");
+    double l1 = default_l1, lInf = default_lInf;
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE && (target == DNN_TARGET_CPU || target == DNN_TARGET_OPENCL))
+    {
+        l1 = 1.72e-5;
+        lInf = 8e-4;
+    }
     processNet("dnn/tensorflow_inception_graph.pb", "", Size(224, 224), "softmax2",
                target == DNN_TARGET_OPENCL ? "dnn/halide_scheduler_opencl_inception_5h.yml" :
-                                             "dnn/halide_scheduler_inception_5h.yml");
+                                             "dnn/halide_scheduler_inception_5h.yml",
+               l1, lInf);
 }
 
 TEST_P(DNNTestNetwork, ENet)
@@ -193,8 +199,7 @@ TEST_P(DNNTestNetwork, SSD_VGG16)
 
 TEST_P(DNNTestNetwork, OpenPose_pose_coco)
 {
-    if (backend == DNN_BACKEND_HALIDE ||
-        (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD))
+    if (backend == DNN_BACKEND_HALIDE)
         throw SkipTestException("");
     processNet("dnn/openpose_pose_coco.caffemodel", "dnn/openpose_pose_coco.prototxt",
                Size(46, 46));
@@ -202,8 +207,7 @@ TEST_P(DNNTestNetwork, OpenPose_pose_coco)
 
 TEST_P(DNNTestNetwork, OpenPose_pose_mpi)
 {
-    if (backend == DNN_BACKEND_HALIDE ||
-        (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD))
+    if (backend == DNN_BACKEND_HALIDE)
         throw SkipTestException("");
     processNet("dnn/openpose_pose_mpi.caffemodel", "dnn/openpose_pose_mpi.prototxt",
                Size(46, 46));
@@ -211,8 +215,7 @@ TEST_P(DNNTestNetwork, OpenPose_pose_mpi)
 
 TEST_P(DNNTestNetwork, OpenPose_pose_mpi_faster_4_stages)
 {
-    if (backend == DNN_BACKEND_HALIDE ||
-        (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD))
+    if (backend == DNN_BACKEND_HALIDE)
         throw SkipTestException("");
     // The same .caffemodel but modified .prototxt
     // See https://github.com/CMU-Perceptual-Computing-Lab/openpose/blob/master/src/openpose/pose/poseParameters.cpp
@@ -222,12 +225,16 @@ TEST_P(DNNTestNetwork, OpenPose_pose_mpi_faster_4_stages)
 
 TEST_P(DNNTestNetwork, OpenFace)
 {
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE < 2018030000
+#if defined(INF_ENGINE_RELEASE)
+#if INF_ENGINE_RELEASE < 2018030000
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
         throw SkipTestException("Test is enabled starts from OpenVINO 2018R3");
+#elif INF_ENGINE_RELEASE < 2018040000
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL_FP16)
+        throw SkipTestException("Test is enabled starts from OpenVINO 2018R4");
 #endif
-    if (backend == DNN_BACKEND_HALIDE ||
-        (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL_FP16))
+#endif
+    if (backend == DNN_BACKEND_HALIDE)
         throw SkipTestException("");
     processNet("dnn/openface_nn4.small2.v1.t7", "", Size(96, 96), "");
 }
@@ -347,8 +347,10 @@ INSTANTIATE_TEST_CASE_P(/**/, Test_Darknet_nets, dnnBackendsAndTargets());
 
 TEST_P(Test_Darknet_layers, shortcut)
 {
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE < 2018040000
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_CPU)
-        throw SkipTestException("");
+        throw SkipTestException("Test is enabled starts from OpenVINO 2018R4");
+#endif
     testDarknetLayer("shortcut");
 }
@@ -273,9 +273,11 @@ TEST_P(AvePooling, Accuracy)
     Size stride = get<3>(GetParam());
     Backend backendId = get<0>(get<4>(GetParam()));
     Target targetId = get<1>(get<4>(GetParam()));
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE < 2018040000
     if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_MYRIAD &&
         stride == Size(3, 2) && kernel == Size(3, 3) && outSize != Size(1, 1))
-        throw SkipTestException("");
+        throw SkipTestException("Test is enabled starts from OpenVINO 2018R4");
+#endif
 
     const int inWidth = (outSize.width - 1) * stride.width + kernel.width;
     const int inHeight = (outSize.height - 1) * stride.height + kernel.height;
@@ -243,9 +243,14 @@ TEST_P(Test_Caffe_layers, Concat)
 
 TEST_P(Test_Caffe_layers, Fused_Concat)
 {
-    if ((backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_CPU) ||
-        (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL))
+#if defined(INF_ENGINE_RELEASE)
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE)
+    {
+        if (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16 ||
+            (INF_ENGINE_RELEASE < 2018040000 && target == DNN_TARGET_CPU))
         throw SkipTestException("");
+    }
+#endif
     checkBackend();
 
     // Test case
@@ -349,12 +354,6 @@ TEST_P(Test_Caffe_layers, Reshape_Split_Slice)
 
 TEST_P(Test_Caffe_layers, Conv_Elu)
 {
-    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
-    {
-        if (!checkIETarget(DNN_TARGET_MYRIAD))
-            throw SkipTestException("Myriad is not available/disabled in OpenCV");
-    }
-
     Net net = readNetFromTensorflow(_tf("layer_elu_model.pb"));
     ASSERT_FALSE(net.empty());