// opencv/modules/dnn/test/test_onnx_importer.cpp
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright (C) 2018-2019, Intel Corporation, all rights reserved.
// Third party copyrights are property of their respective owners.
#include "test_precomp.hpp"
#include "npy_blob.hpp"
#include <opencv2/dnn/shape_utils.hpp>
namespace opencv_test { namespace {
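// Locate a test file under the "dnn/onnx/" folder of the OpenCV extra test data.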
template<typename TString>
static std::string _tf(TString filename)
{
    String rootFolder = "dnn/onnx/";
    return findDataFile(rootFolder + filename, false);
}
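// Base fixture for ONNX layer tests, parameterized over (backend, target).
// testONNXModels() loads models/<basename>.onnx plus the matching reference input/output blobs
// (.npy or .pb), runs the network on the requested backend/target and compares the result with
// normAssert(); when useSoftmax is true, both output and reference are passed through a Softmax
// layer first, so comparisons are made on normalized probabilities rather than raw logits.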
class Test_ONNX_layers : public DNNTestLayer
{
public:
    enum Extension
    {
        npy,
        pb
    };

    void testONNXModels(const String& basename, const Extension ext = npy,
                        const double l1 = 0, const float lInf = 0, const bool useSoftmax = false)
    {
        String onnxmodel = _tf("models/" + basename + ".onnx");
        Mat inp, ref;
        if (ext == npy) {
            inp = blobFromNPY(_tf("data/input_" + basename + ".npy"));
            ref = blobFromNPY(_tf("data/output_" + basename + ".npy"));
        }
        else if (ext == pb) {
            inp = readTensorFromONNX(_tf("data/input_" + basename + ".pb"));
            ref = readTensorFromONNX(_tf("data/output_" + basename + ".pb"));
        }
        else
            CV_Error(Error::StsUnsupportedFormat, "Unsupported extension");
        checkBackend(&inp, &ref);
        Net net = readNetFromONNX(onnxmodel);
        ASSERT_FALSE(net.empty());
        net.setPreferableBackend(backend);
        net.setPreferableTarget(target);
        net.setInput(inp);
        Mat out = net.forward("");
        if (useSoftmax)
        {
            LayerParams lp;
            Net netSoftmax;
            netSoftmax.addLayerToPrev("softmaxLayer", "SoftMax", lp);
            netSoftmax.setPreferableBackend(DNN_BACKEND_OPENCV);
            netSoftmax.setInput(out);
            out = netSoftmax.forward();
            netSoftmax.setInput(ref);
            ref = netSoftmax.forward();
        }
        normAssert(ref, out, "", l1 ? l1 : default_l1, lInf ? lInf : default_lInf);
    }
};
TEST_P(Test_ONNX_layers, MaxPooling)
{
    testONNXModels("maxpooling");
    testONNXModels("two_maxpooling");
}
TEST_P(Test_ONNX_layers, Convolution)
{
    testONNXModels("convolution");
}
TEST_P(Test_ONNX_layers, Two_convolution)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2018050000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
        && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        throw SkipTestException("Test is disabled for MyriadX");  // fails with 2018R5 and newer releases
#endif
    // Reference output values are in range [-0.855, 0.611]
    testONNXModels("two_convolution");
}
TEST_P(Test_ONNX_layers, Deconvolution)
{
    testONNXModels("deconvolution");
    testONNXModels("two_deconvolution");
    testONNXModels("deconvolution_group");
    testONNXModels("deconvolution_output_shape");
}
TEST_P(Test_ONNX_layers, Dropout)
{
    testONNXModels("dropout");
}
TEST_P(Test_ONNX_layers, Linear)
{
    if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
        throw SkipTestException("");
    testONNXModels("linear");
}
TEST_P(Test_ONNX_layers, ReLU)
{
    testONNXModels("ReLU");
}
TEST_P(Test_ONNX_layers, MaxPooling_Sigmoid)
{
    testONNXModels("maxpooling_sigmoid");
}
TEST_P(Test_ONNX_layers, Concatenation)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE &&
        (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_OPENCL || target == DNN_TARGET_MYRIAD))
        throw SkipTestException("");
    testONNXModels("concatenation");
}
TEST_P(Test_ONNX_layers, AveragePooling)
{
    testONNXModels("average_pooling");
}
TEST_P(Test_ONNX_layers, BatchNormalization)
{
    testONNXModels("batch_norm");
}
TEST_P(Test_ONNX_layers, Transpose)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE &&
        (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_OPENCL || target == DNN_TARGET_MYRIAD))
        throw SkipTestException("");
    testONNXModels("transpose");
}
TEST_P(Test_ONNX_layers, Multiplication)
{
    if ((backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16) ||
        (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD))
        throw SkipTestException("");
    testONNXModels("mul");
}
TEST_P(Test_ONNX_layers, Constant)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LE(2018050000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
        && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        throw SkipTestException("Test is disabled for OpenVINO <= 2018R5 + MyriadX target");
#endif
    testONNXModels("constant");
}
TEST_P(Test_ONNX_layers, Padding)
{
    testONNXModels("padding");
}
TEST_P(Test_ONNX_layers, Resize)
{
    testONNXModels("resize_nearest");
}
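// Network with two inputs; each input blob is bound explicitly by its name ("0" and "1").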
TEST_P(Test_ONNX_layers, MultyInputs)
{
    const String model = _tf("models/multy_inputs.onnx");
    Net net = readNetFromONNX(model);
    ASSERT_FALSE(net.empty());
    net.setPreferableBackend(backend);
    net.setPreferableTarget(target);
    Mat inp1 = blobFromNPY(_tf("data/input_multy_inputs_0.npy"));
    Mat inp2 = blobFromNPY(_tf("data/input_multy_inputs_1.npy"));
    Mat ref = blobFromNPY(_tf("data/output_multy_inputs.npy"));
    checkBackend(&inp1, &ref);
    net.setInput(inp1, "0");
    net.setInput(inp2, "1");
    Mat out = net.forward();
    normAssert(ref, out, "", default_l1, default_lInf);
}
TEST_P(Test_ONNX_layers, DynamicReshape)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
        throw SkipTestException("");
    testONNXModels("dynamic_reshape");
}
TEST_P(Test_ONNX_layers, Reshape)
{
    testONNXModels("unsqueeze");
}
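// Run the layer tests on every available (backend, target) combination.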
INSTANTIATE_TEST_CASE_P(/*nothing*/, Test_ONNX_layers, dnnBackendsAndTargets());
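// End-to-end tests on complete ONNX networks; most reuse testONNXModels() with .pb reference tensors.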
class Test_ONNX_nets : public Test_ONNX_layers {};
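// AlexNet: the input blob is built manually with blobFromImage() and the result is compared
// against a reference probability blob computed with the Caffe version of the model.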
TEST_P(Test_ONNX_nets, Alexnet)
{
    applyTestTag(target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB);
    const String model = _tf("models/alexnet.onnx");
    Net net = readNetFromONNX(model);
    ASSERT_FALSE(net.empty());
    net.setPreferableBackend(backend);
    net.setPreferableTarget(target);
    Mat inp = imread(_tf("../grace_hopper_227.png"));
    Mat ref = blobFromNPY(_tf("../caffe_alexnet_prob.npy"));
    checkBackend(&inp, &ref);
    net.setInput(blobFromImage(inp, 1.0f, Size(227, 227), Scalar(), false));
    ASSERT_FALSE(net.empty());
    Mat out = net.forward();
    normAssert(out, ref, "", default_l1, default_lInf);
}
TEST_P(Test_ONNX_nets, Squeezenet)
{
    testONNXModels("squeezenet", pb);
}
TEST_P(Test_ONNX_nets, Googlenet)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE)
        throw SkipTestException("");
    const String model = _tf("models/googlenet.onnx");
    Net net = readNetFromONNX(model);
    ASSERT_FALSE(net.empty());
    net.setPreferableBackend(backend);
    net.setPreferableTarget(target);
    std::vector<Mat> images;
    images.push_back( imread(_tf("../googlenet_0.png")) );
    images.push_back( imread(_tf("../googlenet_1.png")) );
    Mat inp = blobFromImages(images, 1.0f, Size(), Scalar(), false);
    Mat ref = blobFromNPY(_tf("../googlenet_prob.npy"));
    checkBackend(&inp, &ref);
    net.setInput(inp);
    ASSERT_FALSE(net.empty());
    Mat out = net.forward();
    normAssert(ref, out, "", default_l1, default_lInf);
}
TEST_P(Test_ONNX_nets, CaffeNet)
{
    applyTestTag(target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB);
    testONNXModels("caffenet", pb);
}
TEST_P(Test_ONNX_nets, RCNN_ILSVRC13)
{
    applyTestTag(target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB);
    // Reference output values are in range [-4.992, -1.161]
    testONNXModels("rcnn_ilsvrc13", pb, 0.0045);
}
TEST_P(Test_ONNX_nets, VGG16)
{
    applyTestTag(CV_TEST_TAG_MEMORY_6GB);  // > 2.3 GB
    // output range: [-69; 72], after Softmax [0; 0.96]
    testONNXModels("vgg16", pb, default_l1, default_lInf, true);
}
TEST_P(Test_ONNX_nets, VGG16_bn)
{
    applyTestTag(CV_TEST_TAG_MEMORY_6GB);  // > 2.3 GB
    // output range: [-16; 27], after Softmax [0; 0.67]
    const double lInf = (target == DNN_TARGET_MYRIAD) ? 0.038 : default_lInf;
    testONNXModels("vgg16-bn", pb, default_l1, lInf, true);
}
TEST_P(Test_ONNX_nets, ZFNet)
{
    applyTestTag(target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB);
    testONNXModels("zfnet512", pb);
}
TEST_P(Test_ONNX_nets, ResNet18v1)
{
    applyTestTag(CV_TEST_TAG_MEMORY_512MB);
    // output range: [-16; 22], after Softmax [0, 0.51]
    testONNXModels("resnet18v1", pb, default_l1, default_lInf, true);
}
TEST_P(Test_ONNX_nets, ResNet50v1)
{
    applyTestTag(CV_TEST_TAG_MEMORY_512MB);
    // output range: [-67; 75], after Softmax [0, 0.98]
    testONNXModels("resnet50v1", pb, default_l1, default_lInf, true);
}
TEST_P(Test_ONNX_nets, ResNet101_DUC_HDC)
{
    applyTestTag(CV_TEST_TAG_VERYLONG);
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE)
        throw SkipTestException("Test is disabled for DLIE targets");
#endif
#if defined(INF_ENGINE_RELEASE)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
        throw SkipTestException("Test is disabled for Myriad targets");
#endif
    if (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_OPENCL)
        throw SkipTestException("Test is disabled for OpenCL targets");
    testONNXModels("resnet101_duc_hdc", pb);
}
TEST_P(Test_ONNX_nets, TinyYolov2)
{
    applyTestTag(CV_TEST_TAG_MEMORY_512MB);
    if (cvtest::skipUnstableTests)
        throw SkipTestException("Skip unstable test");
#if defined(INF_ENGINE_RELEASE)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE
        && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
        throw SkipTestException("Test is disabled for DLIE OpenCL targets");
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
        && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        throw SkipTestException("Test is disabled for MyriadX");
#endif
    // output range: [-11; 8]
    double l1 = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.017 : default_l1;
    double lInf = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.14 : default_lInf;
    testONNXModels("tiny_yolo2", pb, l1, lInf);
}
TEST_P(Test_ONNX_nets, CNN_MNIST)
{
    // output range: [-1952; 6574], after Softmax [0; 1]
    testONNXModels("cnn_mnist", pb, default_l1, default_lInf, true);
}
TEST_P(Test_ONNX_nets, MobileNet_v2)
{
    // output range: [-166; 317], after Softmax [0; 1]
    testONNXModels("mobilenetv2", pb, default_l1, default_lInf, true);
}
TEST_P(Test_ONNX_nets, LResNet100E_IR)
{
    applyTestTag(target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE &&
        (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_OPENCL || target == DNN_TARGET_MYRIAD))
        throw SkipTestException("");
    double l1 = default_l1;
    double lInf = default_lInf;
    // output range: [-3; 3]
    if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16) {
        l1 = 0.009;
        lInf = 0.035;
    }
    else if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_CPU) {
        l1 = 4.6e-5;
        lInf = 1.9e-4;
    }
    testONNXModels("LResNet100E_IR", pb, l1, lInf);
}
TEST_P(Test_ONNX_nets, Emotion_ferplus)
{
#if defined(INF_ENGINE_RELEASE)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
        && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        throw SkipTestException("Test is disabled for MyriadX");
#endif
    double l1 = default_l1;
    double lInf = default_lInf;
    // Output values are in range [-2.011, 2.111]
    if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
        l1 = 0.007;
    else if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL_FP16)
    {
        l1 = 0.021;
        lInf = 0.034;
    }
    else if (backend == DNN_BACKEND_INFERENCE_ENGINE && (target == DNN_TARGET_CPU || target == DNN_TARGET_OPENCL)) {
        l1 = 2.4e-4;
        lInf = 6e-4;
    }
    testONNXModels("emotion_ferplus", pb, l1, lInf);
}
TEST_P(Test_ONNX_nets, Inception_v2)
{
    testONNXModels("inception_v2", pb, default_l1, default_lInf, true);
}
TEST_P(Test_ONNX_nets, DenseNet121)
{
    applyTestTag(CV_TEST_TAG_MEMORY_512MB);
    // output range: [-87; 138], after Softmax [0; 1]
    testONNXModels("densenet121", pb, default_l1, default_lInf, true);
}
TEST_P(Test_ONNX_nets, Inception_v1)
{
#if defined(INF_ENGINE_RELEASE)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
        throw SkipTestException("Test is disabled for Myriad targets");
#endif
    testONNXModels("inception_v1", pb);
}
TEST_P(Test_ONNX_nets, Shufflenet)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE &&
        (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_OPENCL || target == DNN_TARGET_MYRIAD))
        throw SkipTestException("");
    testONNXModels("shufflenet", pb);
}
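// Run the network tests on every available (backend, target) combination.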
INSTANTIATE_TEST_CASE_P(/**/, Test_ONNX_nets, dnnBackendsAndTargets());
}} // namespace