Mirror of https://github.com/opencv/opencv.git
Fix custom IE layers in case of no MKLDNN plugin
commit dfe0368835
parent a8d14e88fe
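
The diff below makes BackendRegistry::checkIETarget() a public static helper, uses it to compute a supportsCPUFallback flag in Net::Impl, and marks unsupported layers as customizable (eligible for IE custom-layer CPU fallback) only when that flag is set, so networks no longer break when the MKLDNN (CPU) plugin is missing; the YOLOv3 Myriad X test switches from being skipped to using relaxed thresholds. For reference, here is a minimal standalone sketch of the same availability probe; the wrapper name probeIETarget and the main() driver are illustrative only, and it assumes an OpenCV build with the dnn module (with or without the Inference Engine backend).

// Standalone sketch of the availability probe this commit relies on
// (hypothetical wrapper name probeIETarget; not part of the commit itself).
#include <opencv2/dnn.hpp>
#include <iostream>

// Build a tiny one-layer 1x1 convolution network, request the Inference
// Engine backend with the given target, and try a forward pass. Any
// exception (for example the IE CPU/MKLDNN plugin failing to load) means
// the target is unusable.
static bool probeIETarget(int target)
{
    cv::dnn::Net net;
    cv::dnn::LayerParams lp;
    lp.set("kernel_size", 1);
    lp.set("num_output", 1);
    lp.set("bias_term", false);
    lp.type = "Convolution";
    lp.name = "testLayer";
    lp.blobs.push_back(cv::Mat({1, 2, 1, 1}, CV_32F, cv::Scalar(1)));
    net.addLayerToPrev(lp.name, lp.type, lp);
    net.setPreferableBackend(cv::dnn::DNN_BACKEND_INFERENCE_ENGINE);
    net.setPreferableTarget(target);
    int inpDims[] = {1, 2, 3, 4};
    net.setInput(cv::Mat(4, inpDims, CV_32FC1, cv::Scalar(0)));
    try
    {
        net.forward();
    }
    catch (...)
    {
        return false;
    }
    return true;
}

int main()
{
    // Custom IE layers fall back to an IE CPU sub-network, so the fallback
    // is only safe when the CPU target itself works. This mirrors the
    // BackendRegistry::checkIETarget(DNN_TARGET_CPU) check in the diff.
    std::cout << "IE CPU target usable: " << std::boolalpha
              << probeIETarget(cv::dnn::DNN_TARGET_CPU) << std::endl;
    return 0;
}

The probe is deliberately cheap: a single 1x1 convolution is enough to force the requested IE plugin to load, and any failure surfaces as an exception from forward().
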
modules/dnn/src/dnn.cpp
@@ -97,6 +97,37 @@ public:
         static BackendRegistry impl;
         return impl;
     }
+
+    static inline bool checkIETarget(int target)
+    {
+#ifndef HAVE_INF_ENGINE
+        return false;
+#else
+        cv::dnn::Net net;
+        cv::dnn::LayerParams lp;
+        lp.set("kernel_size", 1);
+        lp.set("num_output", 1);
+        lp.set("bias_term", false);
+        lp.type = "Convolution";
+        lp.name = "testLayer";
+        lp.blobs.push_back(Mat({1, 2, 1, 1}, CV_32F, Scalar(1)));
+        net.addLayerToPrev(lp.name, lp.type, lp);
+        net.setPreferableBackend(cv::dnn::DNN_BACKEND_INFERENCE_ENGINE);
+        net.setPreferableTarget(target);
+        static int inpDims[] = {1, 2, 3, 4};
+        net.setInput(cv::Mat(4, &inpDims[0], CV_32FC1, cv::Scalar(0)));
+        try
+        {
+            net.forward();
+        }
+        catch(...)
+        {
+            return false;
+        }
+        return true;
+#endif
+    }
+
 private:
     BackendRegistry()
     {
@@ -136,35 +167,6 @@ private:
 
         backends.push_back(std::make_pair(DNN_BACKEND_OPENCV, DNN_TARGET_CPU));
     }
-    static inline bool checkIETarget(int target)
-    {
-#ifndef HAVE_INF_ENGINE
-        return false;
-#else
-        cv::dnn::Net net;
-        cv::dnn::LayerParams lp;
-        lp.set("kernel_size", 1);
-        lp.set("num_output", 1);
-        lp.set("bias_term", false);
-        lp.type = "Convolution";
-        lp.name = "testLayer";
-        lp.blobs.push_back(Mat({1, 2, 1, 1}, CV_32F, Scalar(1)));
-        net.addLayerToPrev(lp.name, lp.type, lp);
-        net.setPreferableBackend(cv::dnn::DNN_BACKEND_INFERENCE_ENGINE);
-        net.setPreferableTarget(target);
-        static int inpDims[] = {1, 2, 3, 4};
-        net.setInput(cv::Mat(4, &inpDims[0], CV_32FC1, cv::Scalar(0)));
-        try
-        {
-            net.forward();
-        }
-        catch(...)
-        {
-            return false;
-        }
-        return true;
-#endif
-    }
 
     BackendsList backends;
 };
@@ -1544,6 +1546,9 @@ struct Net::Impl
         // backend. Split a whole model on several Inference Engine networks if
         // some of layers are not implemented.
 
+        bool supportsCPUFallback = preferableTarget == DNN_TARGET_CPU ||
+                                   BackendRegistry::checkIETarget(DNN_TARGET_CPU);
+
         // Set of all input and output blobs wrappers for current network.
         std::map<LayerPin, Ptr<BackendWrapper> > netBlobsWrappers;
         for (it = layers.begin(); it != layers.end(); ++it)
@@ -1557,7 +1562,8 @@ struct Net::Impl
             if (!fused && !layer->supportBackend(preferableBackend))
             {
                 bool customizable = ld.id != 0 && ld.outputBlobs.size() == 1 &&
-                                    INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2019R2);
+                                    INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2019R2) &&
+                                    supportsCPUFallback;
                 // TODO: there is a bug in Myriad plugin with custom layers shape infer.
                 if (preferableTarget == DNN_TARGET_MYRIAD)
                 {
modules/dnn/test/test_darknet.cpp
@@ -390,12 +390,6 @@ TEST_P(Test_Darknet_nets, YOLOv3)
 {
     applyTestTag(CV_TEST_TAG_LONG, (target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_1GB : CV_TEST_TAG_MEMORY_2GB));
 
-#if defined(INF_ENGINE_RELEASE)
-    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
-            && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
-        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X);
-#endif
-
     // batchId, classId, confidence, left, top, right, bottom
     Mat ref = (Mat_<float>(9, 7) << 0, 7, 0.952983f, 0.614622f, 0.150257f, 0.901369f, 0.289251f, // a truck
                                     0, 1, 0.987908f, 0.150913f, 0.221933f, 0.742255f, 0.74626f,  // a bicycle
@@ -413,23 +407,35 @@ TEST_P(Test_Darknet_nets, YOLOv3)
     std::string config_file = "yolov3.cfg";
     std::string weights_file = "yolov3.weights";
 
+#if defined(INF_ENGINE_RELEASE)
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD &&
+        getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
+    {
+        scoreDiff = 0.04;
+        iouDiff = 0.2;
+    }
+#endif
+
     {
         SCOPED_TRACE("batch size 1");
         testDarknetModel(config_file, weights_file, ref.rowRange(0, 3), scoreDiff, iouDiff);
     }
 
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LE(2018050000)
-    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL)
-        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL)  // Test with 'batch size 2' is disabled for DLIE/OpenCL target
-#endif
-
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2019020000)
+#if defined(INF_ENGINE_RELEASE)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE)
     {
-        if (target == DNN_TARGET_OPENCL)
-            applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_2019R2);
-        if (target == DNN_TARGET_OPENCL_FP16)
-            applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_2019R2);
+        if (INF_ENGINE_VER_MAJOR_LE(2018050000) && target == DNN_TARGET_OPENCL)
+            applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_2018R5);
+        else if (INF_ENGINE_VER_MAJOR_EQ(2019020000))
+        {
+            if (target == DNN_TARGET_OPENCL)
+                applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_2019R2);
+            if (target == DNN_TARGET_OPENCL_FP16)
+                applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_2019R2);
+        }
+        else if (target == DNN_TARGET_MYRIAD &&
+                 getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
+            applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X);
     }
 #endif
 