Fix OpenVINO 2019R1 compilation

Author: Dmitry Kurtaev
Date:   2019-09-09 19:24:54 +03:00
Commit: 0428f60d66 (parent b3a0507546)

4 changed files with 10 additions and 3 deletions
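
The hunks below gate code paths on the Inference Engine release through the INF_ENGINE_VER_MAJOR_* helpers. As a reading aid, here is a minimal sketch of how these macros and release constants are conventionally defined in OpenCV's op_inf_engine.hpp; the exact values and location are assumptions, not part of this commit:

    // Sketch only: assumed definitions, shown to make the version checks below readable.
    // INF_ENGINE_RELEASE encodes the IE version as YYYYRRPPPP, e.g. 2019R2 -> 2019020000.
    #define INF_ENGINE_RELEASE_2019R1 2019010000   // assumed value
    #define INF_ENGINE_RELEASE_2019R2 2019020000   // assumed value
    // Compare only the major part (year + release number), ignoring the patch digits.
    #define INF_ENGINE_VER_MAJOR_GE(ver) (((INF_ENGINE_RELEASE) / 10000) >= ((ver) / 10000))
    #define INF_ENGINE_VER_MAJOR_LE(ver) (((INF_ENGINE_RELEASE) / 10000) <= ((ver) / 10000))
    #define INF_ENGINE_VER_MAJOR_LT(ver) (((INF_ENGINE_RELEASE) / 10000) <  ((ver) / 10000))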

@@ -1556,7 +1556,8 @@ struct Net::Impl
         Ptr<Layer> layer = ld.layerInstance;
         if (!fused && !layer->supportBackend(preferableBackend))
         {
-            bool customizable = ld.id != 0 && ld.outputBlobs.size() == 1;
+            bool customizable = ld.id != 0 && ld.outputBlobs.size() == 1 &&
+                                INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2019R2);
             // TODO: there is a bug in Myriad plugin with custom layers shape infer.
             if (preferableTarget == DNN_TARGET_MYRIAD)
             {

@@ -581,7 +581,6 @@ void InfEngineBackendNet::initPlugin(InferenceEngine::CNNNetwork& net)
     try
     {
         AutoLock lock(getInitializationMutex());
-        InferenceEngine::Core& ie = getCore();
 #if INF_ENGINE_VER_MAJOR_LE(INF_ENGINE_RELEASE_2019R1)
         auto& sharedPlugins = getSharedPlugins();
         auto pluginIt = sharedPlugins.find(device_name);
@@ -590,6 +589,8 @@ void InfEngineBackendNet::initPlugin(InferenceEngine::CNNNetwork& net)
            enginePtr = pluginIt->second;
         }
         else
+#else
+        InferenceEngine::Core& ie = getCore();
 #endif
         {
 #if INF_ENGINE_VER_MAJOR_LE(INF_ENGINE_RELEASE_2019R1)
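
These two hunks move the getCore() call behind the #else branch, so it is only compiled for 2019R2+ builds, presumably because getCore() is not available when building against 2019R1; 2019R1 builds stay on the shared per-device plugin path. A simplified sketch of the patched control flow, using only names taken from the diff and abridging the surrounding code (an illustration, not the full function):

    AutoLock lock(getInitializationMutex());
    #if INF_ENGINE_VER_MAJOR_LE(INF_ENGINE_RELEASE_2019R1)
        // 2019R1 and older: reuse a cached plugin for this device if one exists.
        auto& sharedPlugins = getSharedPlugins();
        auto pluginIt = sharedPlugins.find(device_name);
        if (pluginIt != sharedPlugins.end())
        {
            enginePtr = pluginIt->second;
        }
        else
    #else
        // 2019R2 and newer: the unified InferenceEngine::Core object is used instead.
        InferenceEngine::Core& ie = getCore();
    #endif
        {
            // ... create or load the plugin / network here (abridged) ...
        }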

@@ -334,6 +334,8 @@ static const std::chrono::milliseconds async_timeout(500);
 typedef testing::TestWithParam<tuple<std::string, Target> > Test_Darknet_nets_async;
 TEST_P(Test_Darknet_nets_async, Accuracy)
 {
+    if (INF_ENGINE_VER_MAJOR_LT(2019020000))
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE);
     applyTestTag(CV_TEST_TAG_MEMORY_512MB);
     std::string prefix = get<0>(GetParam());

@@ -481,8 +481,11 @@ TEST_P(Test_TensorFlow_nets, Faster_RCNN)
                                   "faster_rcnn_resnet50_coco_2018_01_28"};
     checkBackend();
-    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target != DNN_TARGET_CPU)
+#ifdef INF_ENGINE_RELEASE
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE &&
+        (INF_ENGINE_VER_MAJOR_LT(2019020000) || target != DNN_TARGET_CPU))
         applyTestTag(CV_TEST_TAG_DNN_SKIP_IE);
+#endif
     if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
         applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
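
The Faster_RCNN skip now references INF_ENGINE_VER_MAJOR_LT(2019020000), so the whole check is wrapped in #ifdef INF_ENGINE_RELEASE, presumably because that macro and the version helpers are only defined when OpenCV is built with the Inference Engine. A generic sketch of that guard pattern (an illustration of the idiom, not additional code from this commit):

    // Only evaluate IE version checks when IE support is compiled in;
    // otherwise INF_ENGINE_RELEASE and the INF_ENGINE_VER_MAJOR_* helpers are undefined.
    #ifdef INF_ENGINE_RELEASE
        if (INF_ENGINE_VER_MAJOR_LT(2019020000))
            applyTestTag(CV_TEST_TAG_DNN_SKIP_IE);  // skip on pre-2019R2 Inference Engine
    #endif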