Wrap Inference Engine init to try-catch

Dmitry Kurtaev 2018-06-05 17:18:14 +03:00
parent 3cbd2e2764
commit f3a6ae5f00
4 changed files with 60 additions and 29 deletions
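
The change wraps Inference Engine plugin and network initialization in a try/catch, so failures (a missing plugin or cpu_extension library, an unsupported device, a network the plugin rejects) are reported through CV_Error as a regular cv::Exception with a descriptive message instead of propagating as raw Inference Engine exceptions. A minimal sketch of the user-visible effect is below; the model file names are placeholders, not files from this repository:

```cpp
#include <iostream>
#include <opencv2/dnn.hpp>

int main()
{
    // The model file names are placeholders for illustration only.
    cv::dnn::Net net = cv::dnn::readNetFromCaffe("deploy.prototxt", "weights.caffemodel");
    net.setPreferableBackend(cv::dnn::DNN_BACKEND_INFERENCE_ENGINE);
    try
    {
        // Backend initialization happens lazily, so a missing or broken
        // Inference Engine plugin shows up when the network first runs.
        cv::Mat blob = cv::dnn::blobFromImage(cv::Mat(224, 224, CV_8UC3, cv::Scalar::all(0)));
        net.setInput(blob);
        net.forward();
    }
    catch (const cv::Exception& e)
    {
        // With this commit the failure is raised via CV_Error, i.e. as a
        // cv::Exception carrying a "Failed to initialize Inference Engine backend" message.
        std::cerr << e.what() << std::endl;
        return 1;
    }
    return 0;
}
```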


@@ -34,7 +34,7 @@ public:
     void processNet(std::string weights, std::string proto, std::string halide_scheduler,
                     const Mat& input, const std::string& outputLayer = "")
     {
-        if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL)
+        if (backend == DNN_BACKEND_OPENCV && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
         {
 #if defined(HAVE_OPENCL)
             if (!cv::ocl::useOpenCL())


@@ -2252,7 +2252,13 @@ void Net::setPreferableTarget(int targetId)
         if (IS_DNN_OPENCL_TARGET(targetId))
         {
 #ifndef HAVE_OPENCL
-            impl->preferableTarget = DNN_TARGET_CPU;
+#ifdef HAVE_INF_ENGINE
+            if (impl->preferableBackend == DNN_BACKEND_OPENCV)
+#else
+            if (impl->preferableBackend == DNN_BACKEND_DEFAULT ||
+                impl->preferableBackend == DNN_BACKEND_OPENCV)
+#endif // HAVE_INF_ENGINE
+                impl->preferableTarget = DNN_TARGET_CPU;
 #else
             bool fp16 = ocl::Device::getDefault().isExtensionSupported("cl_khr_fp16");
             if (!fp16 && targetId == DNN_TARGET_OPENCL_FP16)
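
Stripped of the preprocessor conditionals, the fallback in the hunk above reads roughly as follows. This is an illustrative paraphrase, not code from the patch; resolveTarget() and its boolean parameters are invented names standing in for the HAVE_OPENCL / HAVE_INF_ENGINE build flags:

```cpp
#include <opencv2/dnn.hpp>

// Illustrative paraphrase of the #ifndef HAVE_OPENCL branch above;
// resolveTarget() is not an OpenCV function.
static int resolveTarget(int backend, int target, bool builtWithOpenCL, bool builtWithInfEngine)
{
    using namespace cv::dnn;
    if (builtWithOpenCL)
        return target;  // the #else branch (cl_khr_fp16 capability check) applies instead
    // Without OpenCV's own OpenCL support, an OpenCL target is only kept for
    // backends that delegate elsewhere: when Inference Engine is available,
    // the default backend maps to IE, which brings its own OpenCL plugin.
    bool needsOpenCVOpenCL = (backend == DNN_BACKEND_OPENCV) ||
                             (!builtWithInfEngine && backend == DNN_BACKEND_DEFAULT);
    if (needsOpenCVOpenCL && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
        return DNN_TARGET_CPU;  // downgrade to CPU
    return target;
}
```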


@@ -361,35 +361,60 @@ void InfEngineBackendNet::initPlugin(InferenceEngine::ICNNNetwork& net)
 {
     CV_Assert(!isInitialized());
 
-    static std::map<std::string, InferenceEngine::InferenceEnginePluginPtr> sharedPlugins;
-    std::string deviceName = InferenceEngine::getDeviceName(targetDevice);
-    auto pluginIt = sharedPlugins.find(deviceName);
-    if (pluginIt != sharedPlugins.end())
-    {
-        enginePtr = pluginIt->second;
-    }
-    else
-    {
-        enginePtr = InferenceEngine::PluginDispatcher({""}).getSuitablePlugin(targetDevice);
-        sharedPlugins[deviceName] = enginePtr;
-    }
-    plugin = InferenceEngine::InferencePlugin(enginePtr);
-    if (targetDevice == InferenceEngine::TargetDevice::eCPU)
-    {
-#ifdef _WIN32
-        InferenceEngine::IExtensionPtr extension =
-            InferenceEngine::make_so_pointer<InferenceEngine::IExtension>("cpu_extension.dll");
-#else
-        InferenceEngine::IExtensionPtr extension =
-            InferenceEngine::make_so_pointer<InferenceEngine::IExtension>("libcpu_extension.so");
-#endif // _WIN32
-        plugin.AddExtension(extension);
-    }
-
-    netExec = plugin.LoadNetwork(net, {});
-    infRequest = netExec.CreateInferRequest();
-    infRequest.SetInput(inpBlobs);
-    infRequest.SetOutput(outBlobs);
+    try
+    {
+        static std::map<std::string, InferenceEngine::InferenceEnginePluginPtr> sharedPlugins;
+        std::string deviceName = InferenceEngine::getDeviceName(targetDevice);
+        auto pluginIt = sharedPlugins.find(deviceName);
+        if (pluginIt != sharedPlugins.end())
+        {
+            enginePtr = pluginIt->second;
+        }
+        else
+        {
+            enginePtr = InferenceEngine::PluginDispatcher({""}).getSuitablePlugin(targetDevice);
+            sharedPlugins[deviceName] = enginePtr;
+
+            if (targetDevice == InferenceEngine::TargetDevice::eCPU)
+            {
+                std::string suffixes[] = {"_avx2", "_sse4", ""};
+                bool haveFeature[] = {
+                    checkHardwareSupport(CPU_AVX2),
+                    checkHardwareSupport(CPU_SSE4_2),
+                    true
+                };
+                for (int i = 0; i < 3; ++i)
+                {
+                    if (!haveFeature[i])
+                        continue;
+#ifdef _WIN32
+                    std::string libName = "cpu_extension" + suffixes[i] + ".dll";
+#else
+                    std::string libName = "libcpu_extension" + suffixes[i] + ".so";
+#endif // _WIN32
+                    try
+                    {
+                        InferenceEngine::IExtensionPtr extension =
+                            InferenceEngine::make_so_pointer<InferenceEngine::IExtension>(libName);
+                        enginePtr->AddExtension(extension, 0);
+                        break;
+                    }
+                    catch(...) {}
+                }
+                // Some of networks can work without a library of extra layers.
+            }
+        }
+        plugin = InferenceEngine::InferencePlugin(enginePtr);
+
+        netExec = plugin.LoadNetwork(net, {});
+        infRequest = netExec.CreateInferRequest();
+        infRequest.SetInput(inpBlobs);
+        infRequest.SetOutput(outBlobs);
+    }
+    catch (const std::exception& ex)
+    {
+        CV_Error(Error::StsAssert, format("Failed to initialize Inference Engine backend: %s", ex.what()));
+    }
 }
 
 bool InfEngineBackendNet::isInitialized()


@@ -40,7 +40,7 @@ public:
                     std::string halideScheduler = "",
                     double l1 = 0.0, double lInf = 0.0)
     {
-        if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL)
+        if (backend == DNN_BACKEND_OPENCV && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
         {
 #ifdef HAVE_OPENCL
             if (!cv::ocl::useOpenCL())