Mirror of https://github.com/opencv/opencv.git
Merge pull request #16809 from alalek:dnn_ie_separate_core_instances

commit 00925ad795
@@ -108,7 +108,7 @@ public:
     {
 #if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2019R3)
         // Lightweight detection
-        const std::vector<std::string> devices = getCore().GetAvailableDevices();
+        const std::vector<std::string> devices = getCore("").GetAvailableDevices();
         for (std::vector<std::string>::const_iterator i = devices.begin(); i != devices.end(); ++i)
         {
             if (std::string::npos != i->find("MYRIAD") && target == DNN_TARGET_MYRIAD)
@@ -3253,7 +3253,7 @@ Net Net::readFromModelOptimizer(const String& xml, const String& bin)
 
     InferenceEngine::CNNNetwork ieNet = reader.getNetwork();
 #else
-    InferenceEngine::Core& ie = getCore();
+    InferenceEngine::Core& ie = getCore("");
     InferenceEngine::CNNNetwork ieNet = ie.ReadNetwork(xml, bin);
 #endif
 
@@ -3302,7 +3302,7 @@ Net Net::readFromModelOptimizer(
 
     InferenceEngine::CNNNetwork ieNet = reader.getNetwork();
 #else
-    InferenceEngine::Core& ie = getCore();
+    InferenceEngine::Core& ie = getCore("");
 
     std::string model; model.assign((char*)bufferModelConfigPtr, bufferModelConfigSize);
 
@@ -524,7 +524,7 @@ void InfEngineNgraphNet::initPlugin(InferenceEngine::CNNNetwork& net)
     try
     {
         AutoLock lock(getInitializationMutex());
-        InferenceEngine::Core& ie = getCore();
+        InferenceEngine::Core& ie = getCore(device_name);
         {
             isInit = true;
             std::vector<std::string> candidates;
@@ -604,18 +604,31 @@ static bool init_IE_plugins()
     (void)init_core->GetAvailableDevices();
     return true;
 }
-static InferenceEngine::Core& create_IE_Core_instance()
+static InferenceEngine::Core& retrieveIECore(const std::string& id, std::map<std::string, std::shared_ptr<InferenceEngine::Core> >& cores)
 {
-    static InferenceEngine::Core core;
-    return core;
+    AutoLock lock(getInitializationMutex());
+    std::map<std::string, std::shared_ptr<InferenceEngine::Core> >::iterator i = cores.find(id);
+    if (i == cores.end())
+    {
+        std::shared_ptr<InferenceEngine::Core> core = std::make_shared<InferenceEngine::Core>();
+        cores[id] = core;
+        return *core.get();
+    }
+    return *(i->second).get();
 }
-static InferenceEngine::Core& create_IE_Core_pointer()
+static InferenceEngine::Core& create_IE_Core_instance(const std::string& id)
+{
+    static std::map<std::string, std::shared_ptr<InferenceEngine::Core> > cores;
+    return retrieveIECore(id, cores);
+}
+static InferenceEngine::Core& create_IE_Core_pointer(const std::string& id)
 {
     // load and hold IE plugins
-    static InferenceEngine::Core* core = new InferenceEngine::Core();  // 'delete' is never called
-    return *core;
+    static std::map<std::string, std::shared_ptr<InferenceEngine::Core> >* cores =
+            new std::map<std::string, std::shared_ptr<InferenceEngine::Core> >();
+    return retrieveIECore(id, *cores);
 }
-InferenceEngine::Core& getCore()
+InferenceEngine::Core& getCore(const std::string& id)
 {
     // to make happy memory leak tools use:
     // - OPENCV_DNN_INFERENCE_ENGINE_HOLD_PLUGINS=0
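The hunk above replaces the single shared InferenceEngine::Core with a registry of cores keyed by an id string (the device name, or "" for generic model reading) and guarded by the initialization mutex. Below is a minimal, self-contained sketch of that keyed-singleton pattern; the Engine type and the local std::mutex are stand-ins for InferenceEngine::Core and getInitializationMutex(), assumptions for illustration only and not part of the patch.

// Minimal sketch of the per-id core cache introduced above.
// "Engine" and the local mutex are placeholders for InferenceEngine::Core
// and OpenCV's getInitializationMutex().
#include <iostream>
#include <map>
#include <memory>
#include <mutex>
#include <string>

struct Engine                       // placeholder for InferenceEngine::Core
{
    explicit Engine(const std::string& id) : id_(id) {}
    std::string id_;
};

static Engine& retrieveEngine(const std::string& id,
                              std::map<std::string, std::shared_ptr<Engine> >& cores)
{
    static std::mutex m;                      // stand-in for getInitializationMutex()
    std::lock_guard<std::mutex> lock(m);
    std::map<std::string, std::shared_ptr<Engine> >::iterator i = cores.find(id);
    if (i == cores.end())
    {
        std::shared_ptr<Engine> core = std::make_shared<Engine>(id);
        cores[id] = core;                     // cache one instance per device id
        return *core;
    }
    return *(i->second);                      // reuse the cached instance
}

static Engine& getEngine(const std::string& id)
{
    static std::map<std::string, std::shared_ptr<Engine> > cores;
    return retrieveEngine(id, cores);
}

int main()
{
    Engine& a = getEngine("MYRIAD");
    Engine& b = getEngine("MYRIAD");
    Engine& c = getEngine("");
    std::cout << (&a == &b) << ' ' << (&a != &c) << std::endl;  // prints "1 1"
    return 0;
}

Requesting the same id twice returns the same instance, while different ids get independent cores, presumably so that per-device operations such as UnregisterPlugin("MYRIAD") in resetMyriadDevice() only touch the core bound to that device.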
@@ -631,9 +644,10 @@ InferenceEngine::Core& getCore()
         false
 #endif
     );
-    static InferenceEngine::Core& core = param_DNN_INFERENCE_ENGINE_CORE_LIFETIME_WORKAROUND
-            ? create_IE_Core_pointer()
-            : create_IE_Core_instance();
+    InferenceEngine::Core& core = param_DNN_INFERENCE_ENGINE_CORE_LIFETIME_WORKAROUND
+            ? create_IE_Core_pointer(id)
+            : create_IE_Core_instance(id);
     return core;
 }
 #endif
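getCore(id) still honours the existing core-lifetime workaround, but both branches now delegate to retrieveIECore: one keeps the registry in a function-local static that is destroyed at program exit, the other in a heap allocation that is deliberately never freed. A short sketch of that dispatch follows, with a placeholder Engine type and a hypothetical environment flag standing in for the DNN_INFERENCE_ENGINE_CORE_LIFETIME_WORKAROUND parameter; locking is omitted here since the real code takes the lock inside retrieveIECore.

// Sketch of the static-vs-never-deleted registry dispatch shown above.
#include <cstdlib>
#include <map>
#include <memory>
#include <string>

struct Engine { };  // placeholder for InferenceEngine::Core
typedef std::map<std::string, std::shared_ptr<Engine> > Registry;

static Engine& retrieveEngine(const std::string& id, Registry& cores)
{
    std::shared_ptr<Engine>& core = cores[id];  // creates an empty slot on first use
    if (!core)
        core = std::make_shared<Engine>();
    return *core;
}

// Registry with static storage: destroyed at program exit, which keeps
// memory-leak tools happy.
static Engine& engineFromStatic(const std::string& id)
{
    static Registry cores;
    return retrieveEngine(id, cores);
}

// Registry on the heap that is deliberately never freed ('delete' is never
// called), so the cores and their plugins stay loaded until process teardown.
static Engine& engineFromLeakedHeap(const std::string& id)
{
    static Registry* cores = new Registry();
    return retrieveEngine(id, *cores);
}

Engine& getEngine(const std::string& id)
{
    // Hypothetical flag standing in for the lifetime-workaround parameter.
    static const bool holdPlugins = std::getenv("SKETCH_HOLD_PLUGINS") != nullptr;
    return holdPlugins ? engineFromLeakedHeap(id) : engineFromStatic(id);
}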
@@ -641,9 +655,10 @@ InferenceEngine::Core& getCore()
 #if !defined(OPENCV_DNN_IE_VPU_TYPE_DEFAULT)
 static bool detectMyriadX_()
 {
+    AutoLock lock(getInitializationMutex());
 #if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2019R3)
     // Lightweight detection
-    InferenceEngine::Core& ie = getCore();
+    InferenceEngine::Core& ie = getCore("MYRIAD");
     const std::vector<std::string> devices = ie.GetAvailableDevices();
     for (std::vector<std::string>::const_iterator i = devices.begin(); i != devices.end(); ++i)
     {
@@ -687,7 +702,6 @@ static bool detectMyriadX_()
 #if INF_ENGINE_VER_MAJOR_LE(INF_ENGINE_RELEASE_2019R1)
     InferenceEngine::InferenceEnginePluginPtr enginePtr;
     {
-        AutoLock lock(getInitializationMutex());
         auto& sharedPlugins = getSharedPlugins();
         auto pluginIt = sharedPlugins.find("MYRIAD");
         if (pluginIt != sharedPlugins.end()) {
@@ -706,9 +720,9 @@ static bool detectMyriadX_()
     try
     {
 #if INF_ENGINE_VER_MAJOR_LE(INF_ENGINE_RELEASE_2019R3)
-        auto netExec = getCore().LoadNetwork(cnn, "MYRIAD", {{"VPU_PLATFORM", "VPU_2480"}});
+        auto netExec = getCore("MYRIAD").LoadNetwork(cnn, "MYRIAD", {{"VPU_PLATFORM", "VPU_2480"}});
 #else
-        auto netExec = getCore().LoadNetwork(cnn, "MYRIAD", {{"VPU_MYRIAD_PLATFORM", "VPU_MYRIAD_2480"}});
+        auto netExec = getCore("MYRIAD").LoadNetwork(cnn, "MYRIAD", {{"VPU_MYRIAD_PLATFORM", "VPU_MYRIAD_2480"}});
 #endif
 #endif
         auto infRequest = netExec.CreateInferRequest();
@@ -739,7 +753,7 @@ void InfEngineBackendNet::initPlugin(InferenceEngine::CNNNetwork& net)
     }
     else
 #else
-    InferenceEngine::Core& ie = getCore();
+    InferenceEngine::Core& ie = getCore(device_name);
 #endif
     {
 #if INF_ENGINE_VER_MAJOR_LE(INF_ENGINE_RELEASE_2019R1)
@@ -1124,7 +1138,7 @@ void resetMyriadDevice()
     getSharedPlugins().erase("MYRIAD");
 #else
     // Unregister both "MYRIAD" and "HETERO:MYRIAD,CPU" plugins
-    InferenceEngine::Core& ie = getCore();
+    InferenceEngine::Core& ie = getCore("MYRIAD");
     try
     {
         ie.UnregisterPlugin("MYRIAD");
@@ -245,7 +245,7 @@ bool isMyriadX();
 
 CV__DNN_EXPERIMENTAL_NS_END
 
-InferenceEngine::Core& getCore();
+InferenceEngine::Core& getCore(const std::string& id);
 
 template<typename T = size_t>
 static inline std::vector<T> getShape(const Mat& mat)
|
Loading…
Reference in New Issue
Block a user