2018-02-06 16:57:35 +08:00
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
2019-03-29 21:42:58 +08:00
// Copyright (C) 2018-2019, Intel Corporation, all rights reserved.
2018-02-06 16:57:35 +08:00
// Third party copyrights are property of their respective owners.
# include "precomp.hpp"
# include "op_inf_engine.hpp"
# include <opencv2/dnn/shape_utils.hpp>
2018-04-09 21:22:19 +08:00
# ifdef HAVE_INF_ENGINE
# include <ie_extension.h>
# include <ie_plugin_dispatcher.hpp>
# endif // HAVE_INF_ENGINE
2019-03-29 21:42:58 +08:00
# include <opencv2/core/utils/configuration.private.hpp>
# include <opencv2/core/utils/logger.hpp>
2018-02-06 16:57:35 +08:00
namespace cv { namespace dnn {
# ifdef HAVE_INF_ENGINE
2019-02-14 18:30:30 +08:00
// For networks with input layer which has an empty name, IE generates a name id[some_number].
// OpenCV lets users use an empty input name and to prevent unexpected naming,
// we can use some predefined name.
// NOTE: the literal must have no surrounding whitespace — it is used as a map key in addBlobs()/connect().
static std::string kDefaultInpLayerName = "empty_inp_layer_name";
2019-01-14 14:55:44 +08:00
// Wraps an Inference Engine builder layer as a DNN backend node.
InfEngineBackendNode::InfEngineBackendNode(const InferenceEngine::Builder::Layer& _layer)
    : BackendNode(DNN_BACKEND_INFERENCE_ENGINE), layer(_layer)
{
}
2018-02-06 16:57:35 +08:00
static std : : vector < Ptr < InfEngineBackendWrapper > >
infEngineWrappers ( const std : : vector < Ptr < BackendWrapper > > & ptrs )
{
std : : vector < Ptr < InfEngineBackendWrapper > > wrappers ( ptrs . size ( ) ) ;
for ( int i = 0 ; i < ptrs . size ( ) ; + + i )
{
CV_Assert ( ! ptrs [ i ] . empty ( ) ) ;
wrappers [ i ] = ptrs [ i ] . dynamicCast < InfEngineBackendWrapper > ( ) ;
CV_Assert ( ! wrappers [ i ] . empty ( ) ) ;
}
return wrappers ;
}
2019-01-14 14:55:44 +08:00
// Default constructor: the network will be assembled via the Builder API,
// so there is no external CNNNetwork owner. Builder name must be empty, not " ".
InfEngineBackendNet::InfEngineBackendNet() : netBuilder("")
{
    hasNetOwner = false;
    targetDevice = InferenceEngine::TargetDevice::eCPU;
}
// Constructs the backend around an already-built CNNNetwork (e.g. loaded from
// an IR model); the builder is unused in this mode. Builder name must be empty.
InfEngineBackendNet::InfEngineBackendNet(InferenceEngine::CNNNetwork& net) : netBuilder(""), cnn(net)
{
    hasNetOwner = true;
    targetDevice = InferenceEngine::TargetDevice::eCPU;
}
void InfEngineBackendNet : : connect ( const std : : vector < Ptr < BackendWrapper > > & inputs ,
const std : : vector < Ptr < BackendWrapper > > & outputs ,
const std : : string & layerName )
{
std : : vector < Ptr < InfEngineBackendWrapper > > inpWrappers = infEngineWrappers ( inputs ) ;
std : : map < std : : string , int > : : iterator it = layers . find ( layerName ) ;
CV_Assert ( it ! = layers . end ( ) ) ;
const int layerId = it - > second ;
2019-01-25 20:02:29 +08:00
for ( size_t i = 0 ; i < inpWrappers . size ( ) ; + + i )
2019-01-14 14:55:44 +08:00
{
const auto & inp = inpWrappers [ i ] ;
const std : : string & inpName = inp - > dataPtr - > name ;
int inpId ;
it = layers . find ( inpName ) ;
if ( it = = layers . end ( ) )
{
2019-02-14 18:30:30 +08:00
InferenceEngine : : Builder : : InputLayer inpLayer ( ! inpName . empty ( ) ? inpName : kDefaultInpLayerName ) ;
2019-01-14 14:55:44 +08:00
std : : vector < size_t > shape ( inp - > blob - > dims ( ) ) ;
std : : reverse ( shape . begin ( ) , shape . end ( ) ) ;
inpLayer . setPort ( InferenceEngine : : Port ( shape ) ) ;
inpId = netBuilder . addLayer ( inpLayer ) ;
layers . insert ( { inpName , inpId } ) ;
}
else
inpId = it - > second ;
2019-01-25 20:02:29 +08:00
netBuilder . connect ( ( size_t ) inpId , { ( size_t ) layerId , i } ) ;
2019-01-14 14:55:44 +08:00
unconnectedLayersIds . erase ( inpId ) ;
}
CV_Assert ( ! outputs . empty ( ) ) ;
InferenceEngine : : DataPtr dataPtr = infEngineDataNode ( outputs [ 0 ] ) ;
dataPtr - > name = layerName ;
}
void InfEngineBackendNet : : init ( int targetId )
{
if ( ! hasNetOwner )
{
CV_Assert ( ! unconnectedLayersIds . empty ( ) ) ;
for ( int id : unconnectedLayersIds )
{
InferenceEngine : : Builder : : OutputLayer outLayer ( " myconv1 " ) ;
2019-04-01 20:00:25 +08:00
# if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2019R1)
2019-02-14 18:30:30 +08:00
// Inference Engine determines network precision by ports.
InferenceEngine : : Precision p = ( targetId = = DNN_TARGET_MYRIAD | |
targetId = = DNN_TARGET_OPENCL_FP16 ) ?
InferenceEngine : : Precision : : FP16 :
InferenceEngine : : Precision : : FP32 ;
outLayer . setPort ( InferenceEngine : : Port ( { } , p ) ) ;
# endif
2019-01-25 20:02:29 +08:00
netBuilder . addLayer ( { InferenceEngine : : PortInfo ( id ) } , outLayer ) ;
2019-01-14 14:55:44 +08:00
}
cnn = InferenceEngine : : CNNNetwork ( InferenceEngine : : Builder : : convertToICNNNetwork ( netBuilder . build ( ) ) ) ;
}
switch ( targetId )
{
case DNN_TARGET_CPU :
targetDevice = InferenceEngine : : TargetDevice : : eCPU ;
break ;
case DNN_TARGET_OPENCL : case DNN_TARGET_OPENCL_FP16 :
targetDevice = InferenceEngine : : TargetDevice : : eGPU ;
break ;
case DNN_TARGET_MYRIAD :
targetDevice = InferenceEngine : : TargetDevice : : eMYRIAD ;
break ;
case DNN_TARGET_FPGA :
targetDevice = InferenceEngine : : TargetDevice : : eFPGA ;
break ;
default :
CV_Error ( Error : : StsError , format ( " Unknown target identifier: %d " , targetId ) ) ;
}
for ( const auto & name : requestedOutputs )
{
cnn . addOutput ( name ) ;
}
for ( const auto & it : cnn . getInputsInfo ( ) )
{
const std : : string & name = it . first ;
auto blobIt = allBlobs . find ( name ) ;
CV_Assert ( blobIt ! = allBlobs . end ( ) ) ;
it . second - > setPrecision ( blobIt - > second - > precision ( ) ) ;
}
for ( const auto & it : cnn . getOutputsInfo ( ) )
{
const std : : string & name = it . first ;
auto blobIt = allBlobs . find ( name ) ;
CV_Assert ( blobIt ! = allBlobs . end ( ) ) ;
it . second - > setPrecision ( blobIt - > second - > precision ( ) ) ; // Should be always FP32
}
initPlugin ( cnn ) ;
}
2019-02-14 18:30:30 +08:00
void InfEngineBackendNet : : addLayer ( InferenceEngine : : Builder : : Layer & layer )
2019-01-14 14:55:44 +08:00
{
2019-04-01 20:00:25 +08:00
# if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2019R1)
2019-02-14 18:30:30 +08:00
// Add weights to network and connect them after input blobs.
std : : map < std : : string , InferenceEngine : : Parameter > & params = layer . getParameters ( ) ;
std : : vector < int > blobsIds ;
std : : vector < int > portIds ;
for ( const std : : string & name : { " weights " , " biases " } )
{
bool asInput = false ;
int portId = 0 ;
for ( int i = 0 ; i < layer . getInputPorts ( ) . size ( ) ; + + i )
{
const auto & port = layer . getInputPorts ( ) [ i ] ;
auto it = port . getParameters ( ) . find ( " type " ) ;
if ( it ! = port . getParameters ( ) . end ( ) & & it - > second = = name )
{
portId = i ;
asInput = true ;
break ;
}
}
if ( ! asInput )
continue ;
auto it = params . find ( name ) ;
if ( it ! = params . end ( ) )
{
InferenceEngine : : Blob : : Ptr blob = it - > second . as < InferenceEngine : : Blob : : Ptr > ( ) ;
params . erase ( it ) ;
int blobId = netBuilder . addLayer ( InferenceEngine : : Builder : : ConstLayer ( name ) . setData ( blob ) ) ;
blobsIds . push_back ( blobId ) ;
portIds . push_back ( portId ) ;
}
}
# endif
2019-01-14 14:55:44 +08:00
int id = netBuilder . addLayer ( layer ) ;
const std : : string & layerName = layer . getName ( ) ;
CV_Assert ( layers . insert ( { layerName , id } ) . second ) ;
unconnectedLayersIds . insert ( id ) ;
2019-02-14 18:30:30 +08:00
2019-04-01 20:00:25 +08:00
# if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2019R1)
2019-02-14 18:30:30 +08:00
// By default, all the weights are connected to last ports ids.
for ( int i = 0 ; i < blobsIds . size ( ) ; + + i )
{
2019-02-21 16:42:55 +08:00
netBuilder . connect ( ( size_t ) blobsIds [ i ] , { ( size_t ) id , ( size_t ) portIds [ i ] } ) ;
2019-02-14 18:30:30 +08:00
}
# endif
2019-01-14 14:55:44 +08:00
}
void InfEngineBackendNet : : addOutput ( const std : : string & name )
{
requestedOutputs . push_back ( name ) ;
}
2018-05-31 19:05:21 +08:00
// Guesses an Inference Engine tensor layout from the Mat dimensionality:
// 4-D -> NCHW, 2-D -> NC, anything else -> ANY.
static InferenceEngine::Layout estimateLayout(const Mat& m)
{
    switch (m.dims)
    {
        case 4:  return InferenceEngine::Layout::NCHW;
        case 2:  return InferenceEngine::Layout::NC;
        default: return InferenceEngine::Layout::ANY;
    }
}
2018-02-06 16:57:35 +08:00
// Creates an IE data node describing `m` (name, reversed shape, precision,
// layout). Only CV_32F and CV_8U Mats are supported; default name is empty.
static InferenceEngine::DataPtr wrapToInfEngineDataNode(const Mat& m, const std::string& name = "")
{
    // IE expects the shape in reversed (minor-to-major) order.
    std::vector<size_t> reversedShape(&m.size[0], &m.size[0] + m.dims);
    std::reverse(reversedShape.begin(), reversedShape.end());
    if (m.type() == CV_32F)
        return InferenceEngine::DataPtr(
            new InferenceEngine::Data(name, reversedShape, InferenceEngine::Precision::FP32, estimateLayout(m))
        );
    else if (m.type() == CV_8U)
        return InferenceEngine::DataPtr(
            new InferenceEngine::Data(name, reversedShape, InferenceEngine::Precision::U8, estimateLayout(m))
        );
    else
        CV_Error(Error::StsNotImplemented, format("Unsupported data type %d", m.type()));
}
2018-06-05 04:51:28 +08:00
// Wraps a Mat's data buffer (no copy) as an IE blob with an explicit shape and
// layout. Only CV_32F and CV_8U are supported; the Mat must outlive the blob.
InferenceEngine::Blob::Ptr wrapToInfEngineBlob(const Mat& m, const std::vector<size_t>& shape,
                                               InferenceEngine::Layout layout)
{
    if (m.type() == CV_32F)
        return InferenceEngine::make_shared_blob<float>(InferenceEngine::Precision::FP32,
                                                        layout, shape, (float*)m.data);
    else if (m.type() == CV_8U)
        return InferenceEngine::make_shared_blob<uint8_t>(InferenceEngine::Precision::U8,
                                                          layout, shape, (uint8_t*)m.data);
    else
        CV_Error(Error::StsNotImplemented, format("Unsupported data type %d", m.type()));
}
2018-06-05 04:51:28 +08:00
// Convenience overload: derives the (reversed) shape from the Mat itself.
InferenceEngine::Blob::Ptr wrapToInfEngineBlob(const Mat& m, InferenceEngine::Layout layout)
{
    std::vector<size_t> reversedShape(&m.size[0], &m.size[0] + m.dims);
    std::reverse(reversedShape.begin(), reversedShape.end());
    return wrapToInfEngineBlob(m, reversedShape, layout);
}
2019-04-20 02:01:19 +08:00
// Allocates a new, uninitialized blob with the same precision, layout and
// dims as `blob` (used to give async infer requests private buffers).
// Note: the contents are NOT copied, only the descriptor is cloned.
InferenceEngine::Blob::Ptr cloneBlob(const InferenceEngine::Blob::Ptr& blob)
{
    InferenceEngine::Precision precision = blob->precision();
    InferenceEngine::Blob::Ptr copy;
    if (precision == InferenceEngine::Precision::FP32)
    {
        copy = InferenceEngine::make_shared_blob<float>(precision, blob->layout(), blob->dims());
    }
    else if (precision == InferenceEngine::Precision::U8)
    {
        copy = InferenceEngine::make_shared_blob<uint8_t>(precision, blob->layout(), blob->dims());
    }
    else
        CV_Error(Error::StsNotImplemented, "Unsupported blob precision");
    copy->allocate();
    return copy;
}
2018-02-06 16:57:35 +08:00
// Extracts the IE data node from a generic backend wrapper, asserting the
// wrapper is non-empty and of the Inference Engine type.
InferenceEngine::DataPtr infEngineDataNode(const Ptr<BackendWrapper>& ptr)
{
    CV_Assert(!ptr.empty());
    Ptr<InfEngineBackendWrapper> ieWrapper = ptr.dynamicCast<InfEngineBackendWrapper>();
    CV_Assert(!ieWrapper.empty());
    return ieWrapper->dataPtr;
}
// Wraps a Mat as both an IE data node (shape/precision descriptor) and an IE
// blob that shares the Mat's memory.
InfEngineBackendWrapper::InfEngineBackendWrapper(int targetId, const cv::Mat& m)
    : BackendWrapper(DNN_BACKEND_INFERENCE_ENGINE, targetId)
{
    dataPtr = wrapToInfEngineDataNode(m);
    blob = wrapToInfEngineBlob(m, estimateLayout(m));
}
2018-06-05 04:51:28 +08:00
// Creates a wrapper that shares the blob of an existing IE wrapper but owns a
// fresh data node copy (so the name can later diverge without aliasing).
InfEngineBackendWrapper::InfEngineBackendWrapper(Ptr<BackendWrapper> wrapper)
    : BackendWrapper(DNN_BACKEND_INFERENCE_ENGINE, wrapper->targetId)
{
    Ptr<InfEngineBackendWrapper> ieWrapper = wrapper.dynamicCast<InfEngineBackendWrapper>();
    CV_Assert(!ieWrapper.empty());

    InferenceEngine::DataPtr srcData = ieWrapper->dataPtr;
    dataPtr = InferenceEngine::DataPtr(
        new InferenceEngine::Data(srcData->name, srcData->dims, srcData->precision,
                                  srcData->layout)
    );
    blob = ieWrapper->blob;
}
// Factory helper used by the DNN engine to duplicate wrappers.
Ptr<BackendWrapper> InfEngineBackendWrapper::create(Ptr<BackendWrapper> wrapper)
{
    return Ptr<BackendWrapper>(new InfEngineBackendWrapper(wrapper));
}
2018-02-06 16:57:35 +08:00
InfEngineBackendWrapper : : ~ InfEngineBackendWrapper ( )
{
}
void InfEngineBackendWrapper : : copyToHost ( )
{
}
void InfEngineBackendWrapper : : setHostDirty ( )
{
}
2019-01-31 21:10:59 +08:00
static std : : map < InferenceEngine : : TargetDevice , InferenceEngine : : InferenceEnginePluginPtr > & getSharedPlugins ( )
{
static std : : map < InferenceEngine : : TargetDevice , InferenceEngine : : InferenceEnginePluginPtr > sharedPlugins ;
return sharedPlugins ;
}
2018-09-18 16:21:08 +08:00
2019-03-29 21:42:58 +08:00
2019-06-14 23:17:02 +08:00
# if !defined(OPENCV_DNN_IE_VPU_TYPE_DEFAULT)
2019-03-29 21:42:58 +08:00
static bool detectMyriadX_ ( )
{
InferenceEngine : : Builder : : Network builder ( " " ) ;
InferenceEngine : : idx_t inpId = builder . addLayer (
InferenceEngine : : Builder : : InputLayer ( ) . setPort ( InferenceEngine : : Port ( { 1 } ) ) ) ;
# if INF_ENGINE_RELEASE <= 2018050000
InferenceEngine : : idx_t clampId ;
{
InferenceEngine : : Builder : : Layer l = InferenceEngine : : Builder : : ClampLayer ( ) ;
auto & blobs = l . getConstantData ( ) ;
auto blob = InferenceEngine : : make_shared_blob < int16_t > (
InferenceEngine : : Precision : : FP16 ,
InferenceEngine : : Layout : : C , { 1 } ) ;
blob - > allocate ( ) ;
blobs [ " " ] = blob ;
clampId = builder . addLayer ( { inpId } , l ) ;
}
builder . addLayer ( { InferenceEngine : : PortInfo ( clampId ) } , InferenceEngine : : Builder : : OutputLayer ( ) ) ;
# else
2019-06-14 23:17:02 +08:00
2019-03-29 21:42:58 +08:00
InferenceEngine : : idx_t clampId = builder . addLayer ( { inpId } , InferenceEngine : : Builder : : ClampLayer ( ) ) ;
builder . addLayer ( { InferenceEngine : : PortInfo ( clampId ) } ,
InferenceEngine : : Builder : : OutputLayer ( ) . setPort ( InferenceEngine : : Port ( { } ,
InferenceEngine : : Precision : : FP16 ) ) ) ;
# endif
InferenceEngine : : CNNNetwork cnn = InferenceEngine : : CNNNetwork (
InferenceEngine : : Builder : : convertToICNNNetwork ( builder . build ( ) ) ) ;
InferenceEngine : : TargetDevice device = InferenceEngine : : TargetDevice : : eMYRIAD ;
InferenceEngine : : InferenceEnginePluginPtr enginePtr ;
{
AutoLock lock ( getInitializationMutex ( ) ) ;
auto & sharedPlugins = getSharedPlugins ( ) ;
auto pluginIt = sharedPlugins . find ( device ) ;
if ( pluginIt ! = sharedPlugins . end ( ) ) {
enginePtr = pluginIt - > second ;
} else {
auto dispatcher = InferenceEngine : : PluginDispatcher ( { " " } ) ;
enginePtr = dispatcher . getSuitablePlugin ( device ) ;
sharedPlugins [ device ] = enginePtr ;
}
}
auto plugin = InferenceEngine : : InferencePlugin ( enginePtr ) ;
try
{
2019-04-14 00:02:03 +08:00
auto netExec = plugin . LoadNetwork ( cnn , { { " VPU_PLATFORM " , " VPU_2480 " } } ) ;
2019-03-29 21:42:58 +08:00
auto infRequest = netExec . CreateInferRequest ( ) ;
} catch ( . . . ) {
return false ;
}
return true ;
}
2019-06-14 23:17:02 +08:00
# endif // !defined(OPENCV_DNN_IE_VPU_TYPE_DEFAULT)
2019-03-29 21:42:58 +08:00
2018-03-17 00:27:04 +08:00
void InfEngineBackendNet : : initPlugin ( InferenceEngine : : ICNNNetwork & net )
{
CV_Assert ( ! isInitialized ( ) ) ;
2018-04-09 21:22:19 +08:00
2018-06-05 22:18:14 +08:00
try
2018-06-01 19:10:32 +08:00
{
2019-01-31 21:10:59 +08:00
AutoLock lock ( getInitializationMutex ( ) ) ;
auto & sharedPlugins = getSharedPlugins ( ) ;
2018-07-20 23:58:37 +08:00
auto pluginIt = sharedPlugins . find ( targetDevice ) ;
2018-06-05 22:18:14 +08:00
if ( pluginIt ! = sharedPlugins . end ( ) )
{
enginePtr = pluginIt - > second ;
}
else
{
2018-11-16 22:09:54 +08:00
auto dispatcher = InferenceEngine : : PluginDispatcher ( { " " } ) ;
if ( targetDevice = = InferenceEngine : : TargetDevice : : eFPGA )
enginePtr = dispatcher . getPluginByDevice ( " HETERO:FPGA,CPU " ) ;
else
enginePtr = dispatcher . getSuitablePlugin ( targetDevice ) ;
2018-07-20 23:58:37 +08:00
sharedPlugins [ targetDevice ] = enginePtr ;
2018-04-09 21:22:19 +08:00
2019-06-26 14:41:01 +08:00
std : : vector < std : : string > candidates ;
std : : string param_pluginPath = utils : : getConfigurationParameterString ( " OPENCV_DNN_IE_EXTRA_PLUGIN_PATH " , " " ) ;
if ( ! param_pluginPath . empty ( ) )
{
candidates . push_back ( param_pluginPath ) ;
}
2018-11-16 22:09:54 +08:00
if ( targetDevice = = InferenceEngine : : TargetDevice : : eCPU | |
targetDevice = = InferenceEngine : : TargetDevice : : eFPGA )
2018-06-05 22:18:14 +08:00
{
std : : string suffixes [ ] = { " _avx2 " , " _sse4 " , " " } ;
bool haveFeature [ ] = {
checkHardwareSupport ( CPU_AVX2 ) ,
checkHardwareSupport ( CPU_SSE4_2 ) ,
true
} ;
for ( int i = 0 ; i < 3 ; + + i )
{
if ( ! haveFeature [ i ] )
continue ;
2019-06-26 14:41:01 +08:00
# ifdef _WIN32
candidates . push_back ( " cpu_extension " + suffixes [ i ] + " .dll " ) ;
# elif defined(__APPLE__)
candidates . push_back ( " libcpu_extension " + suffixes [ i ] + " .so " ) ; // built as loadable module
candidates . push_back ( " libcpu_extension " + suffixes [ i ] + " .dylib " ) ; // built as shared library
# else
candidates . push_back ( " libcpu_extension " + suffixes [ i ] + " .so " ) ;
# endif // _WIN32
2018-06-05 22:18:14 +08:00
}
}
2019-06-26 14:41:01 +08:00
bool found = false ;
for ( size_t i = 0 ; i ! = candidates . size ( ) ; + + i )
{
const std : : string & libName = candidates [ i ] ;
try
{
InferenceEngine : : IExtensionPtr extension =
InferenceEngine : : make_so_pointer < InferenceEngine : : IExtension > ( libName ) ;
enginePtr - > AddExtension ( extension , 0 ) ;
CV_LOG_INFO ( NULL , " DNN-IE: Loaded extension plugin: " < < libName ) ;
found = true ;
break ;
}
catch ( . . . ) { }
}
if ( ! found & & ! candidates . empty ( ) )
{
CV_LOG_WARNING ( NULL , " DNN-IE: Can't load extension plugin (extra layers for some networks). Specify path via OPENCV_DNN_IE_EXTRA_PLUGIN_PATH parameter " ) ;
}
// Some of networks can work without a library of extra layers.
2018-06-05 22:18:14 +08:00
}
plugin = InferenceEngine : : InferencePlugin ( enginePtr ) ;
netExec = plugin . LoadNetwork ( net , { } ) ;
}
catch ( const std : : exception & ex )
2018-04-09 21:22:19 +08:00
{
2018-06-05 22:18:14 +08:00
CV_Error ( Error : : StsAssert , format ( " Failed to initialize Inference Engine backend: %s " , ex . what ( ) ) ) ;
2018-04-09 21:22:19 +08:00
}
2018-02-06 16:57:35 +08:00
}
bool InfEngineBackendNet : : isInitialized ( )
{
2018-06-01 19:10:32 +08:00
return ( bool ) enginePtr ;
2018-02-06 16:57:35 +08:00
}
void InfEngineBackendNet : : addBlobs ( const std : : vector < Ptr < BackendWrapper > > & ptrs )
{
auto wrappers = infEngineWrappers ( ptrs ) ;
for ( const auto & wrapper : wrappers )
{
2019-01-14 14:55:44 +08:00
std : : string name = wrapper - > dataPtr - > name ;
2019-02-14 18:30:30 +08:00
name = name . empty ( ) ? kDefaultInpLayerName : name ;
2019-01-14 14:55:44 +08:00
allBlobs . insert ( { name , wrapper - > blob } ) ;
2018-02-06 16:57:35 +08:00
}
}
2019-04-20 02:01:19 +08:00
void InfEngineBackendNet : : InfEngineReqWrapper : : makePromises ( const std : : vector < Ptr < BackendWrapper > > & outsWrappers )
{
auto outs = infEngineWrappers ( outsWrappers ) ;
outProms . clear ( ) ;
outProms . resize ( outs . size ( ) ) ;
outsNames . resize ( outs . size ( ) ) ;
for ( int i = 0 ; i < outs . size ( ) ; + + i )
{
2019-05-01 19:51:12 +08:00
outs [ i ] - > futureMat = outProms [ i ] . getArrayResult ( ) ;
2019-04-20 02:01:19 +08:00
outsNames [ i ] = outs [ i ] - > dataPtr - > name ;
}
}
void InfEngineBackendNet : : forward ( const std : : vector < Ptr < BackendWrapper > > & outBlobsWrappers ,
bool isAsync )
2018-02-06 16:57:35 +08:00
{
2019-04-20 02:01:19 +08:00
// Look for finished requests.
Ptr < InfEngineReqWrapper > reqWrapper ;
for ( auto & wrapper : infRequests )
{
if ( wrapper - > isReady )
{
reqWrapper = wrapper ;
break ;
}
}
if ( reqWrapper . empty ( ) )
{
reqWrapper = Ptr < InfEngineReqWrapper > ( new InfEngineReqWrapper ( ) ) ;
try
{
reqWrapper - > req = netExec . CreateInferRequest ( ) ;
}
catch ( const std : : exception & ex )
{
CV_Error ( Error : : StsAssert , format ( " Failed to initialize Inference Engine backend: %s " , ex . what ( ) ) ) ;
}
infRequests . push_back ( reqWrapper ) ;
InferenceEngine : : BlobMap inpBlobs , outBlobs ;
for ( const auto & it : cnn . getInputsInfo ( ) )
{
const std : : string & name = it . first ;
auto blobIt = allBlobs . find ( name ) ;
CV_Assert ( blobIt ! = allBlobs . end ( ) ) ;
inpBlobs [ name ] = isAsync ? cloneBlob ( blobIt - > second ) : blobIt - > second ;
}
for ( const auto & it : cnn . getOutputsInfo ( ) )
{
const std : : string & name = it . first ;
auto blobIt = allBlobs . find ( name ) ;
CV_Assert ( blobIt ! = allBlobs . end ( ) ) ;
outBlobs [ name ] = isAsync ? cloneBlob ( blobIt - > second ) : blobIt - > second ;
}
reqWrapper - > req . SetInput ( inpBlobs ) ;
reqWrapper - > req . SetOutput ( outBlobs ) ;
InferenceEngine : : IInferRequest : : Ptr infRequestPtr = reqWrapper - > req ;
infRequestPtr - > SetUserData ( reqWrapper . get ( ) , 0 ) ;
2019-04-30 00:03:10 +08:00
infRequestPtr - > SetCompletionCallback (
2019-04-20 02:01:19 +08:00
[ ] ( InferenceEngine : : IInferRequest : : Ptr request , InferenceEngine : : StatusCode status )
{
InfEngineReqWrapper * wrapper ;
request - > GetUserData ( ( void * * ) & wrapper , 0 ) ;
2019-05-01 19:51:12 +08:00
CV_Assert ( wrapper & & " Internal error " ) ;
2019-04-20 02:01:19 +08:00
2019-05-01 19:51:12 +08:00
size_t processedOutputs = 0 ;
try
2019-04-20 02:01:19 +08:00
{
2019-05-01 19:51:12 +08:00
for ( ; processedOutputs < wrapper - > outProms . size ( ) ; + + processedOutputs )
{
const std : : string & name = wrapper - > outsNames [ processedOutputs ] ;
Mat m = infEngineBlobToMat ( wrapper - > req . GetBlob ( name ) ) ;
2019-04-20 02:01:19 +08:00
2019-05-01 19:51:12 +08:00
try
{
CV_Assert ( status = = InferenceEngine : : StatusCode : : OK ) ;
wrapper - > outProms [ processedOutputs ] . setValue ( m . clone ( ) ) ;
}
catch ( . . . )
{
try {
wrapper - > outProms [ processedOutputs ] . setException ( std : : current_exception ( ) ) ;
} catch ( . . . ) {
CV_LOG_ERROR ( NULL , " DNN: Exception occured during async inference exception propagation " ) ;
}
}
}
}
catch ( . . . )
{
std : : exception_ptr e = std : : current_exception ( ) ;
for ( ; processedOutputs < wrapper - > outProms . size ( ) ; + + processedOutputs )
2019-04-20 02:01:19 +08:00
{
try {
2019-05-01 19:51:12 +08:00
wrapper - > outProms [ processedOutputs ] . setException ( e ) ;
2019-04-20 02:01:19 +08:00
} catch ( . . . ) {
CV_LOG_ERROR ( NULL , " DNN: Exception occured during async inference exception propagation " ) ;
}
}
}
wrapper - > isReady = true ;
}
2019-04-30 00:03:10 +08:00
) ;
2019-04-20 02:01:19 +08:00
}
if ( isAsync )
{
// Copy actual data to infer request's input blobs.
for ( const auto & it : cnn . getInputsInfo ( ) )
{
const std : : string & name = it . first ;
auto blobIt = allBlobs . find ( name ) ;
Mat srcMat = infEngineBlobToMat ( blobIt - > second ) ;
Mat dstMat = infEngineBlobToMat ( reqWrapper - > req . GetBlob ( name ) ) ;
srcMat . copyTo ( dstMat ) ;
}
// Set promises to output blobs wrappers.
reqWrapper - > makePromises ( outBlobsWrappers ) ;
reqWrapper - > isReady = false ;
reqWrapper - > req . StartAsync ( ) ;
}
else
{
reqWrapper - > req . Infer ( ) ;
}
2018-02-06 16:57:35 +08:00
}
2018-03-12 22:35:28 +08:00
// Wraps an IE blob's buffer as a Mat without copying. IE stores dims in
// reversed order, so they are flipped back; only FP32 and U8 are supported.
Mat infEngineBlobToMat(const InferenceEngine::Blob::Ptr& blob)
{
    // NOTE: Inference Engine sizes are reversed.
    std::vector<size_t> dims = blob->dims();
    std::vector<int> size(dims.rbegin(), dims.rend());

    int type = -1;
    switch (blob->precision())
    {
        case InferenceEngine::Precision::FP32: type = CV_32F; break;
        case InferenceEngine::Precision::U8: type = CV_8U; break;
        default:
            CV_Error(Error::StsNotImplemented, "Unsupported blob precision");
    }
    return Mat(size, type, (void*)blob->buffer());
}
2018-03-17 00:27:04 +08:00
bool InfEngineBackendLayer : : getMemoryShapes ( const std : : vector < MatShape > & inputs ,
const int requiredOutputs ,
std : : vector < MatShape > & outputs ,
std : : vector < MatShape > & internals ) const
{
2019-01-11 01:29:44 +08:00
InferenceEngine : : ICNNNetwork : : InputShapes inShapes = t_net . getInputShapes ( ) ;
InferenceEngine : : ICNNNetwork : : InputShapes : : iterator itr ;
bool equal_flag = true ;
size_t i = 0 ;
for ( itr = inShapes . begin ( ) ; itr ! = inShapes . end ( ) ; + + itr )
{
InferenceEngine : : SizeVector currentInShape ( inputs [ i ] . begin ( ) , inputs [ i ] . end ( ) ) ;
if ( itr - > second ! = currentInShape )
{
itr - > second = currentInShape ;
equal_flag = false ;
}
i + + ;
}
if ( ! equal_flag )
{
InferenceEngine : : CNNNetwork curr_t_net ( t_net ) ;
curr_t_net . reshape ( inShapes ) ;
}
std : : vector < size_t > dims = t_net . getOutputsInfo ( ) [ name ] - > getDims ( ) ;
outputs . push_back ( MatShape ( dims . begin ( ) , dims . end ( ) ) ) ;
2018-03-17 00:27:04 +08:00
return false ;
}
bool InfEngineBackendLayer : : supportBackend ( int backendId )
{
return backendId = = DNN_BACKEND_DEFAULT | |
2018-11-15 04:25:23 +08:00
( backendId = = DNN_BACKEND_INFERENCE_ENGINE & & haveInfEngine ( ) ) ;
2018-03-17 00:27:04 +08:00
}
void InfEngineBackendLayer : : forward ( InputArrayOfArrays inputs , OutputArrayOfArrays outputs ,
OutputArrayOfArrays internals )
{
CV_Error ( Error : : StsInternal , " Choose Inference Engine as a preferable backend. " ) ;
}
2019-02-11 22:13:39 +08:00
// Converts an FP32 blob to a freshly allocated FP16 blob with the same layout
// and dims, using OpenCV's convertFp16 on views over the two buffers.
InferenceEngine::Blob::Ptr convertFp16(const InferenceEngine::Blob::Ptr& blob)
{
    auto halfs = InferenceEngine::make_shared_blob<int16_t>(InferenceEngine::Precision::FP16, blob->layout(), blob->dims());
    halfs->allocate();

    // Zero-copy Mat views over both buffers; CV_16SC1 holds raw FP16 bits.
    Mat floatsData(1, blob->size(), CV_32F, blob->buffer());
    Mat halfsData(1, blob->size(), CV_16SC1, halfs->buffer());
    convertFp16(floatsData, halfsData);
    return halfs;
}
2019-02-14 18:30:30 +08:00
void addConstantData ( const std : : string & name , InferenceEngine : : Blob : : Ptr data ,
InferenceEngine : : Builder : : Layer & l )
{
2019-04-01 20:00:25 +08:00
# if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2019R1)
2019-02-14 18:30:30 +08:00
l . getParameters ( ) [ name ] = data ;
# else
l . addConstantData ( name , data ) ;
# endif
}
2018-02-06 16:57:35 +08:00
# endif // HAVE_INF_ENGINE
// Reports whether this OpenCV build was compiled with Inference Engine support.
bool haveInfEngine()
{
#ifdef HAVE_INF_ENGINE
    return true;
#else
    return false;
#endif  // HAVE_INF_ENGINE
}
2019-04-20 02:01:19 +08:00
void forwardInfEngine ( const std : : vector < Ptr < BackendWrapper > > & outBlobsWrappers ,
Ptr < BackendNode > & node , bool isAsync )
2018-02-06 16:57:35 +08:00
{
CV_Assert ( haveInfEngine ( ) ) ;
# ifdef HAVE_INF_ENGINE
CV_Assert ( ! node . empty ( ) ) ;
Ptr < InfEngineBackendNode > ieNode = node . dynamicCast < InfEngineBackendNode > ( ) ;
CV_Assert ( ! ieNode . empty ( ) ) ;
2019-04-20 02:01:19 +08:00
ieNode - > net - > forward ( outBlobsWrappers , isAsync ) ;
2018-02-06 16:57:35 +08:00
# endif // HAVE_INF_ENGINE
}
2018-09-18 16:21:08 +08:00
CV__DNN_EXPERIMENTAL_NS_BEGIN
// Drops the cached MYRIAD plugin so the next use reloads it — this effectively
// resets the VPU device. A no-op in builds without Inference Engine.
void resetMyriadDevice()
{
#ifdef HAVE_INF_ENGINE
    AutoLock lock(getInitializationMutex());
    getSharedPlugins().erase(InferenceEngine::TargetDevice::eMYRIAD);
#endif  // HAVE_INF_ENGINE
}
2019-03-29 21:42:58 +08:00
#ifdef HAVE_INF_ENGINE
// True when the detected/configured VPU is a Myriad X. Cached after first call.
bool isMyriadX()
{
    static bool myriadX = getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X;
    return myriadX;
}

// Resolves the VPU type: from the OPENCV_DNN_IE_VPU_TYPE configuration
// parameter, from a compile-time default, or by runtime Myriad2/X
// autodetection. The parameter name must be exact for the lookup to work.
static std::string getInferenceEngineVPUType_()
{
    static std::string param_vpu_type = utils::getConfigurationParameterString("OPENCV_DNN_IE_VPU_TYPE", "");
    if (param_vpu_type == "")
    {
#if defined(OPENCV_DNN_IE_VPU_TYPE_DEFAULT)
        param_vpu_type = OPENCV_DNN_IE_VPU_TYPE_DEFAULT;
#else
        CV_LOG_INFO(NULL, "OpenCV-DNN: running Inference Engine VPU autodetection: Myriad2/X. In case of other accelerator types specify 'OPENCV_DNN_IE_VPU_TYPE' parameter");
        try {
            bool isMyriadX_ = detectMyriadX_();
            if (isMyriadX_)
            {
                param_vpu_type = CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X;
            }
            else
            {
                param_vpu_type = CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_2;
            }
        }
        catch (...)
        {
            CV_LOG_WARNING(NULL, "OpenCV-DNN: Failed Inference Engine VPU autodetection. Specify 'OPENCV_DNN_IE_VPU_TYPE' parameter.");
            param_vpu_type.clear();
        }
#endif
    }
    CV_LOG_INFO(NULL, "OpenCV-DNN: Inference Engine VPU type='" << param_vpu_type << "'");
    return param_vpu_type;
}

// Public accessor; the detection runs once and the result is cached.
cv::String getInferenceEngineVPUType()
{
    static cv::String vpu_type = getInferenceEngineVPUType_();
    return vpu_type;
}
#else  // HAVE_INF_ENGINE
cv::String getInferenceEngineVPUType()
{
    CV_Error(Error::StsNotImplemented, "This OpenCV build doesn't include InferenceEngine support");
}
#endif  // HAVE_INF_ENGINE
2018-09-18 16:21:08 +08:00
CV__DNN_EXPERIMENTAL_NS_END
2018-02-06 16:57:35 +08:00
} } // namespace dnn, namespace cv