Mirror of https://github.com/opencv/opencv.git (synced 2025-06-10 11:03:03 +08:00)

Commit 57d3002ee1: Merge remote-tracking branch 'upstream/3.4' into merge-3.4
@@ -243,7 +243,7 @@ OCV_OPTION(WITH_AVFOUNDATION "Use AVFoundation for Video I/O (iOS/Mac)" ON
 OCV_OPTION(WITH_CAP_IOS "Enable iOS video capture" ON
 VISIBLE_IF IOS
 VERIFY HAVE_CAP_IOS)
-OCV_OPTION(WITH_CAROTENE "Use NVidia carotene acceleration library for ARM platform" ON
+OCV_OPTION(WITH_CAROTENE "Use NVidia carotene acceleration library for ARM platform" (NOT CV_DISABLE_OPTIMIZATION)
 VISIBLE_IF (ARM OR AARCH64) AND NOT IOS)
 OCV_OPTION(WITH_CPUFEATURES "Use cpufeatures Android library" ON
 VISIBLE_IF ANDROID
@@ -16,6 +16,19 @@ endif()
 
 # ======================
 
+if(WITH_OPENVINO)
+find_package(OpenVINO QUIET)
+if(OpenVINO_FOUND)
+message(STATUS "OpenVINO FOUND: ${OpenVINO_VERSION}")
+math(EXPR ver "${OpenVINO_VERSION_MAJOR} * 1000000 + ${OpenVINO_VERSION_MINOR} * 10000 + ${OpenVINO_VERSION_PATCH} * 100")
+ocv_add_external_target(openvino "" "openvino::runtime" "INF_ENGINE_RELEASE=${ver};HAVE_NGRAPH;HAVE_DNN_NGRAPH;HAVE_INF_ENGINE")
+set(HAVE_OPENVINO 1)
+return()
+endif()
+endif()
+
+# ======================
+
 macro(ocv_ie_find_extra_libraries find_prefix find_suffix)
 file(GLOB libraries "${INF_ENGINE_LIB_DIRS}/${find_prefix}inference_engine*${find_suffix}")
 foreach(full_path IN LISTS libraries)
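Note on the math(EXPR ...) line above: it packs the detected OpenVINO version into the single integer exported as INF_ENGINE_RELEASE, which the C++ code later compares against constants such as INF_ENGINE_RELEASE_2020_4 / 2021040000. A minimal C++ sketch of the same packing (the function name and the sample version are illustrative only):

    #include <cstdio>

    // Same packing as the CMake expression above:
    // MAJOR * 1000000 + MINOR * 10000 + PATCH * 100
    static int packInfEngineRelease(int major, int minor, int patch)
    {
        return major * 1000000 + minor * 10000 + patch * 100;
    }

    int main()
    {
        // e.g. a hypothetical OpenVINO 2021.4.2 -> 2021040200,
        // matching the INF_ENGINE_RELEASE_2021_4-style constants used in the sources.
        std::printf("%d\n", packInfEngineRelease(2021, 4, 2));
        return 0;
    }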
@@ -1619,6 +1619,7 @@ function(ocv_add_external_target name inc link def)
 endif()
 endfunction()
 
+
 # Returns the first non-interface target
 function(ocv_get_imported_target imported interface)
 set(__result "${interface}")
@@ -184,7 +184,8 @@ static inline MatShape concat(const MatShape& a, const MatShape& b)
 return c;
 }
 
-static inline std::string toString(const MatShape& shape, const String& name = "")
+template<typename _Tp>
+static inline std::string toString(const std::vector<_Tp>& shape, const String& name = "")
 {
 std::ostringstream ss;
 if (!name.empty())
@@ -195,11 +196,14 @@ static inline std::string toString(const MatShape& shape, const String& name = "
 ss << " ]";
 return ss.str();
 }
-static inline void print(const MatShape& shape, const String& name = "")
+template<typename _Tp>
+static inline void print(const std::vector<_Tp>& shape, const String& name = "")
 {
 std::cout << toString(shape, name) << std::endl;
 }
-static inline std::ostream& operator<<(std::ostream &out, const MatShape& shape)
+template<typename _Tp>
+static inline std::ostream& operator<<(std::ostream &out, const std::vector<_Tp>& shape)
 {
 out << toString(shape);
 return out;
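Note: with toString(), print() and operator<< templated on std::vector<_Tp> instead of taking MatShape directly, the same helpers now accept any element type. A small usage sketch, assuming these declarations come from opencv2/dnn/shape_utils.hpp as before (variable names are illustrative):

    #include <iostream>
    #include <vector>
    #include <opencv2/dnn.hpp>
    #include <opencv2/dnn/shape_utils.hpp>

    int main()
    {
        // MatShape is std::vector<int>, so the existing calls keep working...
        cv::dnn::MatShape shape = {1, 3, 224, 224};
        std::cout << cv::dnn::toString(shape, "input") << std::endl;

        // ...and other element types are now accepted by the same templates.
        std::vector<size_t> dims = {10, 20};
        cv::dnn::print(dims, "dims");  // prints something like: dims [ 10 20 ]

        using cv::dnn::operator<<;     // bring the templated stream operator into scope
        std::cout << dims << std::endl;
        return 0;
    }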
@@ -2120,10 +2120,15 @@ struct Net::Impl : public detail::NetImplBase
 
 
 #ifdef HAVE_DNN_NGRAPH
+/** mark input pins as outputs from other subnetworks
+* FIXIT must be done by DNN engine not ngraph.
+*/
 void addNgraphOutputs(LayerData &ld)
 {
 CV_TRACE_FUNCTION();
 
+CV_LOG_DEBUG(NULL, "DNN/IE: layer of new subnet: " << ld.name << "@" << ld.type);
+
 Ptr<InfEngineNgraphNet> layerNet;
 auto it = ld.backendNodes.find(preferableBackend);
 if (it != ld.backendNodes.end())
@@ -2147,8 +2152,8 @@ struct Net::Impl : public detail::NetImplBase
 CV_Assert(!ieInpNode.empty()); CV_Assert(!ieInpNode->net.empty());
 if (layerNet != ieInpNode->net)
 {
-ieInpNode->net->addOutput(ieInpNode->node->get_friendly_name());
-ieInpNode->net->setUnconnectedNodes(ieInpNode);
+CV_LOG_DEBUG(NULL, "DNN/IE: pin output between subnets: " << ieInpNode->node->get_friendly_name());
+ieInpNode->net->addOutput(ieInpNode);
 }
 }
 }
@@ -2247,13 +2252,19 @@ struct Net::Impl : public detail::NetImplBase
 {
 LayerData& ld = it->second;
 
+CV_LOG_DEBUG(NULL, "DNN/IE: processing layer " << ld.name << "@" << ld.type << " (" << ld.id << ") ...");
+
 if (ld.id == 0 && ld.skip)
+{
+CV_LOG_DEBUG(NULL, "DNN/IE: SKIP!");
 continue;
+}
 
 bool fused = ld.skip;
 Ptr<Layer> layer = ld.layerInstance;
 if (!fused && !layer->supportBackend(preferableBackend))
 {
+CV_LOG_DEBUG(NULL, "DNN/IE: NOT supported!");
 bool customizable = ld.id != 0 && supportsCPUFallback;
 
 // TODO: there is a bug in Myriad plugin with custom layers shape infer.
@@ -2281,6 +2292,7 @@ struct Net::Impl : public detail::NetImplBase
 
 if (!customizable)
 {
+CV_LOG_DEBUG(NULL, "DNN/IE: NOT customizable!");
 addNgraphOutputs(ld);
 net = Ptr<InfEngineNgraphNet>();
 layer->preferableTarget = DNN_TARGET_CPU;
@@ -2292,7 +2304,7 @@ struct Net::Impl : public detail::NetImplBase
 if (!inpNode.empty()) {
 Ptr<InfEngineNgraphNode> ieNode = inpNode.dynamicCast<InfEngineNgraphNode>();
 CV_Assert(!ieNode.empty());
-ieNode->net->setUnconnectedNodes(ieNode);
+ieNode->net->addOutput(ieNode);
 }
 }
 continue;
@@ -2380,9 +2392,22 @@ struct Net::Impl : public detail::NetImplBase
 continue;
 
 auto ieInpNode = inputNodes[i].dynamicCast<InfEngineNgraphNode>();
-CV_Assert(oid < ieInpNode->node->get_output_size());
+const auto& ngraph_input_node = ieInpNode->node;
+CV_LOG_DEBUG(NULL, "DNN/IE: bind output port " << lid << ":" << oid << " (" << ngraph_input_node->get_friendly_name() << ":" << ngraph_input_node->get_type_info().name << ")");
+
+// Handle parameters from other subnets. Output port is not used in this case
+if ((ngraph::op::is_parameter(ngraph_input_node) || ngraph::op::is_constant(ngraph_input_node)) &&
+ngraph_input_node->get_output_size() == 1)
+{
+inputNodes[i] = Ptr<BackendNode>(new InfEngineNgraphNode(ngraph_input_node));
+continue;
+}
+CV_CheckLT((size_t)oid, ngraph_input_node->get_output_size(), "");
 #if INF_ENGINE_VER_MAJOR_GT(INF_ENGINE_RELEASE_2020_4)
-inputNodes[i] = Ptr<BackendNode>(new InfEngineNgraphNode(ieInpNode->node));
+// FIXIT refactor ".initNgraph()" API to use Output<Node>
+// WA: use Concat to emulate Identity operation with requested output port
+auto oid_node = std::make_shared<ngraph::op::Concat>(ngraph::OutputVector {ngraph_input_node->output(oid)}, 0);
+inputNodes[i] = Ptr<BackendNode>(new InfEngineNgraphNode(oid_node));
 #elif INF_ENGINE_VER_MAJOR_GT(INF_ENGINE_RELEASE_2020_3)
 inputNodes[i] = Ptr<BackendNode>(new InfEngineNgraphNode(ieInpNode->node->get_output_as_single_output_node(oid)));
 #else
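Note on the Concat workaround above: as the FIXIT comment says, initNgraph() still passes std::shared_ptr<ngraph::Node> around rather than ngraph::Output<Node>, so a specific output port has to be wrapped back into a standalone node; a Concat over exactly one input leaves the data untouched and therefore acts as an Identity for the selected port. A minimal sketch of the idea under the ngraph API used in this patch (selectOutputPort is a hypothetical helper, not OpenCV code):

    #include <cassert>
    #include <memory>
    #include <ngraph/ngraph.hpp>

    // Wrap a single output port of a multi-output node into a standalone node.
    // A Concat over exactly one input does not change the data, so it behaves
    // like an Identity that "selects" the requested port.
    static std::shared_ptr<ngraph::Node> selectOutputPort(
            const std::shared_ptr<ngraph::Node>& multiOutputOp, size_t oid)
    {
        assert(oid < multiOutputOp->get_output_size());
        return std::make_shared<ngraph::op::Concat>(
                ngraph::OutputVector{ multiOutputOp->output(oid) }, 0 /*axis*/);
    }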
@@ -2392,21 +2417,30 @@ struct Net::Impl : public detail::NetImplBase
 
 if (layer->supportBackend(preferableBackend))
 {
+CV_LOG_DEBUG(NULL, "DNN/IE: wrap layer " << ld.name << "@" << ld.type << " - outputs: " << ld.outputBlobsWrappers.size());
 node = layer->initNgraph(ld.inputBlobsWrappers, inputNodes);
+#if 0 // FIXIT doesn't work with multiple outputs (set name is applied to the same node)
 for (int i = 0; i < ld.outputBlobsWrappers.size(); ++i)
 {
 InferenceEngine::DataPtr dataPtr = ngraphDataNode(ld.outputBlobsWrappers[i]);
 node.dynamicCast<InfEngineNgraphNode>()->setName(dataPtr->getName());
 }
+#else
+node.dynamicCast<InfEngineNgraphNode>()->setName(layer->name);
+#endif
 }
 else
 {
+CV_LOG_DEBUG(NULL, "DNN/IE: layer is not supported: " << ld.name << "@" << ld.type);
 node = Ptr<BackendNode>(new InfEngineNgraphNode(inputNodes,
 ld.layerInstance, ld.inputBlobs, ld.outputBlobs, ld.internals));
 }
 }
 else if (node.empty())
+{
+CV_LOG_DEBUG(NULL, "DNN/IE: node.empty() bypass...");
 continue;
+}
 
 ld.backendNodes[preferableBackend] = node;
 
@@ -2414,15 +2448,11 @@ struct Net::Impl : public detail::NetImplBase
 CV_Assert(!ieNode.empty());
 ieNode->net = net;
 
-if (ld.consumers.empty()) {
-// TF EAST_text_detection
-ieNode->net->setUnconnectedNodes(ieNode);
-}
 for (const auto& pin : blobsToKeep_)
 {
 if (pin.lid == ld.id)
 {
-ieNode->net->addOutput(ieNode->node->get_friendly_name());
+ieNode->net->addOutput(ieNode);
 break;
 }
 }
@@ -2453,7 +2483,7 @@ struct Net::Impl : public detail::NetImplBase
 
 if (!ieNode->net->isInitialized())
 {
-ieNode->net->setUnconnectedNodes(ieNode);
+ieNode->net->addOutput(ieNode);
 ieNode->net->createNet((Target)preferableTarget);
 ld.skip = false;
 }
@@ -2967,8 +2997,15 @@ struct Net::Impl : public detail::NetImplBase
 preferableBackend != DNN_BACKEND_INFERENCE_ENGINE_NGRAPH))
 return;
 
+#if 0 // FIXIT mode without fusion is broken due to unsupported layers and handling of "custom" nodes
+if (preferableBackend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
+return;
+#endif
+
 // scan through all the layers. If there is convolution layer followed by the activation layer,
 // we try to embed this activation into the convolution and disable separate execution of the activation
+
+// FIXIT replace by layersToKeep to avoid hacks like "LayerPin(lid, 0)"
 std::set<LayerPin> pinsToKeep(blobsToKeep_.begin(),
 blobsToKeep_.end());
 for (MapIdToLayerData::const_iterator it = layers.begin(); it != layers.end(); it++)
@@ -2993,6 +3030,13 @@ struct Net::Impl : public detail::NetImplBase
 LayerPin lpNext(ld.consumers[0].lid, 0);
 while (nextData)
 {
+#ifdef HAVE_INF_ENGINE
+if (preferableBackend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && pinsToKeep.count(lpNext) != 0)
+{
+CV_LOG_DEBUG(NULL, "DNN/IE: skip fusing with 'output' node: " << nextData->name << "@" << nextData->type);
+break;
+}
+#endif
 /* we use `tryFuse` member of convolution layer to fuse eltwise later
 * it's not intended to be fused here; hence, we stop when we encounter eltwise
 */
@@ -330,7 +330,7 @@ public:
 InfEngineNgraphNode::InfEngineNgraphNode(std::shared_ptr<ngraph::Node>&& _node)
 : BackendNode(DNN_BACKEND_INFERENCE_ENGINE_NGRAPH), node(std::move(_node)) {}
 
-InfEngineNgraphNode::InfEngineNgraphNode(std::shared_ptr<ngraph::Node>& _node)
+InfEngineNgraphNode::InfEngineNgraphNode(const std::shared_ptr<ngraph::Node>& _node)
 : BackendNode(DNN_BACKEND_INFERENCE_ENGINE_NGRAPH), node(_node) {}
 
 InfEngineNgraphNode::InfEngineNgraphNode(const std::vector<Ptr<BackendNode> >& nodes,
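Note: taking the copying constructor by const std::shared_ptr<ngraph::Node>& matters because a non-const lvalue reference cannot bind to const references or temporaries, which is exactly what the new call sites pass (the const auto& alias of ieInpNode->node, and oid_node, which needs an implicit conversion to shared_ptr<Node>). A standalone sketch of the difference (Holder and Dummy are illustrative stand-ins, not OpenCV types):

    #include <memory>

    struct Dummy {};

    struct Holder
    {
        // Binds to lvalues, const references and temporaries alike.
        explicit Holder(const std::shared_ptr<Dummy>& p) : ptr(p) {}
        // explicit Holder(std::shared_ptr<Dummy>& p);  // would reject both calls below
        std::shared_ptr<Dummy> ptr;
    };

    int main()
    {
        const std::shared_ptr<Dummy>& alias = std::make_shared<Dummy>();  // like 'const auto& ngraph_input_node = ...'
        Holder a(alias);                      // OK with const&, error with non-const&
        Holder b(std::make_shared<Dummy>());  // temporary: OK with const&, error with non-const&
        (void)a; (void)b;
        return 0;
    }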
@@ -379,16 +379,21 @@ InfEngineNgraphNet::InfEngineNgraphNet(detail::NetImplBase& netImpl, InferenceEn
 device_name = "CPU";
 }
 
-void InfEngineNgraphNet::addOutput(const std::string& name)
+void InfEngineNgraphNet::addOutput(const Ptr<InfEngineNgraphNode>& node)
 {
-requestedOutputs.push_back(name);
+CV_Assert(node);
+CV_Assert(node->node);
+const std::string& name = node->node->get_friendly_name();
+requestedOutputs.insert({name, node});
 }
 
 void InfEngineNgraphNet::setNodePtr(std::shared_ptr<ngraph::Node>* ptr) {
 all_nodes.emplace((*ptr)->get_friendly_name(), ptr);
 }
 
-void InfEngineNgraphNet::release() {
+void InfEngineNgraphNet::release()
+{
+// FIXIT release should not be conditional, release ALL
 for (auto& node : components.back()) {
 #if INF_ENGINE_VER_MAJOR_GT(INF_ENGINE_RELEASE_2020_4)
 if (!(ngraph::op::is_parameter(node) || ngraph::op::is_output(node) || ngraph::op::is_constant(node)) ) {
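Note: together with the header change further below (requestedOutputs becomes std::unordered_map<std::string, Ptr<InfEngineNgraphNode> >), addOutput() now deduplicates outputs by friendly name and keeps the node itself available for createNet()/init(). A standalone sketch of that register-then-look-up pattern using plain standard types (names are illustrative):

    #include <iostream>
    #include <memory>
    #include <string>
    #include <unordered_map>

    struct FakeNode { std::string friendly_name; };

    int main()
    {
        std::unordered_map<std::string, std::shared_ptr<FakeNode>> requestedOutputs;

        auto n = std::make_shared<FakeNode>(FakeNode{"conv1"});

        // addOutput(): key by the node's friendly name; repeated registration is a no-op.
        requestedOutputs.insert({n->friendly_name, n});
        requestedOutputs.insert({n->friendly_name, n});  // duplicate key, ignored by insert()

        // init(): O(1) lookup by name replaces the old std::find over a vector of names.
        auto it = requestedOutputs.find("conv1");
        if (it != requestedOutputs.end())
        {
            std::cout << "output node: " << it->second->friendly_name << std::endl;
            requestedOutputs.erase(it);
        }
        return 0;
    }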
@@ -397,7 +402,6 @@ void InfEngineNgraphNet::setNodePtr(std::shared_ptr<ngraph::Node>* ptr) {
 #endif
 auto it = all_nodes.find(node->get_friendly_name());
 if (it != all_nodes.end()) {
-unconnectedNodes.erase(*(it->second));
 it->second->reset();
 all_nodes.erase(it);
 }
@@ -422,7 +426,8 @@ void InfEngineNgraphNet::dfs(std::shared_ptr<ngraph::Node>& node,
 }
 }
 
-int InfEngineNgraphNet::getNumComponents() {
+int InfEngineNgraphNet::getNumComponents()
+{
 if (!components.empty()) {
 return components.size();
 }
@@ -445,17 +450,21 @@ int InfEngineNgraphNet::getNumComponents() {
 void InfEngineNgraphNet::createNet(Target targetId) {
 if (!hasNetOwner)
 {
-CV_Assert(!unconnectedNodes.empty());
+CV_Assert(!requestedOutputs.empty());
 ngraph::ResultVector outs;
-for (auto& node : unconnectedNodes)
+for (auto output_node_it = requestedOutputs.begin(); output_node_it != requestedOutputs.end(); ++output_node_it)
 {
-auto out = std::make_shared<ngraph::op::Result>(node);
+CV_LOG_DEBUG(NULL, "DNN/NGRAPH: Add 'Result' output: " << output_node_it->first);
+CV_Assert(output_node_it->second);
+auto out = std::make_shared<ngraph::op::Result>(output_node_it->second->node);
 outs.push_back(out);
 }
 CV_Assert_N(!inputs_vec.empty(), !outs.empty());
 ngraph_function = std::make_shared<ngraph::Function>(outs, inputs_vec);
 
 int num_comp = getNumComponents();
+CV_LOG_DEBUG(NULL, "DNN/IE: number of subgraphs: " << num_comp);
 if (num_comp > 1) {
 for (int i = num_comp - 1; i >= 0; --i) {
 ngraph::ResultVector outputs;
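Note on what createNet() assembles here: an ngraph::Function is essentially a set of Result nodes (the requested outputs) over a set of Parameter nodes (the network inputs). A minimal standalone sketch under the pre-OpenVINO-2.0 ngraph API assumed by this code (the Relu op and the shape are for illustration only):

    #include <memory>
    #include <ngraph/ngraph.hpp>

    int main()
    {
        // One input (Parameter), one computation node, one requested output (Result).
        auto param = std::make_shared<ngraph::op::Parameter>(
                ngraph::element::f32, ngraph::Shape{1, 3, 224, 224});
        auto relu = std::make_shared<ngraph::op::Relu>(param);
        auto result = std::make_shared<ngraph::op::Result>(relu);

        // Counterpart of: ngraph_function = std::make_shared<ngraph::Function>(outs, inputs_vec);
        auto fn = std::make_shared<ngraph::Function>(
                ngraph::ResultVector{result}, ngraph::ParameterVector{param});

        return fn->get_results().size() == 1 ? 0 : 1;
    }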
@@ -466,6 +475,7 @@ void InfEngineNgraphNet::createNet(Target targetId) {
 #else
 if (node->is_parameter()) {
 #endif
+CV_LOG_DEBUG(NULL, "DNN/IE: subgraph[" << i << "]: +input[" << inps.size() << "] = '" << node->get_friendly_name() << "'");
 auto parameter = std::dynamic_pointer_cast<ngraph::op::Parameter>(node);
 inps.push_back(parameter);
 }
@@ -474,10 +484,12 @@ void InfEngineNgraphNet::createNet(Target targetId) {
 #else
 else if (node->is_output()) {
 #endif
+CV_LOG_DEBUG(NULL, "DNN/IE: subgraph[" << i << "]: +output[" << outputs.size() << "] = '" << node->get_friendly_name() << "'");
 auto result = std::dynamic_pointer_cast<ngraph::op::Result>(node);
 outputs.push_back(result);
 }
 }
+CV_LOG_DEBUG(NULL, "DNN/IE: subgraph[" << i << ": nodes=" << components.back().size() << " inputs=" << inps.size() << " outputs=" << outputs.size());
 isInit = false;
 CV_Assert_N(!inps.empty(), !outputs.empty());
 ngraph_function = std::make_shared<ngraph::Function>(outputs, inps);
@@ -574,7 +586,7 @@ void InfEngineNgraphNet::init(Target targetId)
 auto node = ngraph_function->output(i).get_node();
 for (size_t j = 0; j < node->get_input_size(); ++j) {
 std::string name = node->input_value(j).get_node()->get_friendly_name();
-auto iter = std::find(requestedOutputs.begin(), requestedOutputs.end(), name);
+auto iter = requestedOutputs.find(name);
 if (iter != requestedOutputs.end()) {
 requestedOutputs.erase(iter);
 cnn.addOutput(name);
@@ -582,10 +594,6 @@ void InfEngineNgraphNet::init(Target targetId)
 }
 }
 }
-for (const auto& name : requestedOutputs)
-{
-cnn.addOutput(name);
-}
 
 for (const auto& it : cnn.getInputsInfo())
 {
@@ -630,9 +638,6 @@ ngraph::ParameterVector InfEngineNgraphNet::setInputs(const std::vector<cv::Mat>
 return current_inp;
 }
 
-void InfEngineNgraphNet::setUnconnectedNodes(Ptr<InfEngineNgraphNode>& node) {
-unconnectedNodes.insert(node->node);
-}
 
 void InfEngineNgraphNet::initPlugin(InferenceEngine::CNNNetwork& net)
 {
@@ -732,10 +737,10 @@ void InfEngineNgraphNet::initPlugin(InferenceEngine::CNNNetwork& net)
 }
 }
 }
-if (isHetero)
-netExec = ie.LoadNetwork(net, "HETERO:" + device_name + ",CPU", config);
-else
-netExec = ie.LoadNetwork(net, device_name, config);
+std::string ieDevice = isHetero ? ("HETERO:" + device_name + ",CPU") : device_name;
+CV_LOG_INFO(NULL, "DNN/IE: Calling LoadNetwork(device=" << ieDevice << ")...");
+netExec = ie.LoadNetwork(net, ieDevice, config);
 }
 catch (const std::exception& ex)
 {
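Note: the "HETERO:" prefix used above is the Inference Engine heterogeneous plugin syntax; "HETERO:<device>,CPU" asks IE to run each layer on the first listed device that supports it and to fall back to CPU otherwise. A hedged sketch of the call shape, assuming the classic InferenceEngine::Core API this code targets (the function name, devices and config are illustrative, not OpenCV code):

    #include <map>
    #include <string>
    #include <inference_engine.hpp>

    // Stand-alone illustration of the LoadNetwork call pattern from the patch above.
    InferenceEngine::ExecutableNetwork loadWithOptionalFallback(
            InferenceEngine::Core& ie,
            InferenceEngine::CNNNetwork& net,
            const std::string& device_name,
            bool isHetero,
            const std::map<std::string, std::string>& config)
    {
        // Same device-string construction as in the patch.
        const std::string ieDevice = isHetero ? ("HETERO:" + device_name + ",CPU") : device_name;
        return ie.LoadNetwork(net, ieDevice, config);
    }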
|
@ -37,7 +37,7 @@ public:
|
|||||||
InfEngineNgraphNet(detail::NetImplBase& netImpl);
|
InfEngineNgraphNet(detail::NetImplBase& netImpl);
|
||||||
InfEngineNgraphNet(detail::NetImplBase& netImpl, InferenceEngine::CNNNetwork& net);
|
InfEngineNgraphNet(detail::NetImplBase& netImpl, InferenceEngine::CNNNetwork& net);
|
||||||
|
|
||||||
void addOutput(const std::string& name);
|
void addOutput(const Ptr<InfEngineNgraphNode>& node);
|
||||||
|
|
||||||
bool isInitialized();
|
bool isInitialized();
|
||||||
void init(Target targetId);
|
void init(Target targetId);
|
||||||
@ -47,7 +47,6 @@ public:
|
|||||||
void initPlugin(InferenceEngine::CNNNetwork& net);
|
void initPlugin(InferenceEngine::CNNNetwork& net);
|
||||||
ngraph::ParameterVector setInputs(const std::vector<cv::Mat>& inputs, const std::vector<std::string>& names);
|
ngraph::ParameterVector setInputs(const std::vector<cv::Mat>& inputs, const std::vector<std::string>& names);
|
||||||
|
|
||||||
void setUnconnectedNodes(Ptr<InfEngineNgraphNode>& node);
|
|
||||||
void addBlobs(const std::vector<cv::Ptr<BackendWrapper> >& ptrs);
|
void addBlobs(const std::vector<cv::Ptr<BackendWrapper> >& ptrs);
|
||||||
|
|
||||||
void createNet(Target targetId);
|
void createNet(Target targetId);
|
||||||
@ -88,8 +87,7 @@ public:
|
|||||||
|
|
||||||
InferenceEngine::CNNNetwork cnn;
|
InferenceEngine::CNNNetwork cnn;
|
||||||
bool hasNetOwner;
|
bool hasNetOwner;
|
||||||
std::vector<std::string> requestedOutputs;
|
std::unordered_map<std::string, Ptr<InfEngineNgraphNode> > requestedOutputs;
|
||||||
std::unordered_set<std::shared_ptr<ngraph::Node>> unconnectedNodes;
|
|
||||||
|
|
||||||
std::map<std::string, InferenceEngine::TensorDesc> outputsDesc;
|
std::map<std::string, InferenceEngine::TensorDesc> outputsDesc;
|
||||||
};
|
};
|
||||||
@ -102,7 +100,7 @@ public:
|
|||||||
std::vector<Mat>& internals);
|
std::vector<Mat>& internals);
|
||||||
|
|
||||||
InfEngineNgraphNode(std::shared_ptr<ngraph::Node>&& _node);
|
InfEngineNgraphNode(std::shared_ptr<ngraph::Node>&& _node);
|
||||||
InfEngineNgraphNode(std::shared_ptr<ngraph::Node>& _node);
|
InfEngineNgraphNode(const std::shared_ptr<ngraph::Node>& _node);
|
||||||
|
|
||||||
void setName(const std::string& name);
|
void setName(const std::string& name);
|
||||||
|
|
||||||
|
@ -1771,26 +1771,11 @@ CASE(test_spacetodepth)
|
|||||||
CASE(test_spacetodepth_example)
|
CASE(test_spacetodepth_example)
|
||||||
// no filter
|
// no filter
|
||||||
CASE(test_split_equal_parts_1d)
|
CASE(test_split_equal_parts_1d)
|
||||||
#if INF_ENGINE_VER_MAJOR_EQ(2021040000)
|
// no filter
|
||||||
SKIP_CPU;
|
|
||||||
// MYRIAD is ok
|
|
||||||
SKIP_OPENCL;
|
|
||||||
SKIP_OPENCL_FP16;
|
|
||||||
#endif
|
|
||||||
CASE(test_split_equal_parts_2d)
|
CASE(test_split_equal_parts_2d)
|
||||||
#if INF_ENGINE_VER_MAJOR_EQ(2021040000)
|
// no filter
|
||||||
SKIP_CPU;
|
|
||||||
// MYRIAD is ok
|
|
||||||
SKIP_OPENCL;
|
|
||||||
SKIP_OPENCL_FP16;
|
|
||||||
#endif
|
|
||||||
CASE(test_split_equal_parts_default_axis)
|
CASE(test_split_equal_parts_default_axis)
|
||||||
#if INF_ENGINE_VER_MAJOR_EQ(2021040000)
|
// no filter
|
||||||
SKIP_CPU;
|
|
||||||
// MYRIAD is ok
|
|
||||||
SKIP_OPENCL;
|
|
||||||
SKIP_OPENCL_FP16;
|
|
||||||
#endif
|
|
||||||
CASE(test_split_variable_parts_1d)
|
CASE(test_split_variable_parts_1d)
|
||||||
// no filter
|
// no filter
|
||||||
CASE(test_split_variable_parts_2d)
|
CASE(test_split_variable_parts_2d)
|