From 062f305d1aa9d11b1b81c8231102d820738858d8 Mon Sep 17 00:00:00 2001 From: Alexander Alekhin Date: Thu, 3 Feb 2022 20:23:17 +0000 Subject: [PATCH 1/4] dnn: don't fuse 'outputs' with OpenVINO backend --- modules/dnn/src/dnn.cpp | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/modules/dnn/src/dnn.cpp b/modules/dnn/src/dnn.cpp index f03fa20626..ff16b31590 100644 --- a/modules/dnn/src/dnn.cpp +++ b/modules/dnn/src/dnn.cpp @@ -2412,8 +2412,15 @@ struct Net::Impl : public detail::NetImplBase preferableBackend != DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)) return; +#if 0 // FIXIT mode without fusion is broken due to unsupported layers and handling of "custom" nodes + if (preferableBackend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH) + return; +#endif + // scan through all the layers. If there is convolution layer followed by the activation layer, // we try to embed this activation into the convolution and disable separate execution of the activation + + // FIXIT replace by layersToKeep to avoid hacks like "LayerPin(lid, 0)" std::set pinsToKeep(blobsToKeep_.begin(), blobsToKeep_.end()); for (MapIdToLayerData::const_iterator it = layers.begin(); it != layers.end(); it++) @@ -2438,6 +2445,13 @@ struct Net::Impl : public detail::NetImplBase LayerPin lpNext(ld.consumers[0].lid, 0); while (nextData) { +#ifdef HAVE_INF_ENGINE + if (preferableBackend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && pinsToKeep.count(lpNext) != 0) + { + CV_LOG_DEBUG(NULL, "DNN/IE: skip fusing with 'output' node: " << nextData->name << "@" << nextData->type); + break; + } +#endif Ptr nextLayer = nextData->layerInstance; if (currLayer->tryFuse(nextLayer)) { From 67978b5746567d56827c8db5e2e08ba5351916fb Mon Sep 17 00:00:00 2001 From: Alexander Alekhin Date: Sat, 5 Feb 2022 14:36:57 +0000 Subject: [PATCH 2/4] dnn(ngraph): add debugging messages --- modules/dnn/include/opencv2/dnn/shape_utils.hpp | 10 +++++++--- modules/dnn/src/dnn.cpp | 16 ++++++++++++++++ modules/dnn/src/ie_ngraph.cpp | 13 
+++++++++---- 3 files changed, 32 insertions(+), 7 deletions(-) diff --git a/modules/dnn/include/opencv2/dnn/shape_utils.hpp b/modules/dnn/include/opencv2/dnn/shape_utils.hpp index b77333bd48..fafd1bf725 100644 --- a/modules/dnn/include/opencv2/dnn/shape_utils.hpp +++ b/modules/dnn/include/opencv2/dnn/shape_utils.hpp @@ -184,7 +184,8 @@ static inline MatShape concat(const MatShape& a, const MatShape& b) return c; } -static inline std::string toString(const MatShape& shape, const String& name = "") +template +static inline std::string toString(const std::vector<_Tp>& shape, const String& name = "") { std::ostringstream ss; if (!name.empty()) @@ -195,11 +196,14 @@ static inline std::string toString(const MatShape& shape, const String& name = " ss << " ]"; return ss.str(); } -static inline void print(const MatShape& shape, const String& name = "") + +template +static inline void print(const std::vector<_Tp>& shape, const String& name = "") { std::cout << toString(shape, name) << std::endl; } -static inline std::ostream& operator<<(std::ostream &out, const MatShape& shape) +template +static inline std::ostream& operator<<(std::ostream &out, const std::vector<_Tp>& shape) { out << toString(shape); return out; diff --git a/modules/dnn/src/dnn.cpp b/modules/dnn/src/dnn.cpp index ff16b31590..672b328993 100644 --- a/modules/dnn/src/dnn.cpp +++ b/modules/dnn/src/dnn.cpp @@ -1941,6 +1941,8 @@ struct Net::Impl : public detail::NetImplBase { CV_TRACE_FUNCTION(); + CV_LOG_DEBUG(NULL, "DNN/IE: layer of new subnet: " << ld.name << "@" << ld.type); + Ptr layerNet; auto it = ld.backendNodes.find(preferableBackend); if (it != ld.backendNodes.end()) @@ -1964,6 +1966,7 @@ struct Net::Impl : public detail::NetImplBase CV_Assert(!ieInpNode.empty()); CV_Assert(!ieInpNode->net.empty()); if (layerNet != ieInpNode->net) { + CV_LOG_DEBUG(NULL, "DNN/IE: pin output between subnets: " << ieInpNode->node->get_friendly_name()); ieInpNode->net->addOutput(ieInpNode->node->get_friendly_name()); 
ieInpNode->net->setUnconnectedNodes(ieInpNode); } @@ -2064,13 +2067,19 @@ struct Net::Impl : public detail::NetImplBase { LayerData& ld = it->second; + CV_LOG_DEBUG(NULL, "DNN/IE: processing layer " << ld.name << "@" << ld.type << " (" << ld.id << ") ..."); + if (ld.id == 0 && ld.skip) + { + CV_LOG_DEBUG(NULL, "DNN/IE: SKIP!"); continue; + } bool fused = ld.skip; Ptr layer = ld.layerInstance; if (!fused && !layer->supportBackend(preferableBackend)) { + CV_LOG_DEBUG(NULL, "DNN/IE: NOT supported!"); bool customizable = ld.id != 0 && supportsCPUFallback; // TODO: there is a bug in Myriad plugin with custom layers shape infer. @@ -2097,6 +2106,7 @@ struct Net::Impl : public detail::NetImplBase if (!customizable) { + CV_LOG_DEBUG(NULL, "DNN/IE: NOT customizable!"); addNgraphOutputs(ld); net = Ptr(); layer->preferableTarget = DNN_TARGET_CPU; @@ -2221,7 +2231,9 @@ struct Net::Impl : public detail::NetImplBase if (layer->supportBackend(preferableBackend)) { + CV_LOG_DEBUG(NULL, "DNN/IE: wrap layer " << ld.name << "@" << ld.type << " - outputs: " << ld.outputBlobsWrappers.size()); node = layer->initNgraph(ld.inputBlobsWrappers, inputNodes); + // FIXIT doesn't work with multiple outputs (set name is applied to the same node) for (int i = 0; i < ld.outputBlobsWrappers.size(); ++i) { InferenceEngine::DataPtr dataPtr = ngraphDataNode(ld.outputBlobsWrappers[i]); @@ -2230,12 +2242,16 @@ struct Net::Impl : public detail::NetImplBase } else { + CV_LOG_DEBUG(NULL, "DNN/IE: layer is not supported: " << ld.name << "@" << ld.type); node = Ptr(new InfEngineNgraphNode(inputNodes, ld.layerInstance, ld.inputBlobs, ld.outputBlobs, ld.internals)); } } else if (node.empty()) + { + CV_LOG_DEBUG(NULL, "DNN/IE: node.empty() bypass..."); continue; + } ld.backendNodes[preferableBackend] = node; diff --git a/modules/dnn/src/ie_ngraph.cpp b/modules/dnn/src/ie_ngraph.cpp index 916f5e0a18..de735cc32c 100644 --- a/modules/dnn/src/ie_ngraph.cpp +++ b/modules/dnn/src/ie_ngraph.cpp @@ -449,6 +449,7 @@ 
void InfEngineNgraphNet::createNet(Target targetId) { ngraph::ResultVector outs; for (auto& node : unconnectedNodes) { + CV_LOG_DEBUG(NULL, "DNN/IE: +network_output[" << outs.size() << "]: name='" << node->get_friendly_name() << "'"); auto out = std::make_shared(node); outs.push_back(out); } @@ -456,6 +457,7 @@ void InfEngineNgraphNet::createNet(Target targetId) { ngraph_function = std::make_shared(outs, inputs_vec); int num_comp = getNumComponents(); + CV_LOG_DEBUG(NULL, "DNN/IE: number of subgraphs: " << num_comp); if (num_comp > 1) { for (int i = num_comp - 1; i >= 0; --i) { ngraph::ResultVector outputs; @@ -466,6 +468,7 @@ void InfEngineNgraphNet::createNet(Target targetId) { #else if (node->is_parameter()) { #endif + CV_LOG_DEBUG(NULL, "DNN/IE: subgraph[" << i << "]: +input[" << inps.size() << "] = '" << node->get_friendly_name() << "'"); auto parameter = std::dynamic_pointer_cast(node); inps.push_back(parameter); } @@ -474,10 +477,12 @@ void InfEngineNgraphNet::createNet(Target targetId) { #else else if (node->is_output()) { #endif + CV_LOG_DEBUG(NULL, "DNN/IE: subgraph[" << i << "]: +output[" << outputs.size() << "] = '" << node->get_friendly_name() << "'"); auto result = std::dynamic_pointer_cast(node); outputs.push_back(result); } } + CV_LOG_DEBUG(NULL, "DNN/IE: subgraph[" << i << ": nodes=" << components.back().size() << " inputs=" << inps.size() << " outputs=" << outputs.size()); isInit = false; CV_Assert_N(!inps.empty(), !outputs.empty()); ngraph_function = std::make_shared(outputs, inps); @@ -729,10 +734,10 @@ void InfEngineNgraphNet::initPlugin(InferenceEngine::CNNNetwork& net) } } } - if (isHetero) - netExec = ie.LoadNetwork(net, "HETERO:" + device_name + ",CPU", config); - else - netExec = ie.LoadNetwork(net, device_name, config); + + std::string ieDevice = isHetero ? 
("HETERO:" + device_name + ",CPU") : device_name; + CV_LOG_INFO(NULL, "DNN/IE: Calling LoadNetwork(device=" << ieDevice << ")..."); + netExec = ie.LoadNetwork(net, ieDevice, config); } catch (const std::exception& ex) { From b57ff73086540a0039408ec562efa93619f7fef1 Mon Sep 17 00:00:00 2001 From: Alexander Alekhin Date: Sat, 5 Feb 2022 02:51:44 +0000 Subject: [PATCH 3/4] dnn(ngraph): fix outputs handling, drop 'unconnected' logic --- modules/dnn/src/dnn.cpp | 16 +++++++--------- modules/dnn/src/ie_ngraph.cpp | 34 +++++++++++++++++----------------- modules/dnn/src/ie_ngraph.hpp | 6 ++---- 3 files changed, 26 insertions(+), 30 deletions(-) diff --git a/modules/dnn/src/dnn.cpp b/modules/dnn/src/dnn.cpp index 672b328993..82f814131e 100644 --- a/modules/dnn/src/dnn.cpp +++ b/modules/dnn/src/dnn.cpp @@ -1937,6 +1937,9 @@ struct Net::Impl : public detail::NetImplBase #ifdef HAVE_DNN_NGRAPH + /** mark input pins as outputs from other subnetworks + * FIXIT must be done by DNN engine not ngraph. + */ void addNgraphOutputs(LayerData &ld) { CV_TRACE_FUNCTION(); @@ -1967,8 +1970,7 @@ struct Net::Impl : public detail::NetImplBase if (layerNet != ieInpNode->net) { CV_LOG_DEBUG(NULL, "DNN/IE: pin output between subnets: " << ieInpNode->node->get_friendly_name()); - ieInpNode->net->addOutput(ieInpNode->node->get_friendly_name()); - ieInpNode->net->setUnconnectedNodes(ieInpNode); + ieInpNode->net->addOutput(ieInpNode); } } } @@ -2118,7 +2120,7 @@ struct Net::Impl : public detail::NetImplBase if (!inpNode.empty()) { Ptr ieNode = inpNode.dynamicCast(); CV_Assert(!ieNode.empty()); - ieNode->net->setUnconnectedNodes(ieNode); + ieNode->net->addOutput(ieNode); } } continue; @@ -2259,15 +2261,11 @@ struct Net::Impl : public detail::NetImplBase CV_Assert(!ieNode.empty()); ieNode->net = net; - if (ld.consumers.empty()) { - // TF EAST_text_detection - ieNode->net->setUnconnectedNodes(ieNode); - } for (const auto& pin : blobsToKeep_) { if (pin.lid == ld.id) { - 
ieNode->net->addOutput(ieNode->node->get_friendly_name()); + ieNode->net->addOutput(ieNode); break; } } @@ -2298,7 +2296,7 @@ struct Net::Impl : public detail::NetImplBase if (!ieNode->net->isInitialized()) { - ieNode->net->setUnconnectedNodes(ieNode); + ieNode->net->addOutput(ieNode); ieNode->net->createNet((Target)preferableTarget); ld.skip = false; } diff --git a/modules/dnn/src/ie_ngraph.cpp b/modules/dnn/src/ie_ngraph.cpp index de735cc32c..235fa7dcbb 100644 --- a/modules/dnn/src/ie_ngraph.cpp +++ b/modules/dnn/src/ie_ngraph.cpp @@ -379,16 +379,21 @@ InfEngineNgraphNet::InfEngineNgraphNet(detail::NetImplBase& netImpl, InferenceEn device_name = "CPU"; } -void InfEngineNgraphNet::addOutput(const std::string& name) +void InfEngineNgraphNet::addOutput(const Ptr& node) { - requestedOutputs.push_back(name); + CV_Assert(node); + CV_Assert(node->node); + const std::string& name = node->node->get_friendly_name(); + requestedOutputs.insert({name, node}); } void InfEngineNgraphNet::setNodePtr(std::shared_ptr* ptr) { all_nodes.emplace((*ptr)->get_friendly_name(), ptr); } - void InfEngineNgraphNet::release() { + void InfEngineNgraphNet::release() + { + // FIXIT release should not be conditional, release ALL for (auto& node : components.back()) { #if INF_ENGINE_VER_MAJOR_GT(INF_ENGINE_RELEASE_2020_4) if (!(ngraph::op::is_parameter(node) || ngraph::op::is_output(node) || ngraph::op::is_constant(node)) ) { @@ -397,7 +402,6 @@ void InfEngineNgraphNet::setNodePtr(std::shared_ptr* ptr) { #endif auto it = all_nodes.find(node->get_friendly_name()); if (it != all_nodes.end()) { - unconnectedNodes.erase(*(it->second)); it->second->reset(); all_nodes.erase(it); } @@ -422,7 +426,8 @@ void InfEngineNgraphNet::dfs(std::shared_ptr& node, } } -int InfEngineNgraphNet::getNumComponents() { +int InfEngineNgraphNet::getNumComponents() +{ if (!components.empty()) { return components.size(); } @@ -445,12 +450,14 @@ int InfEngineNgraphNet::getNumComponents() { void 
InfEngineNgraphNet::createNet(Target targetId) { if (!hasNetOwner) { - CV_Assert(!unconnectedNodes.empty()); + CV_Assert(!requestedOutputs.empty()); ngraph::ResultVector outs; - for (auto& node : unconnectedNodes) + + for (auto output_node_it = requestedOutputs.begin(); output_node_it != requestedOutputs.end(); ++output_node_it) { - CV_LOG_DEBUG(NULL, "DNN/IE: +network_output[" << outs.size() << "]: name='" << node->get_friendly_name() << "'"); - auto out = std::make_shared(node); + CV_LOG_DEBUG(NULL, "DNN/NGRAPH: Add 'Result' output: " << output_node_it->first); + CV_Assert(output_node_it->second); + auto out = std::make_shared(output_node_it->second->node); outs.push_back(out); } CV_Assert_N(!inputs_vec.empty(), !outs.empty()); @@ -576,7 +583,7 @@ void InfEngineNgraphNet::init(Target targetId) auto node = ngraph_function->output(i).get_node(); for (size_t j = 0; j < node->get_input_size(); ++j) { std::string name = node->input_value(j).get_node()->get_friendly_name(); - auto iter = std::find(requestedOutputs.begin(), requestedOutputs.end(), name); + auto iter = requestedOutputs.find(name); if (iter != requestedOutputs.end()) { requestedOutputs.erase(iter); cnn.addOutput(name); @@ -584,10 +591,6 @@ void InfEngineNgraphNet::init(Target targetId) } } } - for (const auto& name : requestedOutputs) - { - cnn.addOutput(name); - } for (const auto& it : cnn.getInputsInfo()) { @@ -632,9 +635,6 @@ ngraph::ParameterVector InfEngineNgraphNet::setInputs(const std::vector return current_inp; } -void InfEngineNgraphNet::setUnconnectedNodes(Ptr& node) { - unconnectedNodes.insert(node->node); -} void InfEngineNgraphNet::initPlugin(InferenceEngine::CNNNetwork& net) { diff --git a/modules/dnn/src/ie_ngraph.hpp b/modules/dnn/src/ie_ngraph.hpp index bb593833ff..0d287a22a5 100644 --- a/modules/dnn/src/ie_ngraph.hpp +++ b/modules/dnn/src/ie_ngraph.hpp @@ -37,7 +37,7 @@ public: InfEngineNgraphNet(detail::NetImplBase& netImpl); InfEngineNgraphNet(detail::NetImplBase& netImpl, 
InferenceEngine::CNNNetwork& net); - void addOutput(const std::string& name); + void addOutput(const Ptr& node); bool isInitialized(); void init(Target targetId); @@ -47,7 +47,6 @@ public: void initPlugin(InferenceEngine::CNNNetwork& net); ngraph::ParameterVector setInputs(const std::vector& inputs, const std::vector& names); - void setUnconnectedNodes(Ptr& node); void addBlobs(const std::vector >& ptrs); void createNet(Target targetId); @@ -88,8 +87,7 @@ public: InferenceEngine::CNNNetwork cnn; bool hasNetOwner; - std::vector requestedOutputs; - std::unordered_set> unconnectedNodes; + std::unordered_map > requestedOutputs; std::map outputsDesc; }; From 1da48beeec8af53c1540e4571f5c06b3f771ff19 Mon Sep 17 00:00:00 2001 From: Alexander Alekhin Date: Sat, 5 Feb 2022 16:16:45 +0000 Subject: [PATCH 4/4] dnn(ngraph): fix output names --- modules/dnn/src/dnn.cpp | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/modules/dnn/src/dnn.cpp b/modules/dnn/src/dnn.cpp index 82f814131e..2e2112064e 100644 --- a/modules/dnn/src/dnn.cpp +++ b/modules/dnn/src/dnn.cpp @@ -2235,12 +2235,15 @@ struct Net::Impl : public detail::NetImplBase { CV_LOG_DEBUG(NULL, "DNN/IE: wrap layer " << ld.name << "@" << ld.type << " - outputs: " << ld.outputBlobsWrappers.size()); node = layer->initNgraph(ld.inputBlobsWrappers, inputNodes); - // FIXIT doesn't work with multiple outputs (set name is applied to the same node) +#if 0 // FIXIT doesn't work with multiple outputs (set name is applied to the same node) for (int i = 0; i < ld.outputBlobsWrappers.size(); ++i) { InferenceEngine::DataPtr dataPtr = ngraphDataNode(ld.outputBlobsWrappers[i]); node.dynamicCast()->setName(dataPtr->getName()); } +#else + node.dynamicCast()->setName(layer->name); +#endif } else {