dnn(ngraph): add debugging messages

Alexander Alekhin 2022-02-05 14:36:57 +00:00
parent 062f305d1a
commit 67978b5746
3 changed files with 32 additions and 7 deletions

View File

@@ -184,7 +184,8 @@ static inline MatShape concat(const MatShape& a, const MatShape& b)
     return c;
 }

-static inline std::string toString(const MatShape& shape, const String& name = "")
+template<typename _Tp>
+static inline std::string toString(const std::vector<_Tp>& shape, const String& name = "")
 {
     std::ostringstream ss;
     if (!name.empty())
@@ -195,11 +196,14 @@ static inline std::string toString(const MatShape& shape, const String& name = "")
     ss << " ]";
     return ss.str();
 }

-static inline void print(const MatShape& shape, const String& name = "")
+template<typename _Tp>
+static inline void print(const std::vector<_Tp>& shape, const String& name = "")
 {
     std::cout << toString(shape, name) << std::endl;
 }

-static inline std::ostream& operator<<(std::ostream &out, const MatShape& shape)
+template<typename _Tp>
+static inline std::ostream& operator<<(std::ostream &out, const std::vector<_Tp>& shape)
 {
     out << toString(shape);
     return out;
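For reference, here is a minimal standalone sketch (not part of the commit) of what the templated overloads enable: toString() now accepts any std::vector<_Tp>, not only MatShape (which is std::vector<int>). The body below mirrors the visible context lines; the exact opening bracket and separators are assumed, and cv::String is simplified to std::string.

// Sketch, assuming the formatting implied by the visible context (" ]" suffix).
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

template<typename _Tp>
static inline std::string toString(const std::vector<_Tp>& shape, const std::string& name = "")
{
    std::ostringstream ss;
    if (!name.empty())
        ss << name << ' ';
    ss << '[';
    for (size_t i = 0; i < shape.size(); ++i)
        ss << ' ' << shape[i];
    ss << " ]";
    return ss.str();
}

int main()
{
    std::vector<int> matShape = {1, 3, 224, 224};  // a MatShape is std::vector<int>
    std::vector<size_t> dims = {10, 20};           // other element types now work too
    std::cout << toString(matShape, "blob") << std::endl;  // blob [ 1 3 224 224 ]
    std::cout << toString(dims) << std::endl;               // [ 10 20 ]
    return 0;
}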

View File

@@ -1941,6 +1941,8 @@ struct Net::Impl : public detail::NetImplBase
    {
        CV_TRACE_FUNCTION();

+       CV_LOG_DEBUG(NULL, "DNN/IE: layer of new subnet: " << ld.name << "@" << ld.type);
+
        Ptr<InfEngineNgraphNet> layerNet;
        auto it = ld.backendNodes.find(preferableBackend);
        if (it != ld.backendNodes.end())
@@ -1964,6 +1966,7 @@ struct Net::Impl : public detail::NetImplBase
            CV_Assert(!ieInpNode.empty()); CV_Assert(!ieInpNode->net.empty());
            if (layerNet != ieInpNode->net)
            {
+               CV_LOG_DEBUG(NULL, "DNN/IE: pin output between subnets: " << ieInpNode->node->get_friendly_name());
                ieInpNode->net->addOutput(ieInpNode->node->get_friendly_name());
                ieInpNode->net->setUnconnectedNodes(ieInpNode);
            }
@@ -2064,13 +2067,19 @@ struct Net::Impl : public detail::NetImplBase
        {
            LayerData& ld = it->second;

+           CV_LOG_DEBUG(NULL, "DNN/IE: processing layer " << ld.name << "@" << ld.type << " (" << ld.id << ") ...");
+
            if (ld.id == 0 && ld.skip)
+           {
+               CV_LOG_DEBUG(NULL, "DNN/IE: SKIP!");
                continue;
+           }

            bool fused = ld.skip;
            Ptr<Layer> layer = ld.layerInstance;
            if (!fused && !layer->supportBackend(preferableBackend))
            {
+               CV_LOG_DEBUG(NULL, "DNN/IE: NOT supported!");
                bool customizable = ld.id != 0 && supportsCPUFallback;

                // TODO: there is a bug in Myriad plugin with custom layers shape infer.
@@ -2097,6 +2106,7 @@ struct Net::Impl : public detail::NetImplBase
                if (!customizable)
                {
+                   CV_LOG_DEBUG(NULL, "DNN/IE: NOT customizable!");
                    addNgraphOutputs(ld);
                    net = Ptr<InfEngineNgraphNet>();
                    layer->preferableTarget = DNN_TARGET_CPU;
@@ -2221,7 +2231,9 @@ struct Net::Impl : public detail::NetImplBase
            if (layer->supportBackend(preferableBackend))
            {
+               CV_LOG_DEBUG(NULL, "DNN/IE: wrap layer " << ld.name << "@" << ld.type << " - outputs: " << ld.outputBlobsWrappers.size());
                node = layer->initNgraph(ld.inputBlobsWrappers, inputNodes);
+               // FIXIT doesn't work with multiple outputs (set name is applied to the same node)
                for (int i = 0; i < ld.outputBlobsWrappers.size(); ++i)
                {
                    InferenceEngine::DataPtr dataPtr = ngraphDataNode(ld.outputBlobsWrappers[i]);
@@ -2230,12 +2242,16 @@ struct Net::Impl : public detail::NetImplBase
            }
            else
            {
+               CV_LOG_DEBUG(NULL, "DNN/IE: layer is not supported: " << ld.name << "@" << ld.type);
                node = Ptr<BackendNode>(new InfEngineNgraphNode(inputNodes,
                    ld.layerInstance, ld.inputBlobs, ld.outputBlobs, ld.internals));
            }
        }
        else if (node.empty())
+       {
+           CV_LOG_DEBUG(NULL, "DNN/IE: node.empty() bypass...");
            continue;
+       }

        ld.backendNodes[preferableBackend] = node;
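The CV_LOG_DEBUG messages added above are compiled in but filtered at runtime by OpenCV's logging level. A minimal sketch of how a caller would surface them, using OpenCV's standard logging API (the model path is hypothetical; setting the OPENCV_LOG_LEVEL=DEBUG environment variable works as well):

#include <opencv2/core/utils/logger.hpp>
#include <opencv2/dnn.hpp>

int main()
{
    // Raise the runtime log level so CV_LOG_DEBUG output is no longer filtered.
    cv::utils::logging::setLogLevel(cv::utils::logging::LOG_LEVEL_DEBUG);

    cv::dnn::Net net = cv::dnn::readNet("model.onnx");  // hypothetical model file
    net.setPreferableBackend(cv::dnn::DNN_BACKEND_INFERENCE_ENGINE);

    // The "DNN/IE: processing layer ..." messages are printed while the
    // nGraph backend graph is built during the first forward() call.
    cv::Mat blob(std::vector<int>{1, 3, 224, 224}, CV_32F, cv::Scalar(0));
    net.setInput(blob);
    net.forward();
    return 0;
}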

View File

@@ -449,6 +449,7 @@ void InfEngineNgraphNet::createNet(Target targetId) {
        ngraph::ResultVector outs;
        for (auto& node : unconnectedNodes)
        {
+           CV_LOG_DEBUG(NULL, "DNN/IE: +network_output[" << outs.size() << "]: name='" << node->get_friendly_name() << "'");
            auto out = std::make_shared<ngraph::op::Result>(node);
            outs.push_back(out);
        }
@@ -456,6 +457,7 @@ void InfEngineNgraphNet::createNet(Target targetId) {
        ngraph_function = std::make_shared<ngraph::Function>(outs, inputs_vec);

        int num_comp = getNumComponents();
+       CV_LOG_DEBUG(NULL, "DNN/IE: number of subgraphs: " << num_comp);
        if (num_comp > 1) {
            for (int i = num_comp - 1; i >= 0; --i) {
                ngraph::ResultVector outputs;
@@ -466,6 +468,7 @@ void InfEngineNgraphNet::createNet(Target targetId) {
#else
                    if (node->is_parameter()) {
#endif
+                       CV_LOG_DEBUG(NULL, "DNN/IE: subgraph[" << i << "]: +input[" << inps.size() << "] = '" << node->get_friendly_name() << "'");
                        auto parameter = std::dynamic_pointer_cast<ngraph::op::Parameter>(node);
                        inps.push_back(parameter);
                    }
@@ -474,10 +477,12 @@ void InfEngineNgraphNet::createNet(Target targetId) {
#else
                    else if (node->is_output()) {
#endif
+                       CV_LOG_DEBUG(NULL, "DNN/IE: subgraph[" << i << "]: +output[" << outputs.size() << "] = '" << node->get_friendly_name() << "'");
                        auto result = std::dynamic_pointer_cast<ngraph::op::Result>(node);
                        outputs.push_back(result);
                    }
                }
+               CV_LOG_DEBUG(NULL, "DNN/IE: subgraph[" << i << "]: nodes=" << components.back().size() << " inputs=" << inps.size() << " outputs=" << outputs.size());
                isInit = false;
                CV_Assert_N(!inps.empty(), !outputs.empty());
                ngraph_function = std::make_shared<ngraph::Function>(outputs, inps);
@@ -729,10 +734,10 @@ void InfEngineNgraphNet::initPlugin(InferenceEngine::CNNNetwork& net)
            }
        }
    }
-   if (isHetero)
-       netExec = ie.LoadNetwork(net, "HETERO:" + device_name + ",CPU", config);
-   else
-       netExec = ie.LoadNetwork(net, device_name, config);
+   std::string ieDevice = isHetero ? ("HETERO:" + device_name + ",CPU") : device_name;
+   CV_LOG_INFO(NULL, "DNN/IE: Calling LoadNetwork(device=" << ieDevice << ")...");
+   netExec = ie.LoadNetwork(net, ieDevice, config);
}
catch (const std::exception& ex)
{
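The last hunk folds the HETERO branch into a single device string, so LoadNetwork is called (and the chosen device logged) in exactly one place; a "HETERO:<device>,CPU" name tells Inference Engine to fall back to CPU for layers the primary device cannot run. A sketch of the same pattern as a free function, assuming the pre-OpenVINO-2.0 InferenceEngine API that this file targets:

// Sketch only: mirrors the refactor above using the classic IE Core API.
#include <inference_engine.hpp>
#include <map>
#include <string>

InferenceEngine::ExecutableNetwork loadWithOptionalHeteroFallback(
        InferenceEngine::Core& ie,
        InferenceEngine::CNNNetwork& net,
        const std::string& device_name,
        const std::map<std::string, std::string>& config,
        bool isHetero)
{
    // One device string instead of an if/else around two LoadNetwork calls,
    // e.g. "HETERO:MYRIAD,CPU" when CPU fallback is requested.
    std::string ieDevice = isHetero ? ("HETERO:" + device_name + ",CPU") : device_name;
    return ie.LoadNetwork(net, ieDevice, config);
}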