Merge pull request #20494 from rogday:onnx_diagnostic_fix
fix ONNXImporter diagnostic mode layer registration issue

* fix layer registration and thread-unsafe access, and align the behavior of DNN_DIAGNOSTICS_RUN between the ONNX and TF importers
* move skipModelInput
* print all missing layers
* address TF issue
parent bb5f33d13c, commit 6801dd043d
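The diagnostic flow this change aligns between the ONNX and TF importers, as exercised by the sample and test updates below, looks roughly like this (a minimal sketch; the helper name and model path are placeholders):

    #include <opencv2/dnn.hpp>
    #include <opencv2/dnn/utils/debug_utils.hpp>

    void runDiagnostics(const std::string& modelPath)  // hypothetical helper
    {
        cv::dnn::enableModelDiagnostics(true);   // registers the NotImplemented placeholder layer
        cv::dnn::skipModelImport(true);          // diagnostic pass only, skip the real import
        cv::dnn::Net net = cv::dnn::readNet(modelPath);  // unsupported ops are logged instead of aborting the import
        cv::dnn::enableModelDiagnostics(false);
        cv::dnn::skipModelImport(false);
    }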
@@ -4,6 +4,7 @@ USAGE:
**************************************************/
#include <opencv2/dnn.hpp>
#include <opencv2/core/utils/filesystem.hpp>
#include <opencv2/dnn/utils/debug_utils.hpp>

#include <iostream>

@@ -57,6 +58,7 @@ int main( int argc, const char** argv )
    CV_Assert(!model.empty());

    enableModelDiagnostics(true);
    skipModelImport(true);
    redirectError(diagnosticsErrorCallback, NULL);

    Net ocvNet = readNet(model, config, frameworkId);
@@ -12,10 +12,16 @@ CV__DNN_INLINE_NS_BEGIN
//! @addtogroup dnn
//! @{

//! Register layer types of DNN model.
typedef std::map<std::string, std::vector<LayerFactory::Constructor> > LayerFactory_Impl;

//! Register layer types of DNN model.
//!
//! @note In order to thread-safely access the factory, see getLayerFactoryMutex() function.
LayerFactory_Impl& getLayerFactoryImpl();

//! Get the mutex guarding @ref LayerFactory_Impl, see getLayerFactoryImpl() function.
Mutex& getLayerFactoryMutex();

//! @}
CV__DNN_INLINE_NS_END
}
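For context, thread-safe access to the registry declared here follows the AutoLock pattern used by LayerHandler::addMissing() in the new debug_utils.cpp below; a minimal sketch (the helper function name is invented):

    #include <opencv2/dnn/layer_reg.private.hpp>

    // Check whether a layer type is currently registered, holding the factory mutex.
    static bool isLayerTypeRegistered(const std::string& type)
    {
        cv::AutoLock lock(cv::dnn::getLayerFactoryMutex());
        cv::dnn::LayerFactory_Impl& factory = cv::dnn::getLayerFactoryImpl();
        return factory.find(type) != factory.end();
    }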
modules/dnn/include/opencv2/dnn/utils/debug_utils.hpp (new file, 24 lines)
@@ -0,0 +1,24 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.

#ifndef OPENCV_DNN_UTILS_DEBUG_UTILS_HPP
#define OPENCV_DNN_UTILS_DEBUG_UTILS_HPP

#include "../dnn.hpp"

namespace cv { namespace dnn {
CV__DNN_INLINE_NS_BEGIN

/**
 * @brief Skip model import after diagnostic run in readNet() functions.
 * @param[in] skip Indicates whether to skip the import.
 *
 * This is an internal OpenCV function not intended for users.
 */
CV_EXPORTS void skipModelImport(bool skip);

CV__DNN_INLINE_NS_END
}} // namespace

#endif // OPENCV_DNN_UTILS_DEBUG_UTILS_HPP
modules/dnn/src/debug_utils.cpp (new file, 91 lines)
@@ -0,0 +1,91 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.

#include "precomp.hpp"

#include <sstream>

#include <opencv2/dnn/layer_reg.private.hpp>
#include <opencv2/dnn/utils/debug_utils.hpp>
#include <opencv2/core/utils/logger.hpp>

namespace cv { namespace dnn {
CV__DNN_INLINE_NS_BEGIN

bool DNN_DIAGNOSTICS_RUN = false;
bool DNN_SKIP_REAL_IMPORT = false;

void enableModelDiagnostics(bool isDiagnosticsMode)
{
    DNN_DIAGNOSTICS_RUN = isDiagnosticsMode;

    if (DNN_DIAGNOSTICS_RUN)
    {
        detail::NotImplemented::Register();
    }
    else
    {
        detail::NotImplemented::unRegister();
    }
}

void skipModelImport(bool skip)
{
    DNN_SKIP_REAL_IMPORT = skip;
}

void detail::LayerHandler::addMissing(const std::string& name, const std::string& type)
{
    cv::AutoLock lock(getLayerFactoryMutex());
    auto& registeredLayers = getLayerFactoryImpl();

    // If we didn't add it, but can create it, it's custom and not missing.
    if (layers.find(type) == layers.end() && registeredLayers.find(type) != registeredLayers.end())
    {
        return;
    }

    layers[type].insert(name);
}

bool detail::LayerHandler::contains(const std::string& type) const
{
    return layers.find(type) != layers.end();
}

void detail::LayerHandler::printMissing()
{
    if (layers.empty())
    {
        return;
    }

    std::stringstream ss;
    ss << "DNN: Not supported types:\n";
    for (const auto& type_names : layers)
    {
        const auto& type = type_names.first;
        ss << "Type='" << type << "', affected nodes:\n[";
        for (const auto& name : type_names.second)
        {
            ss << "'" << name << "', ";
        }
        ss.seekp(-2, std::ios_base::end);
        ss << "]\n";
    }
    CV_LOG_ERROR(NULL, ss.str());
}

LayerParams detail::LayerHandler::getNotImplementedParams(const std::string& name, const std::string& op)
{
    LayerParams lp;
    lp.name = name;
    lp.type = "NotImplemented";
    lp.set("type", op);

    return lp;
}

CV__DNN_INLINE_NS_END
}} // namespace
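To illustrate the new "print all missing layers" reporting, a hypothetical driver inside the dnn module could collect unsupported node types and emit a single summary (dnn_common.hpp is a private header; the op and node names below are invented):

    #include "dnn_common.hpp"  // private header under modules/dnn/src

    static void reportUnsupportedOpsExample()
    {
        cv::dnn::detail::LayerHandler handler;
        handler.addMissing("node_0", "UnsupportedOpA");  // invented node/type names
        handler.addMissing("node_1", "UnsupportedOpA");
        handler.addMissing("node_2", "UnsupportedOpB");
        handler.printMissing();  // one CV_LOG_ERROR listing each missing type with its affected nodes
    }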
@@ -94,22 +94,6 @@ static bool DNN_CHECK_NAN_INF = utils::getConfigurationParameterBool("OPENCV_DNN
static bool DNN_CHECK_NAN_INF_DUMP = utils::getConfigurationParameterBool("OPENCV_DNN_CHECK_NAN_INF_DUMP", false);
static bool DNN_CHECK_NAN_INF_RAISE_ERROR = utils::getConfigurationParameterBool("OPENCV_DNN_CHECK_NAN_INF_RAISE_ERROR", false);

bool DNN_DIAGNOSTICS_RUN = false;

void enableModelDiagnostics(bool isDiagnosticsMode)
{
    DNN_DIAGNOSTICS_RUN = isDiagnosticsMode;

    if (DNN_DIAGNOSTICS_RUN)
    {
        detail::NotImplemented::Register();
    }
    else
    {
        detail::NotImplemented::unRegister();
    }
}

using std::vector;
using std::map;
using std::make_pair;
@@ -5662,7 +5646,7 @@ bool Layer::updateMemoryShapes(const std::vector<MatShape> &inputs)
}
//////////////////////////////////////////////////////////////////////////

static Mutex& getLayerFactoryMutex()
Mutex& getLayerFactoryMutex()
{
    static Mutex* volatile instance = NULL;
    if (instance == NULL)
@@ -5,6 +5,9 @@
#ifndef __OPENCV_DNN_COMMON_HPP__
#define __OPENCV_DNN_COMMON_HPP__

#include <unordered_set>
#include <unordered_map>

#include <opencv2/dnn.hpp>

namespace cv { namespace dnn {
@@ -13,6 +16,9 @@ CV__DNN_INLINE_NS_BEGIN
Mutex& getInitializationMutex();
void initializeLayerFactory();

extern bool DNN_DIAGNOSTICS_RUN;
extern bool DNN_SKIP_REAL_IMPORT;

namespace detail {
#define CALL_MEMBER_FN(object, ptrToMemFn) ((object).*(ptrToMemFn))

@@ -25,6 +31,43 @@ public:
    static void unRegister();
};

template <typename Importer, typename ... Args>
Net readNet(Args&& ... args)
{
    Net net;
    Importer importer(net, std::forward<Args>(args)...);
    return net;
}

template <typename Importer, typename ... Args>
Net readNetDiagnostic(Args&& ... args)
{
    Net maybeDebugNet = readNet<Importer>(std::forward<Args>(args)...);
    if (DNN_DIAGNOSTICS_RUN && !DNN_SKIP_REAL_IMPORT)
    {
        // if we just imported the net in diagnostic mode, disable it and import again
        enableModelDiagnostics(false);
        Net releaseNet = readNet<Importer>(std::forward<Args>(args)...);
        enableModelDiagnostics(true);
        return releaseNet;
    }
    return maybeDebugNet;
}
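Usage note (sketch): when DNN_DIAGNOSTICS_RUN is set and DNN_SKIP_REAL_IMPORT is not, readNetDiagnostic<Importer>() above imports the model twice, first as a diagnostic pass and then as the real import whose Net is returned; the importer-specific call sites appear further below, for example:

    Net net = detail::readNetDiagnostic<ONNXImporter>(onnxFile.c_str());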

class LayerHandler
{
public:
    void addMissing(const std::string& name, const std::string& type);
    bool contains(const std::string& type) const;
    void printMissing();

protected:
    LayerParams getNotImplementedParams(const std::string& name, const std::string& op);

private:
    std::unordered_map<std::string, std::unordered_set<std::string>> layers;
};

struct NetImplBase
{
    const int networkId; // network global identifier
@@ -41,6 +41,8 @@ CV__DNN_INLINE_NS_BEGIN

extern bool DNN_DIAGNOSTICS_RUN;

class ONNXLayerHandler;

class ONNXImporter
{
    opencv_onnx::ModelProto model_proto;
@@ -61,60 +63,16 @@ class ONNXImporter
    void addConstant(const std::string& name, const Mat& blob);
    void addLayer(LayerParams& layerParams,
                  const opencv_onnx::NodeProto& node_proto);
    static const std::set<String>& getSupportedTypes();

public:

    ONNXImporter(Net& net, const char *onnxFile)
        : dstNet(net), utilNet(), dispatch(buildDispatchMap())
    {
        hasDynamicShapes = false;
        CV_Assert(onnxFile);
        CV_LOG_DEBUG(NULL, "DNN/ONNX: processing ONNX model from file: " << onnxFile);

        std::fstream input(onnxFile, std::ios::in | std::ios::binary);
        if (!input)
        {
            CV_Error(Error::StsBadArg, cv::format("Can't read ONNX file: %s", onnxFile));
        }

        if (!model_proto.ParseFromIstream(&input))
        {
            CV_Error(Error::StsUnsupportedFormat, cv::format("Failed to parse ONNX model: %s", onnxFile));
        }

        populateNet();
    }

    ONNXImporter(Net& net, const char* buffer, size_t sizeBuffer)
        : dstNet(net), utilNet(), dispatch(buildDispatchMap())
    {
        hasDynamicShapes = false;
        CV_LOG_DEBUG(NULL, "DNN/ONNX: processing in-memory ONNX model (" << sizeBuffer << " bytes)");

        struct _Buf : public std::streambuf
        {
            _Buf(const char* buffer, size_t sizeBuffer)
            {
                char* p = const_cast<char*>(buffer);
                setg(p, p, p + sizeBuffer);
            }
        };

        _Buf buf(buffer, sizeBuffer);
        std::istream input(&buf);

        if (!model_proto.ParseFromIstream(&input))
            CV_Error(Error::StsUnsupportedFormat, "Failed to parse onnx model from in-memory byte array.");

        populateNet();
    }
    ONNXImporter(Net& net, const char *onnxFile);
    ONNXImporter(Net& net, const char* buffer, size_t sizeBuffer);

    void populateNet();

protected:
    std::unique_ptr<ONNXLayerHandler> layerHandler;
    Net& dstNet;
    Net utilNet;

    opencv_onnx::GraphProto graph_proto;
    std::string framework_name;
@@ -131,9 +89,13 @@ protected:
    void handleNode(const opencv_onnx::NodeProto& node_proto);

private:
    friend class ONNXLayerHandler;
    typedef void (ONNXImporter::*ONNXImporterNodeParser)(LayerParams& layerParams, const opencv_onnx::NodeProto& node_proto);
    typedef std::map<std::string, ONNXImporterNodeParser> DispatchMap;

    const DispatchMap dispatch;
    static const DispatchMap buildDispatchMap();

    void parseMaxPool (LayerParams& layerParams, const opencv_onnx::NodeProto& node_proto);
    void parseAveragePool (LayerParams& layerParams, const opencv_onnx::NodeProto& node_proto);
    void parseReduce (LayerParams& layerParams, const opencv_onnx::NodeProto& node_proto);
@@ -178,12 +140,84 @@ private:
    void parseSoftMax (LayerParams& layerParams, const opencv_onnx::NodeProto& node_proto);
    void parseDetectionOutput (LayerParams& layerParams, const opencv_onnx::NodeProto& node_proto);
    void parseCumSum (LayerParams& layerParams, const opencv_onnx::NodeProto& node_proto);
    void parseCustom (LayerParams& layerParams, const opencv_onnx::NodeProto& node_proto);

    const DispatchMap dispatch;
    static const DispatchMap buildDispatchMap();
    void parseCustomLayer (LayerParams& layerParams, const opencv_onnx::NodeProto& node_proto);
};

class ONNXLayerHandler : public detail::LayerHandler
{
public:
    explicit ONNXLayerHandler(ONNXImporter* importer_);

    void fillRegistry(const opencv_onnx::GraphProto& net);

protected:
    ONNXImporter* importer;
};

ONNXLayerHandler::ONNXLayerHandler(ONNXImporter* importer_) : importer(importer_){}

void ONNXLayerHandler::fillRegistry(const opencv_onnx::GraphProto &net)
{
    int layersSize = net.node_size();
    for (int li = 0; li < layersSize; li++) {
        const opencv_onnx::NodeProto &node_proto = net.node(li);
        const std::string& name = node_proto.output(0);
        const std::string& type = node_proto.op_type();
        if (importer->dispatch.find(type) == importer->dispatch.end())
        {
            addMissing(name, type);
        }
    }
    printMissing();
}

ONNXImporter::ONNXImporter(Net& net, const char *onnxFile)
    : layerHandler(DNN_DIAGNOSTICS_RUN ? new ONNXLayerHandler(this) : nullptr),
    dstNet(net), dispatch(buildDispatchMap())
{
    hasDynamicShapes = false;
    CV_Assert(onnxFile);
    CV_LOG_DEBUG(NULL, "DNN/ONNX: processing ONNX model from file: " << onnxFile);

    std::fstream input(onnxFile, std::ios::in | std::ios::binary);
    if (!input)
    {
        CV_Error(Error::StsBadArg, cv::format("Can't read ONNX file: %s", onnxFile));
    }

    if (!model_proto.ParseFromIstream(&input))
    {
        CV_Error(Error::StsUnsupportedFormat, cv::format("Failed to parse ONNX model: %s", onnxFile));
    }

    populateNet();
}

ONNXImporter::ONNXImporter(Net& net, const char* buffer, size_t sizeBuffer)
    : layerHandler(DNN_DIAGNOSTICS_RUN ? new ONNXLayerHandler(this) : nullptr), dstNet(net), dispatch(buildDispatchMap())
{
    hasDynamicShapes = false;
    CV_LOG_DEBUG(NULL, "DNN/ONNX: processing in-memory ONNX model (" << sizeBuffer << " bytes)");

    struct _Buf : public std::streambuf
    {
        _Buf(const char* buffer, size_t sizeBuffer)
        {
            char* p = const_cast<char*>(buffer);
            setg(p, p, p + sizeBuffer);
        }
    };

    _Buf buf(buffer, sizeBuffer);
    std::istream input(&buf);

    if (!model_proto.ParseFromIstream(&input))
        CV_Error(Error::StsUnsupportedFormat, "Failed to parse onnx model from in-memory byte array.");

    populateNet();
}

inline void replaceLayerParam(LayerParams& layerParams, const String& oldKey, const String& newKey)
{
    if (layerParams.has(oldKey)) {
@@ -422,11 +456,7 @@ Mat ONNXImporter::getBlob(const std::string& input_name)
void ONNXImporter::addLayer(LayerParams& layerParams,
                            const opencv_onnx::NodeProto& node_proto)
{
    int id;
    if (DNN_DIAGNOSTICS_RUN)
        id = utilNet.addLayer(layerParams.name, layerParams.type, layerParams);
    else
        id = dstNet.addLayer(layerParams.name, layerParams.type, layerParams);
    int id = dstNet.addLayer(layerParams.name, layerParams.type, layerParams);
    for (int i = 0; i < node_proto.output_size(); ++i)
    {
        layer_id.insert(std::make_pair(node_proto.output(i), LayerInfo(id, i)));
@@ -439,10 +469,7 @@ void ONNXImporter::addLayer(LayerParams& layerParams,
        const std::string& input_name = node_proto.input(j);
        IterLayerId_t layerId = layer_id.find(input_name);
        if (layerId != layer_id.end()) {
            if (DNN_DIAGNOSTICS_RUN)
                utilNet.connect(layerId->second.layerId, layerId->second.outputId, id, inpNum);
            else
                dstNet.connect(layerId->second.layerId, layerId->second.outputId, id, inpNum);
            dstNet.connect(layerId->second.layerId, layerId->second.outputId, id, inpNum);
            ++inpNum;
            // Collect input shapes.
            IterShape_t shapeIt = outShapes.find(input_name);
@@ -451,11 +478,7 @@ void ONNXImporter::addLayer(LayerParams& layerParams,
        }
    }
    // Compute shape of output blob for this layer.
    Ptr<Layer> layer;
    if (DNN_DIAGNOSTICS_RUN)
        layer = utilNet.getLayer(id);
    else
        layer = dstNet.getLayer(id); // FIXIT: avoid instantiation of layers during the import stage
    Ptr<Layer> layer = dstNet.getLayer(id); // FIXIT: avoid instantiation of layers during the import stage
    layer->getMemoryShapes(layerInpShapes, 0, layerOutShapes, layerInternalShapes);
    for (int i = 0; i < node_proto.output_size() && i < (int)layerOutShapes.size(); ++i)
    {
@@ -532,35 +555,11 @@ void ONNXImporter::populateNet()
            layer_id.insert(std::make_pair(name, LayerInfo(0, netInputs.size() - 1)));
        }
    }
    utilNet.setInputsNames(netInputs);
    dstNet.setInputsNames(netInputs);

    if (DNN_DIAGNOSTICS_RUN) {
        auto &supportedTypes = getSupportedTypes();
        for (int li = 0; li < layersSize; li++) {
            const opencv_onnx::NodeProto &node_proto = graph_proto.node(li);
            std::string name = node_proto.output(0);
            std::string layer_type = node_proto.op_type();
            auto registered = supportedTypes.find(layer_type);
            if (registered == supportedTypes.end()) {
                CV_LOG_ERROR(NULL, "DNN/ONNX: NOTE: Potential problem with creating node " << name<< " with type " << layer_type << ".\n Type "
                                << layer_type << " IS NOT SUPPORTED!\n"
                );
            }
        }
        auto oldConstBlobs = constBlobs;
        auto oldOutShapes = outShapes;
        auto oldLayerId = layer_id;
        CV_LOG_INFO(NULL, "DNN/ONNX: start diagnostic run!");
        for (int li = 0; li < layersSize; li++) {
            const opencv_onnx::NodeProto &node_proto = graph_proto.node(li);
            handleNode(node_proto);
        }
        CV_LOG_INFO(NULL, "DNN/ONNX: diagnostic run completed!");
        constBlobs = oldConstBlobs;
        outShapes = oldOutShapes;
        layer_id = oldLayerId;
        enableModelDiagnostics(false);
        layerHandler->fillRegistry(graph_proto);
    }

    for(int li = 0; li < layersSize; li++)
@@ -569,83 +568,7 @@ void ONNXImporter::populateNet()
        handleNode(node_proto);
    }

    CV_LOG_DEBUG(NULL, "DNN/ONNX: import completed!");
}
const std::set<String>& ONNXImporter::getSupportedTypes()
{
    static const std::set<String> layerTypes = {
        "MaxPool",
        "AveragePool",
        "GlobalAveragePool",
        "GlobalMaxPool",
        "ReduceMean",
        "ReduceSum",
        "ReduceMax",
        "Slice",
        "Split",
        "Add",
        "Sum",
        "Sub",
        "Pow",
        "Max",
        "Neg",
        "Constant",
        "LSTM",
        "GRU",
        "ImageScaler",
        "Clip",
        "LeakyRelu",
        "Relu",
        "Elu",
        "Tanh",
        "PRelu",
        "LRN",
        "InstanceNormalization",
        "BatchNormalization",
        "Gemm",
        "MatMul",
        "Mul",
        "Div",
        "Conv",
        "ConvTranspose",
        "Transpose",
        "Squeeze",
        "Flatten",
        "Unsqueeze",
        "Expand",
        "Reshape",
        "Pad",
        "Shape",
        "Cast",
        "ConstantOfShape",
        "ConstantFill",
        "Gather",
        "Concat",
        "Resize",
        "Upsample",
        "SoftMax",
        "Softmax",
        "LogSoftmax",
        "DetectionOutput",
        "Interp",
        "CropAndResize",
        "ROIPooling",
        "PSROIPooling",
        "ChannelsPReLU",
        "Sigmoid",
        "Swish",
        "Mish",
        "AbsVal",
        "BNLL",
        "MaxUnpool",
        "Dropout",
        "Identity",
        "Crop",
        "Normalize",
        "CumSum"
    };
    return layerTypes;
    CV_LOG_DEBUG(NULL, (DNN_DIAGNOSTICS_RUN ? "DNN/ONNX: diagnostic run completed!" : "DNN/ONNX: import completed!"));
}
void ONNXImporter::handleNode(const opencv_onnx::NodeProto& node_proto)
@@ -673,7 +596,7 @@ void ONNXImporter::handleNode(const opencv_onnx::NodeProto& node_proto)
        }
        else
        {
            parseCustom(layerParams, node_proto);
            parseCustomLayer(layerParams, node_proto);
        }
    }
    catch (const cv::Exception& e)
@@ -683,6 +606,7 @@ void ONNXImporter::handleNode(const opencv_onnx::NodeProto& node_proto)
        CV_LOG_ERROR(NULL, "DNN/ONNX: Potential problem during processing node with " << node_proto.input_size() << " inputs and " << node_proto.output_size() << " outputs: "
                << cv::format("[%s]:(%s)", layer_type.c_str(), name.c_str()) << "\n" << e.msg
        );
        cv::AutoLock lock(getLayerFactoryMutex());
        auto registeredLayers = getLayerFactoryImpl();
        if (registeredLayers.find(layerParams.type) != registeredLayers.end())
        {
@@ -1068,11 +992,7 @@ void ONNXImporter::parseBias(LayerParams& layerParams, const opencv_onnx::NodePr
    constParams.name = layerParams.name + "/const";
    constParams.type = "Const";
    constParams.blobs.push_back((isSub ? -1 : 1) * blob);
    int id;
    if (DNN_DIAGNOSTICS_RUN)
        id = utilNet.addLayer(constParams.name, constParams.type, constParams);
    else
        id = dstNet.addLayer(constParams.name, constParams.type, constParams);
    int id = dstNet.addLayer(constParams.name, constParams.type, constParams);
    layer_id.insert(std::make_pair(constParams.name, LayerInfo(id, 0)));
    outShapes[constParams.name] = shape(blob);

@@ -1117,19 +1037,12 @@ void ONNXImporter::parseBias(LayerParams& layerParams, const opencv_onnx::NodePr
        powerParams.type = "Power";
        powerParams.set("scale", -1);

        int id;
        //Create Power layer
        if (DNN_DIAGNOSTICS_RUN)
            id = utilNet.addLayer(powerParams.name, powerParams.type, powerParams);
        else
            id = dstNet.addLayer(powerParams.name, powerParams.type, powerParams);
        int id = dstNet.addLayer(powerParams.name, powerParams.type, powerParams);
        //Connect to input
        IterLayerId_t layerId = layer_id.find(node_proto.input(1));
        CV_Assert(layerId != layer_id.end());
        if (DNN_DIAGNOSTICS_RUN)
            utilNet.connect(layerId->second.layerId, layerId->second.outputId, id, 0);
        else
            dstNet.connect(layerId->second.layerId, layerId->second.outputId, id, 0);
        dstNet.connect(layerId->second.layerId, layerId->second.outputId, id, 0);
        //Add shape
        layer_id.insert(std::make_pair(powerParams.name, LayerInfo(id, 0)));
        outShapes[powerParams.name] = outShapes[node_proto.input(1)];
@@ -1404,18 +1317,11 @@ void ONNXImporter::parseInstanceNormalization(LayerParams& layerParams, const op
    layerParams.erase("epsilon");

    //Create MVN layer
    int id;
    if (DNN_DIAGNOSTICS_RUN)
        id = utilNet.addLayer(mvnParams.name, mvnParams.type, mvnParams);
    else
        id = dstNet.addLayer(mvnParams.name, mvnParams.type, mvnParams);
    int id = dstNet.addLayer(mvnParams.name, mvnParams.type, mvnParams);
    //Connect to input
    IterLayerId_t layerId = layer_id.find(node_proto.input(0));
    CV_Assert(layerId != layer_id.end());
    if (DNN_DIAGNOSTICS_RUN)
        utilNet.connect(layerId->second.layerId, layerId->second.outputId, id, 0);
    else
        dstNet.connect(layerId->second.layerId, layerId->second.outputId, id, 0);
    dstNet.connect(layerId->second.layerId, layerId->second.outputId, id, 0);
    //Add shape
    layer_id.insert(std::make_pair(mvnParams.name, LayerInfo(id, 0)));
    outShapes[mvnParams.name] = outShapes[node_proto.input(0)];
@@ -1621,19 +1527,12 @@ void ONNXImporter::parseMul(LayerParams& layerParams, const opencv_onnx::NodePro
        powerParams.type = "Power";
        powerParams.set("power", -1);

        int id;
        //Create Power layer
        if (DNN_DIAGNOSTICS_RUN)
            id = utilNet.addLayer(powerParams.name, powerParams.type, powerParams);
        else
            id = dstNet.addLayer(powerParams.name, powerParams.type, powerParams);
        int id = dstNet.addLayer(powerParams.name, powerParams.type, powerParams);
        //Connect to input
        IterLayerId_t layerId = layer_id.find(node_proto.input(1));
        CV_Assert(layerId != layer_id.end());
        if (DNN_DIAGNOSTICS_RUN)
            utilNet.connect(layerId->second.layerId, layerId->second.outputId, id, 0);
        else
            dstNet.connect(layerId->second.layerId, layerId->second.outputId, id, 0);
        dstNet.connect(layerId->second.layerId, layerId->second.outputId, id, 0);
        //Add shape
        layer_id.insert(std::make_pair(powerParams.name, LayerInfo(id, 0)));
        outShapes[powerParams.name] = outShapes[node_proto.input(1)];
@@ -2418,7 +2317,7 @@ void ONNXImporter::parseCumSum(LayerParams& layerParams, const opencv_onnx::Node
    addLayer(layerParams, node_proto);
}

void ONNXImporter::parseCustom(LayerParams& layerParams, const opencv_onnx::NodeProto& node_proto)
void ONNXImporter::parseCustomLayer(LayerParams& layerParams, const opencv_onnx::NodeProto& node_proto)
{
    for (int j = 0; j < node_proto.input_size(); j++) {
        if (layer_id.find(node_proto.input(j)) == layer_id.end())
@@ -2476,23 +2375,18 @@ const ONNXImporter::DispatchMap ONNXImporter::buildDispatchMap()
    dispatch["SoftMax"] = dispatch["LogSoftmax"] = &ONNXImporter::parseSoftMax;
    dispatch["DetectionOutput"] = &ONNXImporter::parseDetectionOutput;
    dispatch["CumSum"] = &ONNXImporter::parseCumSum;
    dispatch["Custom"] = &ONNXImporter::parseCustom;

    return dispatch;
}

Net readNetFromONNX(const String& onnxFile)
{
    Net net;
    ONNXImporter onnxImporter(net, onnxFile.c_str());
    return net;
    return detail::readNetDiagnostic<ONNXImporter>(onnxFile.c_str());
}

Net readNetFromONNX(const char* buffer, size_t sizeBuffer)
{
    Net net;
    ONNXImporter onnxImporter(net, buffer, sizeBuffer);
    return net;
    return detail::readNetDiagnostic<ONNXImporter>(buffer, sizeBuffer);
}

Net readNetFromONNX(const std::vector<uchar>& buffer)
@@ -507,7 +507,7 @@ void ExcludeLayer(tensorflow::GraphDef& net, const int layer_index, const int in
    net.mutable_node()->DeleteSubrange(layer_index, 1);
}

class LayerHandler;
class TFLayerHandler;

class TFImporter
{
@@ -516,8 +516,7 @@ public:
    TFImporter(Net& net, const char *dataModel, size_t lenModel,
               const char *dataConfig = NULL, size_t lenConfig = 0);
protected:
    std::unique_ptr<LayerHandler> layerHandler;
    std::unique_ptr<Net> utilNet;
    std::unique_ptr<TFLayerHandler> layerHandler;
    Net& dstNet;
    void populateNet();

@@ -559,7 +558,7 @@ private:
    void addPermuteLayer(const int* order, const std::string& permName, Pin& inpId);
    void setPadding(LayerParams &layerParams, const tensorflow::NodeDef &layer, std::string& inputName, float value = 0.);

    friend class LayerHandler;
    friend class TFLayerHandler;
    typedef void (TFImporter::*TFImporterNodeParser)(tensorflow::GraphDef&, const tensorflow::NodeDef&, LayerParams&);
    typedef std::map<std::string, TFImporterNodeParser> DispatchMap;

@@ -625,18 +624,17 @@ void TFImporter::setPadding(LayerParams &layerParams, const tensorflow::NodeDef
    layerParams.set("pad_mode", "VALID");
}

class LayerHandler
class TFLayerHandler : public detail::LayerHandler
{
public:
    LayerHandler(TFImporter* importer_);
    ~LayerHandler() = default;
    explicit TFLayerHandler(TFImporter* importer_);

    bool handleMissing(const opencv_tensorflow::NodeDef& layer);
    void handleFailed(const opencv_tensorflow::NodeDef& layer);
    void fillRegistry(const tensorflow::GraphDef& net);
    bool handleMissing(const tensorflow::NodeDef& layer);
    void handleFailed(const tensorflow::NodeDef& layer);

private:
protected:
    TFImporter* importer;
    std::set<std::string> layers;
};

const TFImporter::DispatchMap TFImporter::buildDispatchMap()
@@ -2471,9 +2469,8 @@ void TFImporter::parseCustomLayer(tensorflow::GraphDef& net, const tensorflow::N
}

TFImporter::TFImporter(Net& net, const char *model, const char *config)
    : layerHandler(DNN_DIAGNOSTICS_RUN ? new LayerHandler(this) : nullptr),
    utilNet(DNN_DIAGNOSTICS_RUN ? new Net : nullptr),
    dstNet(DNN_DIAGNOSTICS_RUN ? *utilNet : net), dispatch(buildDispatchMap())
    : layerHandler(DNN_DIAGNOSTICS_RUN ? new TFLayerHandler(this) : nullptr),
    dstNet(net), dispatch(buildDispatchMap())
{
    if (model && model[0])
    {
@@ -2494,9 +2491,8 @@ TFImporter::TFImporter(
        const char *dataModel, size_t lenModel,
        const char *dataConfig, size_t lenConfig
)
    : layerHandler(DNN_DIAGNOSTICS_RUN ? new LayerHandler(this) : nullptr),
    utilNet(DNN_DIAGNOSTICS_RUN ? new Net : nullptr),
    dstNet(DNN_DIAGNOSTICS_RUN ? *utilNet : net), dispatch(buildDispatchMap())
    : layerHandler(DNN_DIAGNOSTICS_RUN ? new TFLayerHandler(this) : nullptr),
    dstNet(net), dispatch(buildDispatchMap())
{
    if (dataModel != NULL && lenModel > 0)
    {
@@ -2855,6 +2851,11 @@ void TFImporter::populateNet()
    addConstNodes(netBin, value_id, layers_to_ignore);
    addConstNodes(netTxt, value_id, layers_to_ignore);

    if (DNN_DIAGNOSTICS_RUN) {
        CV_LOG_INFO(NULL, "DNN/TF: start diagnostic run!");
        layerHandler->fillRegistry(net);
    }

    for (int li = 0; li < layersSize; li++)
    {
        const tensorflow::NodeDef& layer = net.node(li);
@@ -2873,7 +2874,7 @@ void TFImporter::populateNet()
        CV_Assert(!netInputsNames[i].empty());
    }
    dstNet.setInputsNames(netInputsNames);
    CV_LOG_DEBUG(NULL, "DNN/TF: ===================== Import completed =====================");
    CV_LOG_DEBUG(NULL, (DNN_DIAGNOSTICS_RUN? "DNN/TF: diagnostic run completed!" : "DNN/TF: import completed!"));
}

void TFImporter::addPermuteLayer(const int* order, const std::string& permName, Pin& inpId)
@@ -2933,60 +2934,60 @@ void TFImporter::parseNode(const tensorflow::NodeDef& layer)
    }
}

LayerHandler::LayerHandler(TFImporter* importer_) : importer(importer_) {}
TFLayerHandler::TFLayerHandler(TFImporter* importer_) : importer(importer_) {}

void LayerHandler::handleFailed(const opencv_tensorflow::NodeDef& layer)
void TFLayerHandler::fillRegistry(const tensorflow::GraphDef& net)
{
    LayerParams lp;
    lp.name = layer.name();
    lp.type = "NotImplemented";
    lp.set("type", layer.op());
    for (int li = 0; li < net.node_size(); li++) {
        const tensorflow::NodeDef& layer = net.node(li);

        const std::string& name = layer.name();
        const std::string& type = layer.op();
        if (importer->dispatch.find(type) == importer->dispatch.end())
        {
            addMissing(name, type);
        }
    }
    printMissing();
};

bool TFLayerHandler::handleMissing(const tensorflow::NodeDef& layer)
{
    bool unsupported = contains(layer.op());

    if (unsupported)
    {
        handleFailed(layer);
    }

    return unsupported;
}

void TFLayerHandler::handleFailed(const tensorflow::NodeDef& layer)
{
    LayerParams lp = getNotImplementedParams(layer.name(), layer.op());

    // the layer will be created or its params and type will be replaced
    int id = importer->dstNet.addLayer(lp.name, "NotImplemented", lp);
    int id = importer->dstNet.addLayer(lp.name, lp.type, lp);
    if (id != -1) // internal layer failure before the call to addLayer()
    {
        importer->layer_id[lp.name] = id;
    }
}

bool LayerHandler::handleMissing(const opencv_tensorflow::NodeDef& layer)
{
    LayerParams lp;
    // If we didn't add it, but can create it, it's custom and not missing.
    if (layers.find(layer.op()) == layers.end() && LayerFactory::createLayerInstance(layer.op(), lp))
    {
        return false;
    }

    if (layers.insert(layer.op()).second)
    {
        CV_LOG_ERROR(NULL, "DNN/TF: Node='" << layer.name() << "' of type='"<< layer.op()
                     << "' is not supported. This error won't be displayed again.");
    }

    handleFailed(layer);

    return true;
}

} // namespace

#endif //HAVE_PROTOBUF

Net readNetFromTensorflow(const String &model, const String &config)
{
    Net net;
    TFImporter importer(net, model.c_str(), config.c_str());
    return net;
    return detail::readNetDiagnostic<TFImporter>(model.c_str(), config.c_str());
}

Net readNetFromTensorflow(const char* bufferModel, size_t lenModel,
                          const char* bufferConfig, size_t lenConfig)
{
    Net net;
    TFImporter importer(net, bufferModel, lenModel, bufferConfig, lenConfig);
    return net;
    return detail::readNetDiagnostic<TFImporter>(bufferModel, lenModel, bufferConfig, lenConfig);
}

Net readNetFromTensorflow(const std::vector<uchar>& bufferModel, const std::vector<uchar>& bufferConfig)
@@ -13,6 +13,7 @@ Test for Tensorflow models loading
#include "npy_blob.hpp"

#include <opencv2/dnn/layer.details.hpp> // CV_DNN_REGISTER_LAYER_CLASS
#include <opencv2/dnn/utils/debug_utils.hpp>

namespace opencv_test
{
@@ -605,11 +606,13 @@ public:
    Test_TensorFlow_diagnostics()
    {
        enableModelDiagnostics(true);
        skipModelImport(true);
    }

    ~Test_TensorFlow_diagnostics()
    {
        enableModelDiagnostics(false);
        skipModelImport(false);
    }

    void runFailingTensorFlowNet(const std::string& prefix, bool hasText = false)