Merge pull request #19693 from LupusSanctus:onnx_diagnostic

ONNX diagnostic tool

* Final

* Add forgotten Normalize layer to the set of supported types

* ONNX diagnostic tool corrections

* Fixed CI test warnings

* Added minor code corrections

Co-authored-by: Sergey Slashchinin <sergei.slashchinin@xperience.ai>
Anastasia M 2021-03-29 19:38:28 +03:00 committed by GitHub
parent 35eaacd1db
commit e08de1101d
9 changed files with 436 additions and 124 deletions

View File

@ -59,3 +59,4 @@ ocv_add_app(annotation)
ocv_add_app(visualisation)
ocv_add_app(interactive-calibration)
ocv_add_app(version)
ocv_add_app(model-diagnostics)

View File

@ -0,0 +1,3 @@
ocv_add_application(opencv_model_diagnostics
MODULES opencv_core opencv_dnn
SRCS model_diagnostics.cpp)

View File

@ -0,0 +1,62 @@
/*************************************************
USAGE:
./model_diagnostics -m <onnx file location>
**************************************************/
#include <opencv2/dnn.hpp>
#include <opencv2/core/utils/filesystem.hpp>
#include <iostream>
using namespace cv;
using namespace dnn;
static int diagnosticsErrorCallback(int /*status*/, const char* /*func_name*/,
const char* /*err_msg*/, const char* /*file_name*/,
int /*line*/, void* /*userdata*/)
{
fflush(stdout);
fflush(stderr);
return 0;
}
static std::string checkFileExists(const std::string& fileName)
{
if (fileName.empty() || utils::fs::exists(fileName))
return fileName;
CV_Error(Error::StsObjectNotFound, "File " + fileName + " was not found! "
"Please, specify a full path to the file.");
}
std::string diagnosticKeys =
"{ model m | | Path to the model .onnx file. }"
"{ config c | | Path to the model configuration file. }"
"{ framework f | | [Optional] Name of the model framework. }";
int main( int argc, const char** argv )
{
CommandLineParser argParser(argc, argv, diagnosticKeys);
argParser.about("Use this tool to run the diagnostics of provided ONNX model"
"to obtain the information about its support (supported layers).");
if (argc == 1)
{
argParser.printMessage();
return 0;
}
std::string model = checkFileExists(argParser.get<std::string>("model"));
std::string config = checkFileExists(argParser.get<std::string>("config"));
std::string frameworkId = argParser.get<std::string>("framework");
CV_Assert(!model.empty());
enableModelDiagnostics(true);
redirectError(diagnosticsErrorCallback, NULL);
Net ocvNet = readNet(model, config, frameworkId);
return 0;
}

View File

@ -100,6 +100,18 @@ CV__DNN_INLINE_NS_BEGIN
CV_EXPORTS std::vector< std::pair<Backend, Target> > getAvailableBackends();
CV_EXPORTS_W std::vector<Target> getAvailableTargets(dnn::Backend be);
/**
* @brief Enables detailed logging of the DNN model loading stage in the CV DNN API.
* @param[in] isDiagnosticsMode Indicates whether diagnostic mode should be set.
*
* Diagnostic mode provides detailed logging of the model loading stage to expose
* potential problems (e.g. a layer type that is not implemented).
*
* @note In diagnostic mode a number of assertions is skipped, so the import can run
* past errors; this may lead to application crashes, which are expected in this mode.
*/
CV_EXPORTS void enableModelDiagnostics(bool isDiagnosticsMode);
/** @brief This class provides all data needed to initialize layer.
*
* It includes dictionary with scalar params (which can be read by using Dict interface),
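
A minimal sketch of the intended call sequence for this API, mirroring the new model-diagnostics app above (the model path is hypothetical):

#include <opencv2/dnn.hpp>

int main()
{
    // Enable diagnostic logging before parsing, so unsupported layers are reported
    // instead of the import stopping at the first failure.
    cv::dnn::enableModelDiagnostics(true);
    cv::dnn::Net net = cv::dnn::readNet("model.onnx");  // hypothetical model path
    return 0;
}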

View File

@ -0,0 +1,23 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
#ifndef OPENCV_DNN_LAYER_REG_HPP
#define OPENCV_DNN_LAYER_REG_HPP
#include <opencv2/dnn.hpp>
namespace cv {
namespace dnn {
CV__DNN_INLINE_NS_BEGIN
//! @addtogroup dnn
//! @{
//! Register layer types of DNN model.
typedef std::map<std::string, std::vector<LayerFactory::Constructor> > LayerFactory_Impl;
LayerFactory_Impl& getLayerFactoryImpl();
//! @}
CV__DNN_INLINE_NS_END
}
}
#endif
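
A short sketch of how this private registry can be consulted; the helper name tryCreateLayer is hypothetical, but the lookup mirrors the diagnostic catch block added to onnx_importer.cpp further down (assumes code inside namespace cv::dnn with this header included):

// Look the mapped layer type up in the factory and, if it is registered, try to
// instantiate it so that bad parameters surface as a log message rather than a
// hard failure.
static bool tryCreateLayer(LayerParams& layerParams)
{
    LayerFactory_Impl& registeredLayers = getLayerFactoryImpl();
    if (registeredLayers.find(layerParams.type) == registeredLayers.end())
        return false;  // type was never registered with the factory
    Ptr<Layer> layer = LayerFactory::createLayerInstance(layerParams.type, layerParams);
    return !layer.empty();
}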

View File

@ -63,6 +63,7 @@
#include <memory>
#include <opencv2/dnn/shape_utils.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/dnn/layer_reg.private.hpp>
#include <opencv2/core/utils/configuration.private.hpp>
#include <opencv2/core/utils/logger.hpp>
@ -93,6 +94,13 @@ static bool DNN_CHECK_NAN_INF = utils::getConfigurationParameterBool("OPENCV_DNN
static bool DNN_CHECK_NAN_INF_DUMP = utils::getConfigurationParameterBool("OPENCV_DNN_CHECK_NAN_INF_DUMP", false);
static bool DNN_CHECK_NAN_INF_RAISE_ERROR = utils::getConfigurationParameterBool("OPENCV_DNN_CHECK_NAN_INF_RAISE_ERROR", false);
bool DNN_DIAGNOSTICS_RUN = false;
void enableModelDiagnostics(bool isDiagnosticsMode)
{
DNN_DIAGNOSTICS_RUN = isDiagnosticsMode;
}
using std::vector;
using std::map;
using std::make_pair;
@ -5310,15 +5318,13 @@ static Mutex& getLayerFactoryMutex()
return *instance;
}
typedef std::map<String, std::vector<LayerFactory::Constructor> > LayerFactory_Impl;
static LayerFactory_Impl& getLayerFactoryImpl_()
{
static LayerFactory_Impl impl;
return impl;
}
static LayerFactory_Impl& getLayerFactoryImpl()
LayerFactory_Impl& getLayerFactoryImpl()
{
static LayerFactory_Impl* volatile instance = NULL;
if (instance == NULL)

View File

@ -80,7 +80,7 @@ public:
CV_Assert(!params.has("begin") && !params.has("size") && !params.has("end"));
const DictValue &indicesValue = params.get("slice_point");
sliceRanges.resize(indicesValue.size() + 1,
std::vector<Range>(axis + 1, Range::all()));
std::vector<Range>(std::max(axis,0) + 1, Range::all()));
int prevSlice = 0;
for (int i = 0; i < indicesValue.size(); ++i)
{

View File

@ -10,11 +10,14 @@
#include "../graph_simplifier.hpp"
#include "onnx_graph_simplifier.hpp"
#include <opencv2/core/utils/logger.hpp>
#include <queue>
namespace cv { namespace dnn {
CV__DNN_INLINE_NS_BEGIN
extern bool DNN_DIAGNOSTICS_RUN;
// This wrapper can behave differently for fake input nodes and real graph nodes.
class ONNXNodeWrapper : public ImportNodeWrapper
{
@ -639,8 +642,17 @@ Mat getMatFromTensor(opencv_onnx::TensorProto& tensor_proto)
}
}
else
CV_Error(Error::StsUnsupportedFormat, "Unsupported data type: " +
opencv_onnx::TensorProto_DataType_Name(datatype));
{
std::string errorMsg = "Unsupported data type: " +
opencv_onnx::TensorProto_DataType_Name(datatype);
if (!DNN_DIAGNOSTICS_RUN)
{
CV_Error(Error::StsUnsupportedFormat, errorMsg);
}
CV_LOG_ERROR(NULL, errorMsg);
return blob;
}
if (tensor_proto.dims_size() == 0)
blob.dims = 1; // To force 1-dimensional cv::Mat for scalars.
return blob;

View File

@ -8,6 +8,8 @@
#include "../precomp.hpp"
#include <opencv2/dnn/shape_utils.hpp>
#include <opencv2/dnn/layer_reg.private.hpp>
#include <opencv2/core/utils/logger.defines.hpp>
#undef CV_LOG_STRIP_LEVEL
#define CV_LOG_STRIP_LEVEL CV_LOG_LEVEL_DEBUG + 1
@ -37,6 +39,7 @@ namespace cv {
namespace dnn {
CV__DNN_INLINE_NS_BEGIN
extern bool DNN_DIAGNOSTICS_RUN;
class ONNXImporter
{
@ -58,11 +61,12 @@ class ONNXImporter
void addConstant(const std::string& name, const Mat& blob);
void addLayer(LayerParams& layerParams,
const opencv_onnx::NodeProto& node_proto);
static const std::set<String>& getSupportedTypes();
public:
ONNXImporter(Net& net, const char *onnxFile)
: dstNet(net)
: dstNet(net), utilNet()
{
hasDynamicShapes = false;
CV_Assert(onnxFile);
@ -83,7 +87,7 @@ public:
}
ONNXImporter(Net& net, const char* buffer, size_t sizeBuffer)
: dstNet(net)
: dstNet(net), utilNet()
{
hasDynamicShapes = false;
CV_LOG_DEBUG(NULL, "DNN/ONNX: processing in-memory ONNX model (" << sizeBuffer << " bytes)");
@ -110,6 +114,7 @@ public:
protected:
Net& dstNet;
Net utilNet;
opencv_onnx::GraphProto graph_proto;
std::string framework_name;
@ -182,6 +187,10 @@ std::map<std::string, Mat> ONNXImporter::getGraphTensors(
tensor_proto = graph_proto.initializer(i);
Mat mat = getMatFromTensor(tensor_proto);
releaseONNXTensor(tensor_proto);
if (DNN_DIAGNOSTICS_RUN && mat.empty())
continue;
layers_weights.insert(std::make_pair(tensor_proto.name(), mat));
}
return layers_weights;
@ -201,6 +210,8 @@ LayerParams ONNXImporter::getLayerParams(const opencv_onnx::NodeProto& node_prot
opencv_onnx::AttributeProto attribute_proto = node_proto.attribute(i);
std::string attribute_name = attribute_proto.name();
try
{
if(attribute_name == "kernel_shape")
{
CV_Assert(attribute_proto.ints_size() == 1 || attribute_proto.ints_size() == 2 || attribute_proto.ints_size() == 3);
@ -315,6 +326,18 @@ LayerParams ONNXImporter::getLayerParams(const opencv_onnx::NodeProto& node_prot
CV_Error(Error::StsNotImplemented, cv::format("DNN/ONNX/Attribute[%s]: unsupported attribute format", attribute_name.c_str()));
}
}
catch (const cv::Exception& e)
{
CV_UNUSED(e);
if (DNN_DIAGNOSTICS_RUN)
{
CV_LOG_ERROR(NULL, "DNN/ONNX: Potential problem with processing attributes for node " << node_proto.name() << " Attribute " << attribute_name.c_str()
);
continue;
}
throw;
}
}
return lp;
}
@ -338,7 +361,11 @@ Mat ONNXImporter::getBlob(const std::string& input_name)
void ONNXImporter::addLayer(LayerParams& layerParams,
const opencv_onnx::NodeProto& node_proto)
{
int id = dstNet.addLayer(layerParams.name, layerParams.type, layerParams);
int id;
if (DNN_DIAGNOSTICS_RUN)
id = utilNet.addLayer(layerParams.name, layerParams.type, layerParams);
else
id = dstNet.addLayer(layerParams.name, layerParams.type, layerParams);
for (int i = 0; i < node_proto.output_size(); ++i)
{
layer_id.insert(std::make_pair(node_proto.output(i), LayerInfo(id, i)));
@ -351,6 +378,9 @@ void ONNXImporter::addLayer(LayerParams& layerParams,
const std::string& input_name = node_proto.input(j);
IterLayerId_t layerId = layer_id.find(input_name);
if (layerId != layer_id.end()) {
if (DNN_DIAGNOSTICS_RUN)
utilNet.connect(layerId->second.layerId, layerId->second.outputId, id, inpNum);
else
dstNet.connect(layerId->second.layerId, layerId->second.outputId, id, inpNum);
++inpNum;
// Collect input shapes.
@ -360,7 +390,11 @@ void ONNXImporter::addLayer(LayerParams& layerParams,
}
}
// Compute shape of output blob for this layer.
Ptr<Layer> layer = dstNet.getLayer(id); // FIXIT: avoid instantiation of layers during the import stage
Ptr<Layer> layer;
if (DNN_DIAGNOSTICS_RUN)
layer = utilNet.getLayer(id);
else
layer = dstNet.getLayer(id); // FIXIT: avoid instantiation of layers during the import stage
layer->getMemoryShapes(layerInpShapes, 0, layerOutShapes, layerInternalShapes);
for (int i = 0; i < node_proto.output_size() && i < (int)layerOutShapes.size(); ++i)
{
@ -437,8 +471,37 @@ void ONNXImporter::populateNet()
layer_id.insert(std::make_pair(name, LayerInfo(0, netInputs.size() - 1)));
}
}
utilNet.setInputsNames(netInputs);
dstNet.setInputsNames(netInputs);
if (DNN_DIAGNOSTICS_RUN) {
auto &supportedTypes = getSupportedTypes();
for (int li = 0; li < layersSize; li++) {
const opencv_onnx::NodeProto &node_proto = graph_proto.node(li);
std::string name = node_proto.output(0);
std::string layer_type = node_proto.op_type();
auto registered = supportedTypes.find(layer_type);
if (registered == supportedTypes.end()) {
CV_LOG_ERROR(NULL, "DNN/ONNX: NOTE: Potential problem with creating node " << name<< " with type " << layer_type << ".\n Type "
<< layer_type << " IS NOT SUPPORTED!\n"
);
}
}
auto oldConstBlobs = constBlobs;
auto oldOutShapes = outShapes;
auto oldLayerId = layer_id;
CV_LOG_INFO(NULL, "DNN/ONNX: start diagnostic run!");
for (int li = 0; li < layersSize; li++) {
const opencv_onnx::NodeProto &node_proto = graph_proto.node(li);
handleNode(node_proto);
}
CV_LOG_INFO(NULL, "DNN/ONNX: diagnostic run completed!");
constBlobs = oldConstBlobs;
outShapes = oldOutShapes;
layer_id = oldLayerId;
enableModelDiagnostics(false);
}
for(int li = 0; li < layersSize; li++)
{
const opencv_onnx::NodeProto& node_proto = graph_proto.node(li);
@ -448,6 +511,80 @@ void ONNXImporter::populateNet()
CV_LOG_DEBUG(NULL, "DNN/ONNX: import completed!");
}
const std::set<String>& ONNXImporter::getSupportedTypes()
{
static const std::set<String> layerTypes = {
"MaxPool",
"AveragePool",
"GlobalAveragePool",
"GlobalMaxPool",
"ReduceMean",
"ReduceSum",
"ReduceMax",
"Slice",
"Split",
"Add",
"Sum",
"Sub",
"Pow",
"Max",
"Neg",
"Constant",
"LSTM",
"ImageScaler",
"Clip",
"LeakyRelu",
"Relu",
"Elu",
"Tanh",
"PRelu",
"LRN",
"InstanceNormalization",
"BatchNormalization",
"Gemm",
"MatMul",
"Mul",
"Div",
"Conv",
"ConvTranspose",
"Transpose",
"Squeeze",
"Flatten",
"Unsqueeze",
"Expand",
"Reshape",
"Pad",
"Shape",
"Cast",
"ConstantOfShape",
"ConstantFill",
"Gather",
"Concat",
"Resize",
"Upsample",
"SoftMax",
"Softmax",
"LogSoftmax",
"DetectionOutput",
"Interp",
"CropAndResize",
"ROIPooling",
"PSROIPooling",
"ChannelsPReLU",
"Sigmoid",
"Swish",
"Mish",
"AbsVal",
"BNLL",
"MaxUnpool",
"Dropout",
"Identity",
"Crop",
"Normalize"
};
return layerTypes;
}
void ONNXImporter::handleNode(const opencv_onnx::NodeProto& node_proto_)
{
opencv_onnx::NodeProto node_proto = node_proto_; // TODO FIXIT
@ -458,11 +595,11 @@ void ONNXImporter::handleNode(const opencv_onnx::NodeProto& node_proto_)
CV_LOG_DEBUG(NULL, "DNN/ONNX: processing node with " << node_proto.input_size() << " inputs and " << node_proto.output_size() << " outputs: "
<< cv::format("[%s]:(%s)", layer_type.c_str(), name.c_str())
);
LayerParams layerParams;
try
{
// FIXIT not all cases can be repacked into "LayerParams". Importer should handle such cases directly for each "layer_type"
LayerParams layerParams = getLayerParams(node_proto);
layerParams = getLayerParams(node_proto);
layerParams.name = name;
layerParams.type = layer_type;
@ -798,7 +935,11 @@ void ONNXImporter::handleNode(const opencv_onnx::NodeProto& node_proto_)
constParams.name = layerParams.name + "/const";
constParams.type = "Const";
constParams.blobs.push_back((isSub ? -1 : 1) * blob);
int id = dstNet.addLayer(constParams.name, constParams.type, constParams);
int id;
if (DNN_DIAGNOSTICS_RUN)
id = utilNet.addLayer(constParams.name, constParams.type, constParams);
else
id = dstNet.addLayer(constParams.name, constParams.type, constParams);
layer_id.insert(std::make_pair(constParams.name, LayerInfo(id, 0)));
outShapes[constParams.name] = shape(blob);
@ -843,11 +984,18 @@ void ONNXImporter::handleNode(const opencv_onnx::NodeProto& node_proto_)
powerParams.type = "Power";
powerParams.set("scale", -1);
int id;
//Create Power layer
int id = dstNet.addLayer(powerParams.name, powerParams.type, powerParams);
if (DNN_DIAGNOSTICS_RUN)
id = utilNet.addLayer(powerParams.name, powerParams.type, powerParams);
else
id = dstNet.addLayer(powerParams.name, powerParams.type, powerParams);
//Connect to input
IterLayerId_t layerId = layer_id.find(node_proto.input(1));
CV_Assert(layerId != layer_id.end());
if (DNN_DIAGNOSTICS_RUN)
utilNet.connect(layerId->second.layerId, layerId->second.outputId, id, 0);
else
dstNet.connect(layerId->second.layerId, layerId->second.outputId, id, 0);
//Add shape
layer_id.insert(std::make_pair(powerParams.name, LayerInfo(id, 0)));
@ -1035,10 +1183,17 @@ void ONNXImporter::handleNode(const opencv_onnx::NodeProto& node_proto_)
layerParams.erase("epsilon");
//Create MVN layer
int id = dstNet.addLayer(mvnParams.name, mvnParams.type, mvnParams);
int id;
if (DNN_DIAGNOSTICS_RUN)
id = utilNet.addLayer(mvnParams.name, mvnParams.type, mvnParams);
else
id = dstNet.addLayer(mvnParams.name, mvnParams.type, mvnParams);
//Connect to input
IterLayerId_t layerId = layer_id.find(node_proto.input(0));
CV_Assert(layerId != layer_id.end());
if (DNN_DIAGNOSTICS_RUN)
utilNet.connect(layerId->second.layerId, layerId->second.outputId, id, 0);
else
dstNet.connect(layerId->second.layerId, layerId->second.outputId, id, 0);
//Add shape
layer_id.insert(std::make_pair(mvnParams.name, LayerInfo(id, 0)));
@ -1232,11 +1387,18 @@ void ONNXImporter::handleNode(const opencv_onnx::NodeProto& node_proto_)
powerParams.type = "Power";
powerParams.set("power", -1);
int id;
//Create Power layer
int id = dstNet.addLayer(powerParams.name, powerParams.type, powerParams);
if (DNN_DIAGNOSTICS_RUN)
id = utilNet.addLayer(powerParams.name, powerParams.type, powerParams);
else
id = dstNet.addLayer(powerParams.name, powerParams.type, powerParams);
//Connect to input
IterLayerId_t layerId = layer_id.find(node_proto.input(1));
CV_Assert(layerId != layer_id.end());
if (DNN_DIAGNOSTICS_RUN)
utilNet.connect(layerId->second.layerId, layerId->second.outputId, id, 0);
else
dstNet.connect(layerId->second.layerId, layerId->second.outputId, id, 0);
//Add shape
layer_id.insert(std::make_pair(powerParams.name, LayerInfo(id, 0)));
@ -1921,10 +2083,32 @@ void ONNXImporter::handleNode(const opencv_onnx::NodeProto& node_proto_)
addLayer(layerParams, node_proto);
}
catch (const cv::Exception& e)
{
if (DNN_DIAGNOSTICS_RUN)
{
CV_LOG_ERROR(NULL, "DNN/ONNX: Potential problem during processing node with " << node_proto.input_size() << " inputs and " << node_proto.output_size() << " outputs: "
<< cv::format("[%s]:(%s)", layer_type.c_str(), name.c_str()) << "\n" << e.msg
);
auto registeredLayers = getLayerFactoryImpl();
if (registeredLayers.find(layerParams.type) != registeredLayers.end())
{
try
{
Ptr<Layer> layer = LayerFactory::createLayerInstance(layerParams.type, layerParams);
}
catch (const std::exception& e)
{
CV_LOG_ERROR(NULL, "DNN/ONNX: Layer of type " << layerParams.type << "(" << layer_type << ") cannot be created with parameters " << layerParams << ". Error: " << e.what()
);
}
}
}
else
{
CV_LOG_ERROR(NULL, "DNN/ONNX: ERROR during processing node with " << node_proto.input_size() << " inputs and " << node_proto.output_size() << " outputs: "
<< cv::format("[%s]:(%s)", layer_type.c_str(), name.c_str())
);
}
for (int i = 0; i < node_proto.input_size(); i++)
{
CV_LOG_INFO(NULL, " Input[" << i << "] = '" << node_proto.input(i) << "'");
@ -1933,6 +2117,15 @@ void ONNXImporter::handleNode(const opencv_onnx::NodeProto& node_proto_)
{
CV_LOG_INFO(NULL, " Output[" << i << "] = '" << node_proto.output(i) << "'");
}
if (DNN_DIAGNOSTICS_RUN)
{
for (int i = 0; i < node_proto.output_size(); ++i)
{
layer_id.insert(std::make_pair(node_proto.output(i), LayerInfo(0, i)));
outShapes[node_proto.output(i)] = outShapes[node_proto.input(0)];
}
}
else
CV_Error(Error::StsError, cv::format("Node [%s]:(%s) parse error: %s", layer_type.c_str(), name.c_str(), e.what()));
}
}