Merge pull request #21066 from andrewerf:21052-openvino-native-onnx

Native ONNX to Inference Engine backend #21066

Resolves #21052
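In short, the OpenVINO backend can now load a plain `*.onnx` model directly, both through `readNetFromModelOptimizer` and through the generic `readNet` entry point with the new `"openvino"` framework tag. A minimal usage sketch, assuming an OpenVINO-enabled build; the model path and input shape are placeholders, not part of this PR:

```cpp
#include <vector>
#include <opencv2/dnn.hpp>

int main()
{
    // "model.onnx" is a placeholder path for illustration.
    // New single-argument form: the weights argument now defaults to "".
    cv::dnn::Net net = cv::dnn::readNetFromModelOptimizer("model.onnx");

    // Equivalent route through the generic reader with the new framework tag:
    cv::dnn::Net net2 = cv::dnn::readNet("model.onnx", "", "openvino");

    // Dummy 1x3x224x224 blob; the real input shape depends on the model.
    cv::Mat blob(std::vector<int>{1, 3, 224, 224}, CV_32F, cv::Scalar(0));
    net.setInput(blob);
    cv::Mat out = net.forward();
    (void)net2; (void)out;
    return 0;
}
```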

### Pull Request Readiness Checklist

See details at https://github.com/opencv/opencv/wiki/How_to_contribute#making-a-good-pull-request

- [x] I agree to contribute to the project under the Apache 2 License.
- [x] To the best of my knowledge, the proposed patch is not based on code under GPL or another license that is incompatible with OpenCV
- [x] The PR is proposed to the proper branch
- [x] There is a reference to the original bug report and related work
- [ ] There are accuracy tests, performance tests and test data in the opencv_extra repository, if applicable
- [ ] The feature is well documented and sample code can be built with the project CMake
Commit b44cb33d2f (parent 1aa4621777) by andrewerf, 2023-10-20 11:49:27 +03:00; committed by GitHub.
3 changed files with 38 additions and 6 deletions


```diff
@@ -1021,14 +1021,14 @@ CV__DNN_INLINE_NS_BEGIN
 * * `*.pb` (TensorFlow, https://www.tensorflow.org/)
 * * `*.t7` | `*.net` (Torch, http://torch.ch/)
 * * `*.weights` (Darknet, https://pjreddie.com/darknet/)
-* * `*.bin` (DLDT, https://software.intel.com/openvino-toolkit)
+* * `*.bin` | `*.onnx` (OpenVINO, https://software.intel.com/openvino-toolkit)
 * * `*.onnx` (ONNX, https://onnx.ai/)
 * @param[in] config Text file contains network configuration. It could be a
 * file with the following extensions:
 * * `*.prototxt` (Caffe, http://caffe.berkeleyvision.org/)
 * * `*.pbtxt` (TensorFlow, https://www.tensorflow.org/)
 * * `*.cfg` (Darknet, https://pjreddie.com/darknet/)
-* * `*.xml` (DLDT, https://software.intel.com/openvino-toolkit)
+* * `*.xml` (OpenVINO, https://software.intel.com/openvino-toolkit)
 * @param[in] framework Explicit framework name tag to determine a format.
 * @returns Net object.
 *
@@ -1064,7 +1064,7 @@ CV__DNN_INLINE_NS_BEGIN
 * backend.
 */
 CV_EXPORTS_W
-Net readNetFromModelOptimizer(const String &xml, const String &bin);
+Net readNetFromModelOptimizer(const String &xml, const String &bin = "");
 /** @brief Load a network from Intel's Model Optimizer intermediate representation.
 * @param[in] bufferModelConfig Buffer contains XML configuration with network's topology.
```
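With the new default argument, `readNetFromModelOptimizer` can be called with the model file alone; both forms below are valid against the updated header (file names are placeholders):

```cpp
// IR pair, unchanged behaviour:
cv::dnn::Net irNet = cv::dnn::readNetFromModelOptimizer("graph.xml", "graph.bin");

// Single ONNX file, enabled by the new `bin = ""` default:
cv::dnn::Net onnxNet = cv::dnn::readNetFromModelOptimizer("graph.onnx");
```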


@ -43,9 +43,11 @@ Net readNet(const String& _model, const String& _config, const String& _framewor
std::swap(model, config); std::swap(model, config);
return readNetFromDarknet(config, model); return readNetFromDarknet(config, model);
} }
if (framework == "dldt" || modelExt == "bin" || configExt == "bin" || modelExt == "xml" || configExt == "xml") if (framework == "dldt" || framework == "openvino" ||
modelExt == "bin" || configExt == "bin" ||
modelExt == "xml" || configExt == "xml")
{ {
if (modelExt == "xml" || configExt == "bin") if (modelExt == "xml" || configExt == "bin" || modelExt == "onnx")
std::swap(model, config); std::swap(model, config);
return readNetFromModelOptimizer(config, model); return readNetFromModelOptimizer(config, model);
} }
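With the extra `modelExt == "onnx"` case, the swap moves an ONNX path into `config`, so `readNetFromModelOptimizer(config, model)` receives it as its first (topology) argument. A sketch of calls that now reach the OpenVINO reader through the generic entry point (paths are placeholders; a bare `.onnx` path with no framework tag still falls through to the native ONNX importer):

```cpp
cv::dnn::Net a = cv::dnn::readNet("graph.xml", "graph.bin");      // IR pair, as before
cv::dnn::Net b = cv::dnn::readNet("graph.onnx", "", "openvino");  // ONNX via Inference Engine (new)
cv::dnn::Net c = cv::dnn::readNet("", "graph.onnx", "dldt");      // legacy tag, config slot (old API)
```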
```diff
@@ -68,7 +70,7 @@ Net readNet(const String& _framework, const std::vector<uchar>& bufferModel,
         return readNetFromDarknet(bufferConfig, bufferModel);
     else if (framework == "torch")
         CV_Error(Error::StsNotImplemented, "Reading Torch models from buffers");
-    else if (framework == "dldt")
+    else if (framework == "dldt" || framework == "openvino")
         return readNetFromModelOptimizer(bufferConfig, bufferModel);
     else if (framework == "tflite")
         return readNetFromTFLite(bufferModel);
```
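The buffer overload gains the same `"openvino"` alias. A minimal sketch with an IR pair read from disk; this PR's tests don't exercise ONNX bytes through this overload, so only the alias is shown, and the `readFile` helper and paths are assumptions for illustration:

```cpp
#include <fstream>
#include <iterator>
#include <string>
#include <vector>
#include <opencv2/dnn.hpp>

// Helper for illustration: slurp a file into a byte buffer.
static std::vector<uchar> readFile(const std::string& path)
{
    std::ifstream f(path, std::ios::binary);
    return std::vector<uchar>((std::istreambuf_iterator<char>(f)),
                              std::istreambuf_iterator<char>());
}

int main()
{
    std::vector<uchar> bin = readFile("graph.bin");  // weights buffer
    std::vector<uchar> xml = readFile("graph.xml");  // topology buffer
    // "openvino" is now accepted wherever "dldt" was:
    cv::dnn::Net net = cv::dnn::readNet("openvino", bin, xml);
    return 0;
}
```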


```diff
@@ -6,6 +6,7 @@
 // Third party copyrights are property of their respective owners.

 #include "test_precomp.hpp"
+#include "npy_blob.hpp"
 #include <opencv2/core/ocl.hpp>
 #include <opencv2/core/opencl/ocl_defs.hpp>
 #include <opencv2/dnn/layer.details.hpp>  // CV_DNN_REGISTER_LAYER_CLASS
```
```diff
@@ -871,6 +872,35 @@ TEST_P(Test_Model_Optimizer, flexible_inputs)
     normAssert(ref, out, 0, 0);
 }

+TEST_P(Test_Model_Optimizer, readONNX)
+{
+    const Backend backendId = get<0>(GetParam());
+    const Target targetId = get<1>(GetParam());
+    ASSERT_EQ(DNN_BACKEND_INFERENCE_ENGINE_NGRAPH, backendId);
+
+    const std::string& model = findDataFile("dnn/onnx/models/convolution.onnx");
+
+    std::vector<Net> nets = {
+        // Old API
+        readNetFromModelOptimizer(model, ""),
+        readNet("", model, "dldt"),
+        // New API
+        readNetFromModelOptimizer(model),
+        readNet(model, "", "openvino")
+    };
+
+    Mat inp = blobFromNPY(findDataFile("dnn/onnx/data/input_convolution.npy"));
+    Mat ref = blobFromNPY(findDataFile("dnn/onnx/data/output_convolution.npy"));
+
+    for (int i = 0; i < nets.size(); ++i) {
+        nets[i].setPreferableTarget(targetId);
+        nets[i].setInput(inp);
+        Mat out = nets[i].forward();
+
+        normAssert(out, ref, format("Index: %d", i).c_str());
+    }
+}
+
 INSTANTIATE_TEST_CASE_P(/**/, Test_Model_Optimizer,
     dnnBackendsAndTargetsIE()
 );
```
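The new `readONNX` case loads the same convolution model through all four call forms and checks each output against a reference NPY blob, so the old and new APIs are verified to agree. Running it requires the opencv_extra test data (located via `OPENCV_TEST_DATA_PATH`); an individual run can be selected with the standard GoogleTest filter, e.g. `opencv_test_dnn --gtest_filter='*Test_Model_Optimizer*readONNX*'`.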