Einsum OpenVINO backend

This commit is contained in:
Dmitry Kurtaev 2024-03-29 09:40:03 +03:00
parent f87e1efd2a
commit cfa42e4338
3 changed files with 26 additions and 3 deletions

View File

@ -5,6 +5,7 @@
#include <inttypes.h> #include <inttypes.h>
#include <opencv2/dnn/shape_utils.hpp> #include <opencv2/dnn/shape_utils.hpp>
#include "../precomp.hpp" #include "../precomp.hpp"
#include "../ie_ngraph.hpp"
#include "layers_common.hpp" #include "layers_common.hpp"
#include "cpu_kernels/fast_gemm.hpp" #include "cpu_kernels/fast_gemm.hpp"
@ -304,7 +305,7 @@ public:
MatShape einsumOutDims; // vector to store output dimensions MatShape einsumOutDims; // vector to store output dimensions
// These hold equation substring, left hand side and right hand side of // These hold equation substring, left hand side and right hand side of
String lhs_eq, rhs_eq; String lhs_eq, rhs_eq, equation;
// Holds token from left hand side of the equation // Holds token from left hand side of the equation
std::vector<String> lhs_eq_tokens; std::vector<String> lhs_eq_tokens;
@ -378,7 +379,7 @@ public:
LayerEinsumImpl(const LayerParams& params) LayerEinsumImpl(const LayerParams& params)
{ {
setParamsFrom(params); setParamsFrom(params);
String equation = params.get<String>("equation"); equation = params.get<String>("equation");
int outputSize = params.get<int>("outputSize"); int outputSize = params.get<int>("outputSize");
numInputs = params.get<int>("inputSize"); numInputs = params.get<int>("inputSize");
@ -423,6 +424,11 @@ public:
calculateOutputShape(); calculateOutputShape();
} }
// Reports which DNN backends can execute this Einsum layer:
// the default OpenCV implementation and the OpenVINO (nGraph) backend.
virtual bool supportBackend(int backendId) CV_OVERRIDE {
    switch (backendId)
    {
    case DNN_BACKEND_OPENCV:
    case DNN_BACKEND_INFERENCE_ENGINE_NGRAPH:
        return true;
    default:
        return false;
    }
}
// getMemoryShapes // getMemoryShapes
bool getMemoryShapes(const std::vector<MatShape> &inputs, bool getMemoryShapes(const std::vector<MatShape> &inputs,
const int requiredOutputs, const int requiredOutputs,
@ -553,6 +559,19 @@ public:
result = result.reshape(1, einsumOutDims.size(), einsumOutDims.data()); result = result.reshape(1, einsumOutDims.size(), einsumOutDims.data());
result.copyTo(outputs[0]); result.copyTo(outputs[0]);
} // forward } // forward
#ifdef HAVE_DNN_NGRAPH
// Builds the OpenVINO backend node for this layer: gathers the ov outputs
// of all input nodes and wraps them in a single ov::op::v7::Einsum that
// uses the equation string captured from the layer params in the ctor.
virtual Ptr<BackendNode> initNgraph(const std::vector<Ptr<BackendWrapper> >&,
const std::vector<Ptr<BackendNode> >& nodes) CV_OVERRIDE {
ov::OutputVector inputs(nodes.size());
for (size_t i = 0; i < nodes.size(); ++i) {
// Every upstream node is expected to be an InfEngineNgraphNode
// wrapping an ov output — TODO confirm no other node kind reaches here.
inputs[i] = nodes[i].dynamicCast<InfEngineNgraphNode>()->node;
}
auto einsum = std::make_shared<ov::op::v7::Einsum>(inputs, equation);
return new InfEngineNgraphNode(einsum);
}
#endif // HAVE_DNN_NGRAPH
}; // EinsumClass }; // EinsumClass
Mat LayerEinsumImpl::reduceSum(Mat& src, MatShape& reduceAxis) Mat LayerEinsumImpl::reduceSum(Mat& src, MatShape& reduceAxis)

View File

@ -597,7 +597,7 @@ CASE(test_dynamicquantizelinear_min_adjusted_expanded)
CASE(test_edge_pad) CASE(test_edge_pad)
// no filter // no filter
CASE(test_einsum_batch_diagonal) CASE(test_einsum_batch_diagonal)
// no filter SKIP;
CASE(test_einsum_batch_matmul) CASE(test_einsum_batch_matmul)
// no filter // no filter
CASE(test_einsum_inner_prod) CASE(test_einsum_inner_prod)

View File

@ -1471,6 +1471,8 @@ TEST_P(Test_ONNX_layers, Einsum_2D)
TEST_P(Test_ONNX_layers, Einsum_2D_Ellipses) TEST_P(Test_ONNX_layers, Einsum_2D_Ellipses)
{ {
if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
testONNXModels("einsum_2d_ellipses", npy, 0, 0, false, false, 2); testONNXModels("einsum_2d_ellipses", npy, 0, 0, false, false, 2);
} }
@ -1501,6 +1503,8 @@ TEST_P(Test_ONNX_layers, DISABLED_Einsum_HadamardProduct)
TEST_P(Test_ONNX_layers, Einsum_Batch_Diagonal) TEST_P(Test_ONNX_layers, Einsum_Batch_Diagonal)
{ {
if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
testONNXModels("einsum_batch_diagonal", npy, 0, 0, false, false, 1); testONNXModels("einsum_batch_diagonal", npy, 0, 0, false, false, 1);
} }