From 74d0b4cc780a1c82b1c512b3a303357cb329d73a Mon Sep 17 00:00:00 2001
From: Alexander Alekhin
Date: Wed, 30 Nov 2022 01:26:09 +0000
Subject: [PATCH] dnn(openvino): fix custom layers BlockingDesc

---
 modules/dnn/src/ie_ngraph.cpp | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/modules/dnn/src/ie_ngraph.cpp b/modules/dnn/src/ie_ngraph.cpp
index 235fa7dcbb..aee3e294e5 100644
--- a/modules/dnn/src/ie_ngraph.cpp
+++ b/modules/dnn/src/ie_ngraph.cpp
@@ -204,14 +204,13 @@ public:
         std::vector<InferenceEngine::DataConfig> outDataConfig;
 #if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2020_2)
         InferenceEngine::SizeVector order;
-        size_t offset = std::numeric_limits<size_t>::max();
         for (int i = 0; i < node->get_input_size(); ++i)
         {
             InferenceEngine::DataConfig conf;
             auto shape = node->input_value(i).get_shape();
             order.resize(shape.size());
             std::iota(order.begin(), order.end(), 0);
-            conf.desc = InferenceEngine::TensorDesc(InferenceEngine::Precision::FP32, shape, {shape, order, offset});
+            conf.desc = InferenceEngine::TensorDesc(InferenceEngine::Precision::FP32, shape, {shape, order});
             inDataConfig.push_back(conf);
         }
 
@@ -221,7 +220,7 @@ public:
             auto shape = node->output(i).get_shape();
             order.resize(shape.size());
             std::iota(order.begin(), order.end(), 0);
-            conf.desc = InferenceEngine::TensorDesc(InferenceEngine::Precision::FP32, shape, {shape, order, offset});
+            conf.desc = InferenceEngine::TensorDesc(InferenceEngine::Precision::FP32, shape, {shape, order});
             outDataConfig.push_back(conf);
         }
 #else
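
Note (not part of the patch): a minimal standalone sketch of the descriptor construction the change switches to, assuming the legacy InferenceEngine headers (<ie_layouts.h>). The two-argument BlockingDesc initializer {shape, order} leaves the offset and strides at their dense defaults, instead of passing the std::numeric_limits<size_t>::max() sentinel held by the removed `offset` variable. The helper name makeDenseFp32Desc is illustrative only, not part of the OpenCV source.

    #include <ie_layouts.h>   // InferenceEngine::TensorDesc, BlockingDesc, SizeVector
    #include <numeric>        // std::iota

    // Build a dense FP32 tensor descriptor for a given shape, the same way the
    // patched code now does for each custom-layer input/output.
    static InferenceEngine::TensorDesc makeDenseFp32Desc(const InferenceEngine::SizeVector& shape)
    {
        InferenceEngine::SizeVector order(shape.size());
        std::iota(order.begin(), order.end(), 0);  // identity order: 0, 1, 2, ...

        // Two-argument BlockingDesc: offset and per-dimension offsets/strides
        // keep their dense defaults (offset == 0).
        InferenceEngine::BlockingDesc blocking(shape, order);
        return InferenceEngine::TensorDesc(InferenceEngine::Precision::FP32, shape, blocking);
    }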