From 8422dda2c7f5848e6355a5497fdbd9524292c18c Mon Sep 17 00:00:00 2001
From: Dmitry Kurtaev
Date: Fri, 7 Dec 2018 13:38:05 +0300
Subject: [PATCH] Element-wise subtraction from TensorFlow

---
 modules/dnn/src/layers/eltwise_layer.cpp   |  4 ++-
 modules/dnn/src/tensorflow/tf_importer.cpp | 40 +++++-----------------
 modules/dnn/test/test_tf_importer.cpp      |  3 +-
 3 files changed, 14 insertions(+), 33 deletions(-)

diff --git a/modules/dnn/src/layers/eltwise_layer.cpp b/modules/dnn/src/layers/eltwise_layer.cpp
index 03aaf8b3ec..c038eb19e0 100644
--- a/modules/dnn/src/layers/eltwise_layer.cpp
+++ b/modules/dnn/src/layers/eltwise_layer.cpp
@@ -98,7 +98,8 @@ public:
     {
         return backendId == DNN_BACKEND_OPENCV ||
                backendId == DNN_BACKEND_HALIDE ||
-               (backendId == DNN_BACKEND_INFERENCE_ENGINE && (op != SUM || coeffs.empty()));
+               (backendId == DNN_BACKEND_INFERENCE_ENGINE &&
+                (preferableTarget != DNN_TARGET_MYRIAD || coeffs.empty()));
     }
 
     bool getMemoryShapes(const std::vector<MatShape> &inputs,
@@ -427,6 +428,7 @@ public:
         lp.type = "Eltwise";
         lp.precision = InferenceEngine::Precision::FP32;
         std::shared_ptr<InferenceEngine::EltwiseLayer> ieLayer(new InferenceEngine::EltwiseLayer(lp));
+        ieLayer->coeff = coeffs;
         if (op == SUM)
             ieLayer->_operation = InferenceEngine::EltwiseLayer::Sum;
         else if (op == PROD)
diff --git a/modules/dnn/src/tensorflow/tf_importer.cpp b/modules/dnn/src/tensorflow/tf_importer.cpp
index a465b7d363..6ce99d6610 100644
--- a/modules/dnn/src/tensorflow/tf_importer.cpp
+++ b/modules/dnn/src/tensorflow/tf_importer.cpp
@@ -939,7 +939,7 @@ void TFImporter::populateNet(Net dstNet)
             if (getDataLayout(name, data_layouts) == DATA_LAYOUT_UNKNOWN)
                 data_layouts[name] = DATA_LAYOUT_NHWC;
         }
-        else if (type == "BiasAdd" || type == "Add")
+        else if (type == "BiasAdd" || type == "Add" || type == "Sub")
         {
             bool haveConst = false;
             for(int ii = 0; !haveConst && ii < layer.input_size(); ++ii)
@@ -953,6 +953,8 @@ void TFImporter::populateNet(Net dstNet)
             {
                 Mat values = getTensorContent(getConstBlob(layer, value_id));
                 CV_Assert(values.type() == CV_32FC1);
+                if (type == "Sub")
+                    values *= -1.0f;
 
                 int id;
                 if (values.total() == 1)  // is a scalar.
@@ -973,6 +975,12 @@ void TFImporter::populateNet(Net dstNet)
             else
             {
                 layerParams.set("operation", "sum");
+                if (type == "Sub")
+                {
+                    static float subCoeffs[] = {1.f, -1.f};
+                    layerParams.set("coeff", DictValue::arrayReal<float*>(subCoeffs, 2));
+                }
+
                 int id = dstNet.addLayer(name, "Eltwise", layerParams);
                 layer_id[name] = id;
 
@@ -985,36 +993,6 @@ void TFImporter::populateNet(Net dstNet)
                 }
             }
         }
-        else if (type == "Sub")
-        {
-            bool haveConst = false;
-            for(int ii = 0; !haveConst && ii < layer.input_size(); ++ii)
-            {
-                Pin input = parsePin(layer.input(ii));
-                haveConst = value_id.find(input.name) != value_id.end();
-            }
-            CV_Assert(haveConst);
-
-            Mat values = getTensorContent(getConstBlob(layer, value_id));
-            CV_Assert(values.type() == CV_32FC1);
-            values *= -1.0f;
-
-            int id;
-            if (values.total() == 1)  // is a scalar.
-            {
-                layerParams.set("shift", values.at<float>(0));
-                id = dstNet.addLayer(name, "Power", layerParams);
-            }
-            else  // is a vector
-            {
-                layerParams.blobs.resize(1, values);
-                id = dstNet.addLayer(name, "Shift", layerParams);
-            }
-            layer_id[name] = id;
-
-            // one input only
-            connect(layer_id, dstNet, parsePin(layer.input(0)), id, 0);
-        }
         else if (type == "MatMul")
         {
             CV_Assert(layer.input_size() == 2);
diff --git a/modules/dnn/test/test_tf_importer.cpp b/modules/dnn/test/test_tf_importer.cpp
index 5f944776dc..7c53f8a93f 100644
--- a/modules/dnn/test/test_tf_importer.cpp
+++ b/modules/dnn/test/test_tf_importer.cpp
@@ -139,9 +139,10 @@ TEST_P(Test_TensorFlow_layers, padding)
     runTensorFlowNet("keras_pad_concat");
 }
 
-TEST_P(Test_TensorFlow_layers, eltwise_add_mul)
+TEST_P(Test_TensorFlow_layers, eltwise)
 {
     runTensorFlowNet("eltwise_add_mul");
+    runTensorFlowNet("eltwise_sub");
 }
 
 TEST_P(Test_TensorFlow_layers, pad_and_concat)
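
For reference, a minimal usage sketch (not part of the patch) of how the imported Sub operation could be exercised through the public OpenCV DNN API. The model file name "model_with_sub.pb" and the 1x3x224x224 input shape are hypothetical placeholders. With this change, a Sub node with two variable inputs maps to an Eltwise layer with coefficients {1, -1}, while a Sub against a constant folds into a Power (scalar) or Shift (vector) layer with negated values.

#include <opencv2/core.hpp>
#include <opencv2/dnn.hpp>

int main()
{
    // Load a frozen TensorFlow graph that contains an element-wise Sub node
    // (hypothetical file name, for illustration only).
    cv::dnn::Net net = cv::dnn::readNetFromTensorflow("model_with_sub.pb");

    // Feed a random NCHW blob and run inference; the Sub node executes either
    // as Eltwise(sum, coeff = {1, -1}) or as Power/Shift, depending on whether
    // one of its inputs is a constant.
    int sz[] = {1, 3, 224, 224};
    cv::Mat blob(4, sz, CV_32F);
    cv::randu(blob, cv::Scalar(-1), cv::Scalar(1));
    net.setInput(blob);
    cv::Mat out = net.forward();
    return 0;
}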