diff --git a/modules/dnn/include/opencv2/dnn/all_layers.hpp b/modules/dnn/include/opencv2/dnn/all_layers.hpp
index efbc8b131e..0c964df06b 100644
--- a/modules/dnn/include/opencv2/dnn/all_layers.hpp
+++ b/modules/dnn/include/opencv2/dnn/all_layers.hpp
@@ -250,7 +250,7 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN
         std::vector<size_t> pads_begin, pads_end;
         CV_DEPRECATED_EXTERNAL Size kernel, stride, pad;
         CV_DEPRECATED_EXTERNAL int pad_l, pad_t, pad_r, pad_b;
-        CV_DEPRECATED_EXTERNAL bool globalPooling; //!< Flag is true if at least one of the axes is global pooled.
+        bool globalPooling; //!< Flag is true if at least one of the axes is global pooled.
         std::vector<bool> isGlobalPooling;
         bool computeMaxIdx;
         String padMode;
diff --git a/modules/dnn/src/tensorflow/tf_importer.cpp b/modules/dnn/src/tensorflow/tf_importer.cpp
index ef0588c9df..7dffb1c04f 100644
--- a/modules/dnn/src/tensorflow/tf_importer.cpp
+++ b/modules/dnn/src/tensorflow/tf_importer.cpp
@@ -2054,12 +2054,11 @@ void TFImporter::populateNet(Net dstNet)
             int num = (int)getLayerAttr(layer, "N").i();
             CV_Assert(layer.input_size() == num);
             std::string base_name = name + "/reshape_";
-            std::vector<std::string> reshape_names;
+            std::vector<int> reshape_ids;
             for (int i = 0; i < num; i++) {
                 std::ostringstream ss;
                 ss << i;
                 std::string reshape_name = base_name + ss.str();
-                reshape_names.push_back(reshape_name);
                 LayerParams reshapeLP;
                 reshapeLP.set("axis", dim);
                 reshapeLP.set("num_axes", 1);
@@ -2067,6 +2066,7 @@ void TFImporter::populateNet(Net dstNet)
                 reshapeLP.set("dim", DictValue::arrayInt(&outShape[0], 2));
                 int id = dstNet.addLayer(reshape_name, "Reshape", reshapeLP);
                 layer_id[reshape_name] = id;
+                reshape_ids.push_back(id);
                 connect(layer_id, dstNet, parsePin(layer.input(i)), id, 0);
             }
 
@@ -2075,7 +2075,7 @@ void TFImporter::populateNet(Net dstNet)
             layer_id[name] = id;
 
             for (int li = 0; li < num; li++)
-                connect(layer_id, dstNet, Pin(reshape_names[li]), id, li);
+                dstNet.connect(reshape_ids[li], 0, id, li);
         }
         else if (type == "ClipByValue")
         {