From eb7b45d26b6cb858424f570e47ed78b916c8d906 Mon Sep 17 00:00:00 2001
From: Alexander Alekhin
Date: Wed, 12 Jan 2022 03:46:13 +0000
Subject: [PATCH] dnn: fix API - explicit ctors, const methods

---
 modules/dnn/include/opencv2/dnn/dict.hpp    |  14 +-
 modules/dnn/include/opencv2/dnn/dnn.hpp     |  33 ++--
 modules/dnn/misc/objc/gen_dict.json         |   6 +-
 modules/dnn/src/dnn.cpp                     | 165 +++++++++++---
 modules/dnn/src/dnn_common.hpp              |   4 +-
 modules/dnn/src/layers/recurrent_layers.cpp |   2 +-
 6 files changed, 126 insertions(+), 98 deletions(-)

diff --git a/modules/dnn/include/opencv2/dnn/dict.hpp b/modules/dnn/include/opencv2/dnn/dict.hpp
index 463d314bee..059ce9b28e 100644
--- a/modules/dnn/include/opencv2/dnn/dict.hpp
+++ b/modules/dnn/include/opencv2/dnn/dict.hpp
@@ -60,13 +60,13 @@ CV__DNN_INLINE_NS_BEGIN
 struct CV_EXPORTS_W DictValue
 {
     DictValue(const DictValue &r);
-    DictValue(bool i) : type(Param::INT), pi(new AutoBuffer<int64,1>) { (*pi)[0] = i ? 1 : 0; } //!< Constructs integer scalar
-    DictValue(int64 i = 0) : type(Param::INT), pi(new AutoBuffer<int64,1>) { (*pi)[0] = i; } //!< Constructs integer scalar
-    CV_WRAP DictValue(int i) : type(Param::INT), pi(new AutoBuffer<int64,1>) { (*pi)[0] = i; } //!< Constructs integer scalar
-    DictValue(unsigned p) : type(Param::INT), pi(new AutoBuffer<int64,1>) { (*pi)[0] = p; } //!< Constructs integer scalar
-    CV_WRAP DictValue(double p) : type(Param::REAL), pd(new AutoBuffer<double,1>) { (*pd)[0] = p; } //!< Constructs floating point scalar
-    CV_WRAP DictValue(const String &s) : type(Param::STRING), ps(new AutoBuffer<String,1>) { (*ps)[0] = s; } //!< Constructs string scalar
-    DictValue(const char *s) : type(Param::STRING), ps(new AutoBuffer<String,1>) { (*ps)[0] = s; } //!< @overload
+    explicit DictValue(bool i) : type(Param::INT), pi(new AutoBuffer<int64,1>) { (*pi)[0] = i ? 1 : 0; } //!< Constructs integer scalar
+    explicit DictValue(int64 i = 0) : type(Param::INT), pi(new AutoBuffer<int64,1>) { (*pi)[0] = i; } //!< Constructs integer scalar
+    CV_WRAP explicit DictValue(int i) : type(Param::INT), pi(new AutoBuffer<int64,1>) { (*pi)[0] = i; } //!< Constructs integer scalar
+    explicit DictValue(unsigned p) : type(Param::INT), pi(new AutoBuffer<int64,1>) { (*pi)[0] = p; } //!< Constructs integer scalar
+    CV_WRAP explicit DictValue(double p) : type(Param::REAL), pd(new AutoBuffer<double,1>) { (*pd)[0] = p; } //!< Constructs floating point scalar
+    CV_WRAP explicit DictValue(const String &s) : type(Param::STRING), ps(new AutoBuffer<String,1>) { (*ps)[0] = s; } //!< Constructs string scalar
+    explicit DictValue(const char *s) : type(Param::STRING), ps(new AutoBuffer<String,1>) { (*ps)[0] = s; } //!< @overload
 
     template<typename TypeIter>
     static DictValue arrayInt(TypeIter begin, int size);    //!< Constructs integer array
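Note: making these constructors explicit is the source-incompatible part of the
patch: copy-initialization and implicit argument conversions to DictValue stop
compiling, while direct initialization keeps working. A minimal caller-side
sketch of the effect (hypothetical user code, not taken from OpenCV):

    cv::dnn::DictValue a(42);        // OK: direct initialization
    cv::dnn::DictValue b("relu");    // OK: direct initialization (const char* ctor)
    // cv::dnn::DictValue c = 42;    // error after this patch: copy-initialization
    //                               // would require the now-explicit conversion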
diff --git a/modules/dnn/include/opencv2/dnn/dnn.hpp b/modules/dnn/include/opencv2/dnn/dnn.hpp
index d6b29cfcf3..8b2cba9ce1 100644
--- a/modules/dnn/include/opencv2/dnn/dnn.hpp
+++ b/modules/dnn/include/opencv2/dnn/dnn.hpp
@@ -134,7 +134,7 @@ CV__DNN_INLINE_NS_BEGIN
     class BackendNode
     {
     public:
-        BackendNode(int backendId);
+        explicit BackendNode(int backendId);
 
         virtual ~BackendNode(); //!< Virtual destructor to make polymorphism.
 
@@ -277,18 +277,18 @@ CV__DNN_INLINE_NS_BEGIN
          * Each layer input and output can be labeled to easily identify them using "%<layer_name>[.output_name]" notation.
          * This method maps label of input blob to its index into input vector.
          */
-        virtual int inputNameToIndex(String inputName);
+        virtual int inputNameToIndex(String inputName);  // FIXIT const
         /** @brief Returns index of output blob in output array.
          *  @see inputNameToIndex()
          */
-        CV_WRAP virtual int outputNameToIndex(const String& outputName);
+        CV_WRAP virtual int outputNameToIndex(const String& outputName);  // FIXIT const
 
        /**
         * @brief Ask layer if it support specific backend for doing computations.
         * @param[in] backendId computation backend identifier.
         * @see Backend
         */
-        virtual bool supportBackend(int backendId);
+        virtual bool supportBackend(int backendId);  // FIXIT const
 
        /**
        * @brief Returns Halide backend node.
@@ -495,18 +495,29 @@ CV__DNN_INLINE_NS_BEGIN
         /** @brief Converts string name of the layer to the integer identifier.
          *  @returns id of the layer, or -1 if the layer wasn't found.
          */
-        CV_WRAP int getLayerId(const String &layer);
+        CV_WRAP int getLayerId(const String &layer) const;
 
         CV_WRAP std::vector<String> getLayerNames() const;
 
-        /** @brief Container for strings and integers. */
+        /** @brief Container for strings and integers.
+         *
+         *  @deprecated Use getLayerId() with int result.
+         */
         typedef DictValue LayerId;
 
         /** @brief Returns pointer to layer with specified id or name which the network use. */
-        CV_WRAP Ptr<Layer> getLayer(LayerId layerId);
+        CV_WRAP Ptr<Layer> getLayer(int layerId) const;
+        /** @overload
+         *  @deprecated Use int getLayerId(const String &layer)
+         */
+        CV_WRAP inline Ptr<Layer> getLayer(const String& layerName) const { return getLayer(getLayerId(layerName)); }
+        /** @overload
+         *  @deprecated to be removed
+         */
+        CV_WRAP Ptr<Layer> getLayer(const LayerId& layerId) const;
 
         /** @brief Returns pointers to input layers of specific layer. */
-        std::vector<Ptr<Layer> > getLayerInputs(LayerId layerId); // FIXIT: CV_WRAP
+        std::vector<Ptr<Layer> > getLayerInputs(int layerId) const; // FIXIT: CV_WRAP
 
         /** @brief Connects output of the first layer to input of the second layer.
          *  @param outPin descriptor of the first layer output.
@@ -662,14 +673,16 @@ CV__DNN_INLINE_NS_BEGIN
          *  @note If shape of the new blob differs from the previous shape,
          *  then the following forward pass may fail.
          */
-        CV_WRAP void setParam(LayerId layer, int numParam, const Mat &blob);
+        CV_WRAP void setParam(int layer, int numParam, const Mat &blob);
+        CV_WRAP inline void setParam(const String& layerName, int numParam, const Mat &blob) { return setParam(getLayerId(layerName), numParam, blob); }
 
         /** @brief Returns parameter blob of the layer.
          *  @param layer name or id of the layer.
          *  @param numParam index of the layer parameter in the Layer::blobs array.
          *  @see Layer::blobs
          */
-        CV_WRAP Mat getParam(LayerId layer, int numParam = 0);
+        CV_WRAP Mat getParam(int layer, int numParam = 0) const;
+        CV_WRAP inline Mat getParam(const String& layerName, int numParam = 0) const { return getParam(getLayerId(layerName), numParam); }
 
         /** @brief Returns indexes of layers with unconnected outputs.
          */
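Note: the DictValue-based LayerId overloads remain only for backward
compatibility and are deprecated; new code should resolve a layer name to an
int id once and then use the int overloads, which are also const now. A
migration sketch (model file and layer name are placeholders):

    cv::dnn::Net net = cv::dnn::readNet("model.onnx");  // placeholder model
    const int id = net.getLayerId("conv1");             // resolve the name once
    cv::Ptr<cv::dnn::Layer> layer = net.getLayer(id);   // int overload, const
    cv::Mat weights = net.getParam(id, 0);              // getParam(int, int) const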
diff --git a/modules/dnn/misc/objc/gen_dict.json b/modules/dnn/misc/objc/gen_dict.json
index e6d561fba0..6072bdfc01 100644
--- a/modules/dnn/misc/objc/gen_dict.json
+++ b/modules/dnn/misc/objc/gen_dict.json
@@ -18,8 +18,12 @@
             "(long)getFLOPS:(NSArray<IntVector*>*)netInputShapes" : { "getFLOPS" : {"name" : "getFLOPSWithNetInputShapes"} },
             "(long)getFLOPS:(int)layerId netInputShape:(IntVector*)netInputShape" : { "getFLOPS" : {"name" : "getFLOPSWithLayerId"} },
             "(long)getFLOPS:(int)layerId netInputShapes:(NSArray<IntVector*>*)netInputShapes" : { "getFLOPS" : {"name" : "getFLOPSWithLayerId"} },
+            "(Layer*)getLayer:(NSString*)layerName" : { "getLayer" : {"name" : "getLayerByName"} },
+            "(Layer*)getLayer:(DictValue*)layerId" : { "getLayer" : {"name" : "getLayerByDictValue"} },
             "(void)getLayersShapes:(IntVector*)netInputShape layersIds:(IntVector*)layersIds inLayersShapes:(NSMutableArray<NSMutableArray<IntVector*>*>*)inLayersShapes outLayersShapes:(NSMutableArray<NSMutableArray<IntVector*>*>*)outLayersShapes" : { "getLayersShapes" : {"name" : "getLayersShapesWithNetInputShape"} },
-            "(void)getLayersShapes:(NSArray<IntVector*>*)netInputShapes layersIds:(IntVector*)layersIds inLayersShapes:(NSMutableArray<NSMutableArray<IntVector*>*>*)inLayersShapes outLayersShapes:(NSMutableArray<NSMutableArray<IntVector*>*>*)outLayersShapes" : { "getLayersShapes" : {"name" : "getLayersShapesWithNetInputShapes"} }
+            "(void)getLayersShapes:(NSArray<IntVector*>*)netInputShapes layersIds:(IntVector*)layersIds inLayersShapes:(NSMutableArray<NSMutableArray<IntVector*>*>*)inLayersShapes outLayersShapes:(NSMutableArray<NSMutableArray<IntVector*>*>*)outLayersShapes" : { "getLayersShapes" : {"name" : "getLayersShapesWithNetInputShapes"} },
+            "(Mat*)getParam:(NSString*)layerName numParam:(int)numParam" : { "getParam" : {"name" : "getParamByName"} },
+            "(void)setParam:(NSString*)layerName numParam:(int)numParam blob:(Mat*)blob" : { "setParam" : {"name" : "setParamByName"} }
         }
     },
     "type_dict": {
diff --git a/modules/dnn/src/dnn.cpp b/modules/dnn/src/dnn.cpp
index 67312dba78..ab2442c952 100644
--- a/modules/dnn/src/dnn.cpp
+++ b/modules/dnn/src/dnn.cpp
@@ -895,11 +895,11 @@ public:
     // layer blob.
     int numReferences(const LayerPin& lp)
     {
-        std::map<LayerPin, LayerPin>::iterator mapIt = reuseMap.find(lp);
+        std::map<LayerPin, LayerPin>::const_iterator mapIt = reuseMap.find(lp);
         CV_Assert(mapIt != reuseMap.end());
         LayerPin memHost = mapIt->second;
 
-        std::map<LayerPin, int>::iterator refIt = refCounter.find(memHost);
+        std::map<LayerPin, int>::const_iterator refIt = refCounter.find(memHost);
         CV_Assert(refIt != refCounter.end());
         return refIt->second;
     }
@@ -927,7 +927,7 @@ public:
     // Decrease references counter to allocated memory inside specific blob.
     void releaseReference(const LayerPin& lp)
    {
-        std::map<LayerPin, LayerPin>::iterator mapIt = reuseMap.find(lp);
+        std::map<LayerPin, LayerPin>::const_iterator mapIt = reuseMap.find(lp);
         CV_Assert(mapIt != reuseMap.end());
 
         std::map<LayerPin, int>::iterator refIt = refCounter.find(mapIt->second);
@@ -951,8 +951,8 @@ public:
         Mat bestBlob;
         LayerPin bestBlobPin;
 
-        std::map<LayerPin, Mat>::iterator hostIt;
-        std::map<LayerPin, int>::iterator refIt;
+        std::map<LayerPin, Mat>::const_iterator hostIt;
+        std::map<LayerPin, int>::const_iterator refIt;
 
         const int targetTotal = total(shape);
         int bestBlobTotal = INT_MAX;
@@ -964,7 +964,7 @@ public:
             // it might be used as output.
             if (refIt != refCounter.end() && refIt->second == 0)
             {
-                Mat& unusedBlob = hostIt->second;
+                const Mat& unusedBlob = hostIt->second;
                 if (unusedBlob.total() >= targetTotal &&
                     unusedBlob.total() < bestBlobTotal &&
                     unusedBlob.type() == dtype)
@@ -1177,7 +1177,7 @@ detail::NetImplBase::NetImplBase()
     // nothing
 }
 
-std::string detail::NetImplBase::getDumpFileNameBase()
+std::string detail::NetImplBase::getDumpFileNameBase() const
 {
     std::string dumpFileNameBase = cv::format("ocv_dnn_net_%05d_%02d", networkId, networkDumpCounter++);
     return dumpFileNameBase;
@@ -1230,7 +1230,6 @@ struct Net::Impl : public detail::NetImplBase
     bool fusion;
     bool isAsync;
     std::vector<int64> layersTimings;
-    Mat output_blob;
 
 #ifdef HAVE_CUDA
     struct CudaInfo_t
@@ -1329,7 +1328,7 @@ struct Net::Impl : public detail::NetImplBase
         std::vector< std::reference_wrapper<LayerData> > compileList; compileList.reserve(64);
         for (MapIdToLayerData::iterator it = layers.begin(); it != layers.end(); ++it)
         {
-            LayerData &ld = it->second;
+            LayerData& ld = it->second;
             Ptr<Layer> layer = ld.layerInstance;
             if (layer->supportBackend(DNN_BACKEND_HALIDE) && !ld.skip)
             {
@@ -1522,19 +1521,19 @@ struct Net::Impl : public detail::NetImplBase
         }
     }
 
-    int getLayerId(const String &layerName)
+    int getLayerId(const String &layerName) const
     {
-        std::map<String, int>::iterator it = layerNameToId.find(layerName);
+        std::map<String, int>::const_iterator it = layerNameToId.find(layerName);
         return (it != layerNameToId.end()) ? it->second : -1;
     }
 
-    int getLayerId(int id)
+    int getLayerId(int id) const
     {
-        MapIdToLayerData::iterator it = layers.find(id);
+        MapIdToLayerData::const_iterator it = layers.find(id);
         return (it != layers.end()) ? id : -1;
     }
 
-    int getLayerId(DictValue &layerDesc)
+    int getLayerId(DictValue &layerDesc) const
     {
         if (layerDesc.isInt())
             return getLayerId(layerDesc.get<int>());
@@ -1545,23 +1544,23 @@ struct Net::Impl : public detail::NetImplBase
         return -1;
     }
 
-    String getLayerName(int id)
+    String getLayerName(int id) const
     {
-        MapIdToLayerData::iterator it = layers.find(id);
+        MapIdToLayerData::const_iterator it = layers.find(id);
         return (it != layers.end()) ? it->second.name : "(unknown layer)";
     }
 
-    LayerData& getLayerData(int id)
+    LayerData& getLayerData(int id) const
     {
-        MapIdToLayerData::iterator it = layers.find(id);
+        MapIdToLayerData::const_iterator it = layers.find(id);
 
         if (it == layers.end())
             CV_Error(Error::StsObjectNotFound, format("Layer with requested id=%d not found", id));
 
-        return it->second;
+        return const_cast<LayerData&>(it->second);
     }
 
-    LayerData& getLayerData(const String &layerName)
+    LayerData& getLayerData(const String &layerName) const
     {
         int id = getLayerId(layerName);
 
@@ -1571,7 +1570,7 @@ struct Net::Impl : public detail::NetImplBase
         return getLayerData(id);
     }
 
-    LayerData& getLayerData(const DictValue &layerDesc)
+    LayerData& getLayerData(const DictValue &layerDesc) const
     {
         CV_Assert(layerDesc.isInt() || layerDesc.isString());
         if (layerDesc.isInt())
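Note: getLayerData() gains const qualifiers but still hands out a mutable
LayerData& through const_cast, because callers may instantiate the layer
lazily. This is logical constness by convention, not enforced by the type
system. The pattern, reduced to a self-contained sketch (names hypothetical,
not OpenCV code):

    #include <cassert>
    #include <map>

    struct LayerData { /* mutable layer state */ };

    struct Impl
    {
        std::map<int, LayerData> layers;

        LayerData& getLayerData(int id) const   // const method...
        {
            std::map<int, LayerData>::const_iterator it = layers.find(id);
            assert(it != layers.end());
            return const_cast<LayerData&>(it->second);  // ...mutable result
        }
    };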
@@ -1597,14 +1596,14 @@ struct Net::Impl : public detail::NetImplBase
         ld.inputBlobsId[inNum] = from;
     }
 
-    int resolvePinOutputName(LayerData &ld, const String &outName)
+    int resolvePinOutputName(LayerData &ld, const String &outName) const
     {
         if (outName.empty())
             return 0;
         return ld.getLayerInstance()->outputNameToIndex(outName);
     }
 
-    LayerPin getPinByAlias(const String &layerName)
+    LayerPin getPinByAlias(const String &layerName) const
     {
         LayerPin pin;
         pin.lid = (layerName.empty()) ? 0 : getLayerId(layerName);
@@ -1615,13 +1614,17 @@ struct Net::Impl : public detail::NetImplBase
         return pin;
     }
 
-    std::vector<LayerPin> getLayerOutPins(const String &layerName)
+    std::vector<LayerPin> getLayerOutPins(const String &layerName) const
     {
         int lid = (layerName.empty()) ? 0 : getLayerId(layerName);
 
-        std::vector<LayerPin> pins;
+        MapIdToLayerData::const_iterator it = layers.find(lid);
+        if (it == layers.end())
+            CV_Error_(Error::StsOutOfRange, ("Layer #%d is not valid", lid));
+        const size_t nOutputs = it->second.outputBlobs.size();
 
-        for (int i = 0; i < layers[lid].outputBlobs.size(); i++)
+        std::vector<LayerPin> pins;
+        for (int i = 0; i < nOutputs; i++)
         {
             pins.push_back(LayerPin(lid, i));
         }
@@ -2087,12 +2090,11 @@ struct Net::Impl : public detail::NetImplBase
         CV_TRACE_FUNCTION();
         CV_Assert_N(preferableBackend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH, haveInfEngine());
 
-        MapIdToLayerData::iterator it;
         Ptr<InfEngineNgraphNet> net;
 
-        for (it = layers.begin(); it != layers.end(); ++it)
+        for (MapIdToLayerData::const_iterator it = layers.begin(); it != layers.end(); ++it)
         {
-            LayerData &ld = it->second;
+            const LayerData& ld = it->second;
             if (ld.id == 0)
             {
                 CV_Assert((netInputLayer->outNames.empty() && ld.outputBlobsWrappers.size() == 1) ||
@@ -2128,9 +2130,9 @@ struct Net::Impl : public detail::NetImplBase
             InfEngineNgraphNet& ienet = *ieNode->net;
             ienet.reset();
 
-            for (it = layers.begin(); it != layers.end(); ++it)
+            for (MapIdToLayerData::iterator it = layers.begin(); it != layers.end(); ++it)
             {
-                LayerData &ld = it->second;
+                LayerData& ld = it->second;
                 if (ld.id == 0)
                 {
                     for (int i = 0; i < ld.inputBlobsWrappers.size(); ++i)
@@ -2172,9 +2174,9 @@ struct Net::Impl : public detail::NetImplBase
         // Build Inference Engine networks from sets of layers that support this
         // backend. Split a whole model on several Inference Engine networks if
         // some of layers are not implemented.
-        for (it = layers.begin(); it != layers.end(); ++it)
+        for (MapIdToLayerData::iterator it = layers.begin(); it != layers.end(); ++it)
         {
-            LayerData &ld = it->second;
+            LayerData& ld = it->second;
 
             if (ld.id == 0 && ld.skip)
                 continue;
@@ -2430,10 +2432,9 @@ struct Net::Impl : public detail::NetImplBase
         CV_TRACE_FUNCTION();
         CV_Assert_N(preferableBackend == DNN_BACKEND_WEBNN, haveWebnn());
 
-        MapIdToLayerData::iterator it;
         Ptr<WebnnNet> net;
 
-        for (it = layers.begin(); it != layers.end(); ++it)
+        for (MapIdToLayerData::iterator it = layers.begin(); it != layers.end(); ++it)
         {
             LayerData &ld = it->second;
             if (ld.id == 0)
@@ -2462,7 +2463,7 @@ struct Net::Impl : public detail::NetImplBase
         // Build WebNN networks from sets of layers that support this
         // backend. Split a whole model on several WebNN networks if
         // some of layers are not implemented.
-        for (it = layers.begin(); it != layers.end(); ++it)
+        for (MapIdToLayerData::iterator it = layers.begin(); it != layers.end(); ++it)
         {
             LayerData &ld = it->second;
 
@@ -2662,8 +2663,7 @@ struct Net::Impl : public detail::NetImplBase
         if (!haveVulkan())
             return;
 
-        MapIdToLayerData::iterator it = layers.begin();
-        for (; it != layers.end(); it++)
+        for (MapIdToLayerData::iterator it = layers.begin(); it != layers.end(); it++)
         {
             LayerData &ld = it->second;
             Ptr<Layer> layer = ld.layerInstance;
@@ -2812,7 +2812,7 @@ struct Net::Impl : public detail::NetImplBase
             ld.inputLayersId.insert(ld.inputBlobsId[i].lid);
 
         //allocate parents
-        for (set<int>::iterator i = ld.inputLayersId.begin(); i != ld.inputLayersId.end(); i++)
+        for (set<int>::const_iterator i = ld.inputLayersId.begin(); i != ld.inputLayersId.end(); i++)
             allocateLayer(*i, layersShapes);
 
         //bind inputs
@@ -2902,8 +2902,7 @@ struct Net::Impl : public detail::NetImplBase
 
         // we try to embed this activation into the convolution and disable separate execution of the activation
         std::set<LayerPin> pinsToKeep(blobsToKeep_.begin(), blobsToKeep_.end());
-        MapIdToLayerData::iterator it;
-        for (it = layers.begin(); it != layers.end(); it++)
+        for (MapIdToLayerData::const_iterator it = layers.begin(); it != layers.end(); it++)
         {
             int lid = it->first;
             LayerData& ld = layers[lid];
@@ -3450,8 +3449,7 @@ struct Net::Impl : public detail::NetImplBase
     {
         CV_TRACE_FUNCTION();
 
-        MapIdToLayerData::iterator it;
-        for (it = layers.begin(); it != layers.end(); it++)
+        for (MapIdToLayerData::iterator it = layers.begin(); it != layers.end(); it++)
             it->second.flag = 0;
 
         CV_Assert(!layers[0].outputBlobs.empty());
@@ -3485,7 +3483,7 @@ struct Net::Impl : public detail::NetImplBase
         // Fake references to input blobs.
         for (int i = 0; i < layers[0].outputBlobs.size(); ++i)
             blobManager.addReference(LayerPin(0, i));
-        for (it = layers.begin(); it != layers.end(); ++it)
+        for (MapIdToLayerData::const_iterator it = layers.begin(); it != layers.end(); ++it)
         {
             const LayerData& ld = it->second;
             blobManager.addReferences(ld.inputBlobsId);
@@ -3496,7 +3494,7 @@ struct Net::Impl : public detail::NetImplBase
             blobManager.addReference(blobsToKeep_[i]);
         }
 
-        for (it = layers.begin(); it != layers.end(); it++)
+        for (MapIdToLayerData::const_iterator it = layers.begin(); it != layers.end(); it++)
         {
             int lid = it->first;
             allocateLayer(lid, layersShapes);
@@ -3517,7 +3515,11 @@ struct Net::Impl : public detail::NetImplBase
             TickMeter tm;
             tm.start();
 
+#ifndef HAVE_VULKAN
+            std::map<int, Ptr<BackendNode> >::const_iterator it = ld.backendNodes.find(preferableBackend);
+#else
             std::map<int, Ptr<BackendNode> >::iterator it = ld.backendNodes.find(preferableBackend);
+#endif
             if (preferableBackend == DNN_BACKEND_OPENCV || it == ld.backendNodes.end() || it->second.empty())
             {
                 if (isAsync)
@@ -3711,6 +3713,7 @@ struct Net::Impl : public detail::NetImplBase
             {
                 forwardWebnn(ld.outputBlobsWrappers, node, isAsync);
             }
+#ifdef HAVE_VULKAN
             else if (preferableBackend == DNN_BACKEND_VKCOM)
             {
                 try
@@ -3724,6 +3727,7 @@ struct Net::Impl : public detail::NetImplBase
                     forwardLayer(ld);
                 }
             }
+#endif
             else
             {
                 CV_Error(Error::StsNotImplemented, "Unknown backend identifier");
@@ -3748,8 +3752,7 @@ struct Net::Impl : public detail::NetImplBase
 
         if (clearFlags)
         {
-            MapIdToLayerData::iterator it;
-            for (it = layers.begin(); it != layers.end(); it++)
+            for (MapIdToLayerData::iterator it = layers.begin(); it != layers.end(); it++)
                 it->second.flag = 0;
         }
 
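Note: the pattern repeated through these hunks is mechanical: the iterator
moves from a function-scope declaration into the for statement itself, and
becomes a const_iterator wherever the loop body only reads. That keeps each
iterator scoped to its loop and is a prerequisite for marking the enclosing
methods const. A reduced before/after sketch (simplified types, not OpenCV
code):

    #include <map>

    void before(std::map<int, int>& m)
    {
        std::map<int, int>::iterator it;               // leaks into outer scope
        for (it = m.begin(); it != m.end(); ++it) {}   // may also write
    }

    void after(const std::map<int, int>& m)            // const& now possible
    {
        for (std::map<int, int>::const_iterator it = m.begin(); it != m.end(); ++it) {}
    }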
@@ -3758,8 +3761,7 @@ struct Net::Impl : public detail::NetImplBase
             return;
 
         //forward parents
-        MapIdToLayerData::iterator it;
-        for (it = layers.begin(); it != layers.end() && (it->second.id < ld.id); ++it)
+        for (MapIdToLayerData::iterator it = layers.begin(); it != layers.end() && (it->second.id < ld.id); ++it)
         {
             LayerData &ld = it->second;
             if (ld.flag)
@@ -3845,7 +3847,7 @@ struct Net::Impl : public detail::NetImplBase
             for(int i = 0; i < inputLayerIds.size(); i++)
             {
                 int layerId = inputLayerIds[i].lid;
-                LayersShapesMap::iterator it =
+                LayersShapesMap::const_iterator it =
                         inOutShapes.find(layerId);
                 if(it == inOutShapes.end() ||
                         it->second.out.empty())
@@ -3928,7 +3930,7 @@ struct Net::Impl : public detail::NetImplBase
         inOutShapes.clear();
 
         inOutShapes[0].in = netInputShapes; //insert shape for first input layer
-        for (MapIdToLayerData::iterator it = layers.begin();
+        for (MapIdToLayerData::const_iterator it = layers.begin();
              it != layers.end(); it++)
         {
             getLayerShapesRecursively(it->first, inOutShapes);
@@ -3969,12 +3971,11 @@ struct Net::Impl : public detail::NetImplBase
         CV_LOG_DEBUG(NULL, toString(inputShapes, "Network input shapes"));
         LayersShapesMap layersShapes;
         layersShapes[0].in = inputShapes;
-        for (MapIdToLayerData::iterator it = layers.begin();
-             it != layers.end(); it++)
+        for (MapIdToLayerData::iterator it = layers.begin(); it != layers.end(); it++)
         {
             int layerId = it->first;
             LayerData& layerData = it->second;
-            std::vector<LayerPin>& inputLayerIds = layerData.inputBlobsId;
+            const std::vector<LayerPin>& inputLayerIds = layerData.inputBlobsId;
             LayerShapes& layerShapes = layersShapes[layerId];
             CV_LOG_DEBUG(NULL, "layer " << layerId << ": [" << layerData.type << "]:(" << layerData.name << ") with inputs.size=" << inputLayerIds.size());
             if (layerShapes.in.empty())
@@ -3984,7 +3985,7 @@ struct Net::Impl : public detail::NetImplBase
                     const LayerPin& inputPin = inputLayerIds[i];
                     int inputLayerId = inputPin.lid;
                     CV_LOG_DEBUG(NULL, "    input[" << i << "] " << inputLayerId << ":" << inputPin.oid << " as [" << layers[inputLayerId].type << "]:(" << layers[inputLayerId].name << ")");
-                    LayersShapesMap::iterator inputIt = layersShapes.find(inputLayerId);
+                    LayersShapesMap::const_iterator inputIt = layersShapes.find(inputLayerId);
                     if (inputIt == layersShapes.end() || inputIt->second.out.empty())
                     {
                         getLayerShapesRecursively(inputLayerId, layersShapes);
@@ -4001,19 +4002,23 @@ struct Net::Impl : public detail::NetImplBase
         CV_LOG_DEBUG(NULL, "updateLayersShapes() - DONE");
     }
 
-    LayerPin getLatestLayerPin(const std::vector<LayerPin>& pins)
+    LayerPin getLatestLayerPin(const std::vector<LayerPin>& pins) const
     {
         return *std::max_element(pins.begin(), pins.end());
     }
 
-    Mat getBlob(const LayerPin& pin)
+    Mat getBlob(const LayerPin& pin) const
     {
         CV_TRACE_FUNCTION();
 
         if (!pin.valid())
             CV_Error(Error::StsObjectNotFound, "Requested blob not found");
 
-        LayerData &ld = layers[pin.lid];
+        MapIdToLayerData::const_iterator it = layers.find(pin.lid);
+        if (it == layers.end())
+            CV_Error_(Error::StsOutOfRange, ("Layer #%d is not valid (output #%d requested)", pin.lid, pin.oid));
+
+        const LayerData &ld = it->second;
         if ((size_t)pin.oid >= ld.outputBlobs.size())
         {
             CV_Error(Error::StsOutOfRange, format("Layer \"%s\" produce only %zu outputs, "
@@ -4029,6 +4034,7 @@ struct Net::Impl : public detail::NetImplBase
 
         if (ld.outputBlobs[pin.oid].depth() == CV_16S)
         {
+            Mat output_blob;
             convertFp16(ld.outputBlobs[pin.oid], output_blob);
             return output_blob;
         }
@@ -4036,7 +4042,7 @@ struct Net::Impl : public detail::NetImplBase
         return ld.outputBlobs[pin.oid];
     }
 
-    Mat getBlob(String outputName)
+    Mat getBlob(String outputName) const
     {
         return getBlob(getPinByAlias(outputName));
     }
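Note: dropping the Mat output_blob member (removed in the @@ -1230 hunk above)
in favor of a local inside getBlob() is what allows getBlob() to be const: the
FP16-to-FP32 conversion writes into a fresh local Mat instead of caching into
shared Impl state, which also removes a hazard when results are fetched from
several threads. A reduced sketch of the new control flow (uses only
cv::convertFp16 from opencv2/core.hpp; not the verbatim patch code):

    #include <opencv2/core.hpp>

    cv::Mat fetchOutput(const cv::Mat& blob)
    {
        if (blob.depth() == CV_16S)        // FP16 payload stored as CV_16S
        {
            cv::Mat out;                   // local, not a shared member
            cv::convertFp16(blob, out);    // convert to FP32
            return out;
        }
        return blob;                       // already FP32: return as-is
    }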
@@ -4096,9 +4102,9 @@ struct Net::Impl : public detail::NetImplBase
     Net createNetworkFromModelOptimizer(InferenceEngine::CNNNetwork& ieNet);
 #endif
 
-    string dump();
+    string dump() const;
 
-    void dumpNetworkToFile()
+    void dumpNetworkToFile() const
     {
 #ifndef OPENCV_DNN_DISABLE_NETWORK_AUTO_DUMP
         string dumpFileNameBase = getDumpFileNameBase();
@@ -5059,7 +5065,7 @@ void Net::setInput(InputArray blob, const String& name, double scalefactor, const Scalar& mean)
     impl->netWasAllocated = impl->netWasAllocated && oldShape;
 }
 
-Mat Net::getParam(LayerId layer, int numParam)
+Mat Net::getParam(int layer, int numParam) const
 {
     LayerData &ld = impl->getLayerData(layer);
     std::vector<Mat> &layerBlobs = ld.getLayerInstance()->blobs;
@@ -5067,7 +5073,7 @@ Mat Net::getParam(LayerId layer, int numParam)
     return layerBlobs[numParam];
 }
 
-void Net::setParam(LayerId layer, int numParam, const Mat &blob)
+void Net::setParam(int layer, int numParam, const Mat &blob)
 {
     LayerData &ld = impl->getLayerData(layer);
 
@@ -5077,7 +5083,7 @@ void Net::setParam(LayerId layer, int numParam, const Mat &blob)
     layerBlobs[numParam] = blob;
 }
 
-int Net::getLayerId(const String &layer)
+int Net::getLayerId(const String &layer) const
 {
     return impl->getLayerId(layer);
 }
@@ -5120,7 +5126,7 @@ String Net::dump()
     return impl->dump();
 }
 
-string Net::Impl::dump()
+string Net::Impl::dump() const
 {
     bool hasInput = !netInputLayer->inputsData.empty();
 
@@ -5388,13 +5394,18 @@ void Net::dumpToFile(const String& path) {
     file.close();
 }
 
-Ptr<Layer> Net::getLayer(LayerId layerId)
+Ptr<Layer> Net::getLayer(int layerId) const
+{
+    LayerData &ld = impl->getLayerData(layerId);
+    return ld.getLayerInstance();
+}
+Ptr<Layer> Net::getLayer(const LayerId& layerId) const
 {
     LayerData &ld = impl->getLayerData(layerId);
     return ld.getLayerInstance();
 }
 
-std::vector<Ptr<Layer> > Net::getLayerInputs(LayerId layerId)
+std::vector<Ptr<Layer> > Net::getLayerInputs(int layerId) const
 {
     LayerData &ld = impl->getLayerData(layerId);
 
@@ -5413,7 +5424,7 @@ std::vector<String> Net::getLayerNames() const
     std::vector<String> res;
     res.reserve(impl->layers.size());
 
-    Impl::MapIdToLayerData::iterator it;
+    Impl::MapIdToLayerData::const_iterator it;
     for (it = impl->layers.begin(); it != impl->layers.end(); it++)
     {
         if (it->second.id) //skip Data layer
@@ -5432,11 +5443,11 @@ std::vector<int> Net::getUnconnectedOutLayers() const
 {
     std::vector<int> layersIds;
 
-    Impl::MapIdToLayerData::iterator it;
+    Impl::MapIdToLayerData::const_iterator it;
     for (it = impl->layers.begin(); it != impl->layers.end(); it++)
     {
         int lid = it->first;
-        LayerData &ld = it->second;
+        const LayerData &ld = it->second;
 
         if (ld.requiredOutputs.size() == 0)
             layersIds.push_back(lid);
@@ -5536,13 +5547,13 @@ int64 Net::getFLOPS(const MatShape& netInputShape) const
 int64 Net::getFLOPS(const int layerId,
               const std::vector<MatShape>& netInputShapes) const
 {
-    Impl::MapIdToLayerData::iterator layer = impl->layers.find(layerId);
+    Impl::MapIdToLayerData::const_iterator layer = impl->layers.find(layerId);
     CV_Assert(layer != impl->layers.end());
 
     LayerShapes shapes;
     impl->getLayerShapes(netInputShapes, layerId, shapes);
 
-    return layer->second.getLayerInstance()->getFLOPS(shapes.in, shapes.out);
+    return const_cast<LayerData&>(layer->second).getLayerInstance()->getFLOPS(shapes.in, shapes.out);
 }
 
 int64 Net::getFLOPS(const int layerId,
@@ -5556,7 +5567,7 @@ void Net::getLayerTypes(std::vector<String>& layersTypes) const
     layersTypes.clear();
 
     std::map<String, int> layers;
-    for (Impl::MapIdToLayerData::iterator it = impl->layers.begin();
+    for (Impl::MapIdToLayerData::const_iterator it = impl->layers.begin();
         it != impl->layers.end(); it++)
     {
         if (layers.find(it->second.type) == layers.end())
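Note: the const_cast in getFLOPS() has the same root cause as the one in
Impl::getLayerData(): LayerData::getLayerInstance() is non-const because it may
create the Layer object on first use, so a const_iterator's element has to be
cast before the instance can be materialized. A self-contained sketch of that
lazy-creation pattern (names hypothetical, not OpenCV code):

    #include <memory>

    struct Layer { /* ... */ };

    struct LayerData
    {
        std::shared_ptr<Layer> instance;

        Layer* getLayerInstance()          // non-const: creates on first use
        {
            if (!instance)
                instance = std::make_shared<Layer>();
            return instance.get();
        }
    };

    // A const-qualified API must cast before triggering the lazy creation:
    //     const_cast<LayerData&>(ld).getLayerInstance();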
@@ -5564,7 +5575,7 @@ void Net::getLayerTypes(std::vector<String>& layersTypes) const
         layers[it->second.type]++;
     }
 
-    for (std::map<String, int>::iterator it = layers.begin();
+    for (std::map<String, int>::const_iterator it = layers.begin();
         it != layers.end(); it++)
     {
         layersTypes.push_back(it->first);
@@ -5574,7 +5585,7 @@ void Net::getLayerTypes(std::vector<String>& layersTypes) const
 int Net::getLayersCount(const String& layerType) const
 {
     int count = 0;
-    for (Impl::MapIdToLayerData::iterator it = impl->layers.begin();
+    for (Impl::MapIdToLayerData::const_iterator it = impl->layers.begin();
         it != impl->layers.end(); it++)
     {
         if (it->second.type == layerType)
@@ -5589,7 +5600,7 @@ void Net::getMemoryConsumption(const int layerId,
 {
     CV_TRACE_FUNCTION();
 
-    Impl::MapIdToLayerData::iterator layer = impl->layers.find(layerId);
+    Impl::MapIdToLayerData::const_iterator layer = impl->layers.find(layerId);
     CV_Assert(layer != impl->layers.end());
 
     weights = blobs = 0;
@@ -5658,7 +5669,7 @@ void Net::getMemoryConsumption(const std::vector<MatShape>& netInputShapes,
     for(int i = 0; i < layerIds.size(); i++)
     {
         int w = 0, b = 0;
-        Impl::MapIdToLayerData::iterator layer = impl->layers.find(layerIds[i]);
+        Impl::MapIdToLayerData::const_iterator layer = impl->layers.find(layerIds[i]);
         CV_Assert(layer != impl->layers.end());
 
         for(int j = 0; j < layer->second.params.blobs.size(); j++)
diff --git a/modules/dnn/src/dnn_common.hpp b/modules/dnn/src/dnn_common.hpp
index ffeb3bfda1..c7108e5143 100644
--- a/modules/dnn/src/dnn_common.hpp
+++ b/modules/dnn/src/dnn_common.hpp
@@ -71,12 +71,12 @@ private:
 struct NetImplBase
 {
     const int networkId; // network global identifier
-    int networkDumpCounter; // dump counter
+    mutable int networkDumpCounter; // dump counter
     int dumpLevel; // level of information dumps (initialized through OPENCV_DNN_NETWORK_DUMP parameter)
 
     NetImplBase();
 
-    std::string getDumpFileNameBase();
+    std::string getDumpFileNameBase() const;
 };
 
 } // namespace detail
diff --git a/modules/dnn/src/layers/recurrent_layers.cpp b/modules/dnn/src/layers/recurrent_layers.cpp
index 14210becb4..dbb3e2700a 100644
--- a/modules/dnn/src/layers/recurrent_layers.cpp
+++ b/modules/dnn/src/layers/recurrent_layers.cpp
@@ -184,7 +184,7 @@ public:
         CV_Assert(!reverse || !bidirectional);
 
         // read activations
-        DictValue activations = params.get<DictValue>("activations", "");
+        DictValue activations = params.get<DictValue>("activations", DictValue(String()));
         if (activations.size() == 1) // if activations wasn't specified use default
         {
             f_activation = sigmoid;
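Note: this final hunk is direct fallout from the dict.hpp change at the top of
the patch: with DictValue(const char*) now explicit, a bare "" no longer
converts implicitly into the DictValue default-value argument, so the default
is constructed explicitly as DictValue(String()). The same effect in caller
terms (hypothetical snippet):

    cv::dnn::DictValue v("");       // still fine: direct initialization
    // cv::dnn::DictValue w = "";   // ill-formed once the ctor is explicit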