Merge pull request #21429 from alalek:dnn_api_explicit_const_3.4

Alexander Alekhin 2022-01-21 20:04:58 +00:00
commit f811ba8777
5 changed files with 113 additions and 93 deletions


@@ -60,13 +60,13 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN
struct CV_EXPORTS_W DictValue
{
DictValue(const DictValue &r);
DictValue(bool i) : type(Param::INT), pi(new AutoBuffer<int64,1>) { (*pi)[0] = i ? 1 : 0; } //!< Constructs integer scalar
DictValue(int64 i = 0) : type(Param::INT), pi(new AutoBuffer<int64,1>) { (*pi)[0] = i; } //!< Constructs integer scalar
CV_WRAP DictValue(int i) : type(Param::INT), pi(new AutoBuffer<int64,1>) { (*pi)[0] = i; } //!< Constructs integer scalar
DictValue(unsigned p) : type(Param::INT), pi(new AutoBuffer<int64,1>) { (*pi)[0] = p; } //!< Constructs integer scalar
CV_WRAP DictValue(double p) : type(Param::REAL), pd(new AutoBuffer<double,1>) { (*pd)[0] = p; } //!< Constructs floating point scalar
CV_WRAP DictValue(const String &s) : type(Param::STRING), ps(new AutoBuffer<String,1>) { (*ps)[0] = s; } //!< Constructs string scalar
DictValue(const char *s) : type(Param::STRING), ps(new AutoBuffer<String,1>) { (*ps)[0] = s; } //!< @overload
explicit DictValue(bool i) : type(Param::INT), pi(new AutoBuffer<int64,1>) { (*pi)[0] = i ? 1 : 0; } //!< Constructs integer scalar
explicit DictValue(int64 i = 0) : type(Param::INT), pi(new AutoBuffer<int64,1>) { (*pi)[0] = i; } //!< Constructs integer scalar
CV_WRAP explicit DictValue(int i) : type(Param::INT), pi(new AutoBuffer<int64,1>) { (*pi)[0] = i; } //!< Constructs integer scalar
explicit DictValue(unsigned p) : type(Param::INT), pi(new AutoBuffer<int64,1>) { (*pi)[0] = p; } //!< Constructs integer scalar
CV_WRAP explicit DictValue(double p) : type(Param::REAL), pd(new AutoBuffer<double,1>) { (*pd)[0] = p; } //!< Constructs floating point scalar
CV_WRAP explicit DictValue(const String &s) : type(Param::STRING), ps(new AutoBuffer<String,1>) { (*ps)[0] = s; } //!< Constructs string scalar
explicit DictValue(const char *s) : type(Param::STRING), ps(new AutoBuffer<String,1>) { (*ps)[0] = s; } //!< @overload
template<typename TypeIter>
static DictValue arrayInt(TypeIter begin, int size); //!< Constructs integer array
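A minimal sketch of what `explicit` changes for callers, assuming only the public dict.hpp API: implicit conversions are rejected, while direct initialization keeps working.

#include <opencv2/dnn/dict.hpp>

void dictValueSketch()
{
    cv::dnn::DictValue di(42);                  // OK: direct initialization
    cv::dnn::DictValue ds(cv::String("relu"));  // OK: explicit string scalar
    // cv::dnn::DictValue bad = 42;             // no longer compiles: ctor is explicit
    // cv::dnn::DictValue bad2 = "relu";        // likewise for const char*
    CV_Assert(di.get<int>() == 42 && ds.isString());
}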


@@ -125,7 +125,7 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN
class BackendNode
{
public:
BackendNode(int backendId);
explicit BackendNode(int backendId);
virtual ~BackendNode(); //!< Virtual destructor to enable polymorphism.
@@ -259,18 +259,18 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN
* Each layer input and output can be labeled to easily identify them using "%<layer_name%>[.output_name]" notation.
* This method maps the label of an input blob to its index in the input vector.
*/
virtual int inputNameToIndex(String inputName);
virtual int inputNameToIndex(String inputName); // FIXIT const
/** @brief Returns index of output blob in output array.
* @see inputNameToIndex()
*/
CV_WRAP virtual int outputNameToIndex(const String& outputName);
CV_WRAP virtual int outputNameToIndex(const String& outputName); // FIXIT const
/**
* @brief Ask the layer whether it supports a specific backend for computations.
* @param[in] backendId computation backend identifier.
* @see Backend
*/
virtual bool supportBackend(int backendId);
virtual bool supportBackend(int backendId); // FIXIT const
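A usage sketch of this query (a populated net and a valid layerId are assumed):

cv::Ptr<cv::dnn::Layer> layer = net.getLayer(layerId);
if (layer->supportBackend(cv::dnn::DNN_BACKEND_OPENCV))
{
    // this layer can run on the default OpenCV backend
}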
/**
* @brief Returns Halide backend node.
@@ -442,18 +442,29 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN
/** @brief Converts string name of the layer to the integer identifier.
* @returns id of the layer, or -1 if the layer wasn't found.
*/
CV_WRAP int getLayerId(const String &layer);
CV_WRAP int getLayerId(const String &layer) const;
CV_WRAP std::vector<String> getLayerNames() const;
/** @brief Container for strings and integers. */
/** @brief Container for strings and integers.
*
* @deprecated Use getLayerId() with int result.
*/
typedef DictValue LayerId;
/** @brief Returns a pointer to the layer with the specified id or name which the network uses. */
CV_WRAP Ptr<Layer> getLayer(LayerId layerId);
CV_WRAP Ptr<Layer> getLayer(int layerId) const;
/** @overload
* @deprecated Use int getLayerId(const String &layer)
*/
CV_WRAP inline Ptr<Layer> getLayer(const String& layerName) const { return getLayer(getLayerId(layerName)); }
/** @overload
* @deprecated to be removed
*/
CV_WRAP Ptr<Layer> getLayer(const LayerId& layerId) const;
/** @brief Returns pointers to the input layers of a specific layer. */
std::vector<Ptr<Layer> > getLayerInputs(LayerId layerId); // FIXIT: CV_WRAP
std::vector<Ptr<Layer> > getLayerInputs(int layerId) const; // FIXIT: CV_WRAP
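A sketch of the migration these deprecations suggest (the layer name "conv1" is hypothetical):

int id = net.getLayerId("conv1");                 // -1 if the layer is missing
CV_Assert(id >= 0);
cv::Ptr<cv::dnn::Layer> layer = net.getLayer(id); // preferred int overload
// Deprecated but still available:
// cv::Ptr<cv::dnn::Layer> old = net.getLayer(cv::dnn::Net::LayerId("conv1"));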
/** @brief Connects output of the first layer to input of the second layer.
* @param outPin descriptor of the first layer output.
@@ -587,14 +598,16 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN
* @note If shape of the new blob differs from the previous shape,
* then the following forward pass may fail.
*/
CV_WRAP void setParam(LayerId layer, int numParam, const Mat &blob);
CV_WRAP void setParam(int layer, int numParam, const Mat &blob);
CV_WRAP inline void setParam(const String& layerName, int numParam, const Mat &blob) { return setParam(getLayerId(layerName), numParam, blob); }
/** @brief Returns parameter blob of the layer.
* @param layer name or id of the layer.
* @param numParam index of the layer parameter in the Layer::blobs array.
* @see Layer::blobs
*/
CV_WRAP Mat getParam(LayerId layer, int numParam = 0);
CV_WRAP Mat getParam(int layer, int numParam = 0) const;
CV_WRAP inline Mat getParam(const String& layerName, int numParam = 0) const { return getParam(getLayerId(layerName), numParam); }
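The String overloads above simply resolve the name and forward to the int versions; a brief usage sketch (layer name hypothetical):

cv::Mat weights = net.getParam("conv1");           // first blob, by name
net.setParam(net.getLayerId("conv1"), 0, weights); // equivalent, by id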
/** @brief Returns indexes of layers with unconnected outputs.
*/


@@ -846,11 +846,11 @@ public:
// layer blob.
int numReferences(const LayerPin& lp)
{
std::map<LayerPin, LayerPin>::iterator mapIt = reuseMap.find(lp);
std::map<LayerPin, LayerPin>::const_iterator mapIt = reuseMap.find(lp);
CV_Assert(mapIt != reuseMap.end());
LayerPin memHost = mapIt->second;
std::map<LayerPin, int>::iterator refIt = refCounter.find(memHost);
std::map<LayerPin, int>::const_iterator refIt = refCounter.find(memHost);
CV_Assert(refIt != refCounter.end());
return refIt->second;
}
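Background for the iterator changes in this file: find() on a map reached through a const path yields a const_iterator, and const_iterator does not convert back to iterator. A standalone sketch:

#include <map>

int lookupSketch(const std::map<int, int>& m, int key)
{
    // std::map<int, int>::iterator it = m.find(key);    // would not compile
    std::map<int, int>::const_iterator it = m.find(key); // read-only view
    return it != m.end() ? it->second : -1;
}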
@@ -878,7 +878,7 @@ public:
// Decrease the reference counter for memory allocated inside a specific blob.
void releaseReference(const LayerPin& lp)
{
std::map<LayerPin, LayerPin>::iterator mapIt = reuseMap.find(lp);
std::map<LayerPin, LayerPin>::const_iterator mapIt = reuseMap.find(lp);
CV_Assert(mapIt != reuseMap.end());
std::map<LayerPin, int>::iterator refIt = refCounter.find(mapIt->second);
@@ -902,8 +902,8 @@ public:
Mat bestBlob;
LayerPin bestBlobPin;
std::map<LayerPin, Mat>::iterator hostIt;
std::map<LayerPin, int>::iterator refIt;
std::map<LayerPin, Mat>::const_iterator hostIt;
std::map<LayerPin, int>::const_iterator refIt;
const int targetTotal = total(shape);
int bestBlobTotal = INT_MAX;
@@ -915,7 +915,7 @@ public:
// it might be used as output.
if (refIt != refCounter.end() && refIt->second == 0)
{
Mat& unusedBlob = hostIt->second;
const Mat& unusedBlob = hostIt->second;
if (unusedBlob.total() >= targetTotal &&
unusedBlob.total() < bestBlobTotal)
{
@@ -1097,7 +1097,7 @@ detail::NetImplBase::NetImplBase()
// nothing
}
std::string detail::NetImplBase::getDumpFileNameBase()
std::string detail::NetImplBase::getDumpFileNameBase() const
{
std::string dumpFileNameBase = cv::format("ocv_dnn_net_%05d_%02d", networkId, networkDumpCounter++);
return dumpFileNameBase;
@@ -1148,7 +1148,6 @@ struct Net::Impl : public detail::NetImplBase
bool fusion;
bool isAsync;
std::vector<int64> layersTimings;
Mat output_blob;
Ptr<BackendWrapper> wrap(Mat& host)
{
@@ -1207,7 +1206,7 @@ struct Net::Impl : public detail::NetImplBase
std::vector< std::reference_wrapper<LayerData> > compileList; compileList.reserve(64);
for (MapIdToLayerData::iterator it = layers.begin(); it != layers.end(); ++it)
{
LayerData &ld = it->second;
LayerData& ld = it->second;
Ptr<Layer> layer = ld.layerInstance;
if (layer->supportBackend(DNN_BACKEND_HALIDE) && !ld.skip)
{
@@ -1371,19 +1370,19 @@ struct Net::Impl : public detail::NetImplBase
}
}
int getLayerId(const String &layerName)
int getLayerId(const String &layerName) const
{
std::map<String, int>::iterator it = layerNameToId.find(layerName);
std::map<String, int>::const_iterator it = layerNameToId.find(layerName);
return (it != layerNameToId.end()) ? it->second : -1;
}
int getLayerId(int id)
int getLayerId(int id) const
{
MapIdToLayerData::iterator it = layers.find(id);
MapIdToLayerData::const_iterator it = layers.find(id);
return (it != layers.end()) ? id : -1;
}
int getLayerId(DictValue &layerDesc)
int getLayerId(DictValue &layerDesc) const
{
if (layerDesc.isInt())
return getLayerId(layerDesc.get<int>());
@@ -1394,23 +1393,23 @@ struct Net::Impl : public detail::NetImplBase
return -1;
}
String getLayerName(int id)
String getLayerName(int id) const
{
MapIdToLayerData::iterator it = layers.find(id);
MapIdToLayerData::const_iterator it = layers.find(id);
return (it != layers.end()) ? it->second.name : "(unknown layer)";
}
LayerData& getLayerData(int id)
LayerData& getLayerData(int id) const
{
MapIdToLayerData::iterator it = layers.find(id);
MapIdToLayerData::const_iterator it = layers.find(id);
if (it == layers.end())
CV_Error(Error::StsObjectNotFound, format("Layer with requested id=%d not found", id));
return it->second;
return const_cast<LayerData&>(it->second);
}
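The const_cast above keeps getLayerData() usable from const methods without changing its historical non-const return type. The same pattern on a toy type (names hypothetical):

#include <map>
#include <string>

struct Registry
{
    std::map<int, std::string> items;

    // const lookup that still returns a mutable reference
    std::string& get(int id) const
    {
        std::map<int, std::string>::const_iterator it = items.find(id);
        // caller guarantees the id exists; the cast is safe as long as the
        // Registry object itself was not declared const
        return const_cast<std::string&>(it->second);
    }
};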
LayerData& getLayerData(const String &layerName)
LayerData& getLayerData(const String &layerName) const
{
int id = getLayerId(layerName);
@@ -1420,7 +1419,7 @@ struct Net::Impl : public detail::NetImplBase
return getLayerData(id);
}
LayerData& getLayerData(const DictValue &layerDesc)
LayerData& getLayerData(const DictValue &layerDesc) const
{
CV_Assert(layerDesc.isInt() || layerDesc.isString());
if (layerDesc.isInt())
@@ -1446,14 +1445,14 @@ struct Net::Impl : public detail::NetImplBase
ld.inputBlobsId[inNum] = from;
}
int resolvePinOutputName(LayerData &ld, const String &outName)
int resolvePinOutputName(LayerData &ld, const String &outName) const
{
if (outName.empty())
return 0;
return ld.getLayerInstance()->outputNameToIndex(outName);
}
LayerPin getPinByAlias(const String &layerName)
LayerPin getPinByAlias(const String &layerName) const
{
LayerPin pin;
pin.lid = (layerName.empty()) ? 0 : getLayerId(layerName);
@@ -1464,13 +1463,17 @@ struct Net::Impl : public detail::NetImplBase
return pin;
}
std::vector<LayerPin> getLayerOutPins(const String &layerName)
std::vector<LayerPin> getLayerOutPins(const String &layerName) const
{
int lid = (layerName.empty()) ? 0 : getLayerId(layerName);
std::vector<LayerPin> pins;
MapIdToLayerData::const_iterator it = layers.find(lid);
if (it == layers.end())
CV_Error_(Error::StsOutOfRange, ("Layer #%d is not valid", lid));
const size_t nOutputs = it->second.outputBlobs.size();
for (int i = 0; i < layers[lid].outputBlobs.size(); i++)
std::vector<LayerPin> pins;
for (int i = 0; i < nOutputs; i++)
{
pins.push_back(LayerPin(lid, i));
}
@@ -1920,12 +1923,11 @@ struct Net::Impl : public detail::NetImplBase
CV_TRACE_FUNCTION();
CV_Assert_N(preferableBackend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH, haveInfEngine());
MapIdToLayerData::iterator it;
Ptr<InfEngineNgraphNet> net;
for (it = layers.begin(); it != layers.end(); ++it)
for (MapIdToLayerData::const_iterator it = layers.begin(); it != layers.end(); ++it)
{
LayerData &ld = it->second;
const LayerData& ld = it->second;
if (ld.id == 0)
{
CV_Assert((netInputLayer->outNames.empty() && ld.outputBlobsWrappers.size() == 1) ||
@@ -1961,9 +1963,9 @@ struct Net::Impl : public detail::NetImplBase
InfEngineNgraphNet& ienet = *ieNode->net;
ienet.reset();
for (it = layers.begin(); it != layers.end(); ++it)
for (MapIdToLayerData::iterator it = layers.begin(); it != layers.end(); ++it)
{
LayerData &ld = it->second;
LayerData& ld = it->second;
if (ld.id == 0)
{
for (int i = 0; i < ld.inputBlobsWrappers.size(); ++i)
@@ -2005,9 +2007,9 @@ struct Net::Impl : public detail::NetImplBase
// Build Inference Engine networks from sets of layers that support this
// backend. Split the whole model into several Inference Engine networks if
// some layers are not implemented.
for (it = layers.begin(); it != layers.end(); ++it)
for (MapIdToLayerData::iterator it = layers.begin(); it != layers.end(); ++it)
{
LayerData &ld = it->second;
LayerData& ld = it->second;
if (ld.id == 0 && ld.skip)
continue;
@@ -2256,7 +2258,7 @@ struct Net::Impl : public detail::NetImplBase
ld.inputLayersId.insert(ld.inputBlobsId[i].lid);
//allocate parents
for (set<int>::iterator i = ld.inputLayersId.begin(); i != ld.inputLayersId.end(); i++)
for (set<int>::const_iterator i = ld.inputLayersId.begin(); i != ld.inputLayersId.end(); i++)
allocateLayer(*i, layersShapes);
//bind inputs
@@ -2348,8 +2350,7 @@ struct Net::Impl : public detail::NetImplBase
// we try to embed this activation into the convolution and disable separate execution of the activation
std::set<LayerPin> pinsToKeep(blobsToKeep_.begin(),
blobsToKeep_.end());
MapIdToLayerData::iterator it;
for (it = layers.begin(); it != layers.end(); it++)
for (MapIdToLayerData::const_iterator it = layers.begin(); it != layers.end(); it++)
{
int lid = it->first;
LayerData& ld = layers[lid];
@@ -2705,8 +2706,7 @@ struct Net::Impl : public detail::NetImplBase
{
CV_TRACE_FUNCTION();
MapIdToLayerData::iterator it;
for (it = layers.begin(); it != layers.end(); it++)
for (MapIdToLayerData::iterator it = layers.begin(); it != layers.end(); it++)
it->second.flag = 0;
CV_Assert(!layers[0].outputBlobs.empty());
@@ -2730,7 +2730,7 @@ struct Net::Impl : public detail::NetImplBase
// Fake references to input blobs.
for (int i = 0; i < layers[0].outputBlobs.size(); ++i)
blobManager.addReference(LayerPin(0, i));
for (it = layers.begin(); it != layers.end(); ++it)
for (MapIdToLayerData::const_iterator it = layers.begin(); it != layers.end(); ++it)
{
const LayerData& ld = it->second;
blobManager.addReferences(ld.inputBlobsId);
@@ -2741,7 +2741,7 @@ struct Net::Impl : public detail::NetImplBase
blobManager.addReference(blobsToKeep_[i]);
}
for (it = layers.begin(); it != layers.end(); it++)
for (MapIdToLayerData::const_iterator it = layers.begin(); it != layers.end(); it++)
{
int lid = it->first;
allocateLayer(lid, layersShapes);
@@ -2762,7 +2762,7 @@ struct Net::Impl : public detail::NetImplBase
TickMeter tm;
tm.start();
std::map<int, Ptr<BackendNode> >::iterator it = ld.backendNodes.find(preferableBackend);
std::map<int, Ptr<BackendNode> >::const_iterator it = ld.backendNodes.find(preferableBackend);
if (preferableBackend == DNN_BACKEND_OPENCV || it == ld.backendNodes.end() || it->second.empty())
{
if (isAsync)
@@ -2959,8 +2959,7 @@ struct Net::Impl : public detail::NetImplBase
if (clearFlags)
{
MapIdToLayerData::iterator it;
for (it = layers.begin(); it != layers.end(); it++)
for (MapIdToLayerData::iterator it = layers.begin(); it != layers.end(); it++)
it->second.flag = 0;
}
@@ -2969,8 +2968,7 @@ struct Net::Impl : public detail::NetImplBase
return;
//forward parents
MapIdToLayerData::iterator it;
for (it = layers.begin(); it != layers.end() && (it->second.id < ld.id); ++it)
for (MapIdToLayerData::iterator it = layers.begin(); it != layers.end() && (it->second.id < ld.id); ++it)
{
LayerData &ld = it->second;
if (ld.flag)
@@ -3032,7 +3030,7 @@ struct Net::Impl : public detail::NetImplBase
for(int i = 0; i < inputLayerIds.size(); i++)
{
int layerId = inputLayerIds[i].lid;
LayersShapesMap::iterator it =
LayersShapesMap::const_iterator it =
inOutShapes.find(layerId);
if(it == inOutShapes.end() ||
it->second.out.empty())
@@ -3115,7 +3113,7 @@ struct Net::Impl : public detail::NetImplBase
inOutShapes.clear();
inOutShapes[0].in = netInputShapes; //insert shape for first input layer
for (MapIdToLayerData::iterator it = layers.begin();
for (MapIdToLayerData::const_iterator it = layers.begin();
it != layers.end(); it++)
{
getLayerShapesRecursively(it->first, inOutShapes);
@@ -3155,12 +3153,11 @@ struct Net::Impl : public detail::NetImplBase
CV_LOG_DEBUG(NULL, toString(inputShapes, "Network input shapes"));
LayersShapesMap layersShapes;
layersShapes[0].in = inputShapes;
for (MapIdToLayerData::iterator it = layers.begin();
it != layers.end(); it++)
for (MapIdToLayerData::iterator it = layers.begin(); it != layers.end(); it++)
{
int layerId = it->first;
LayerData& layerData = it->second;
std::vector<LayerPin>& inputLayerIds = layerData.inputBlobsId;
const std::vector<LayerPin>& inputLayerIds = layerData.inputBlobsId;
LayerShapes& layerShapes = layersShapes[layerId];
CV_LOG_DEBUG(NULL, "layer " << layerId << ": [" << layerData.type << "]:(" << layerData.name << ") with inputs.size=" << inputLayerIds.size());
if (layerShapes.in.empty())
@@ -3170,7 +3167,7 @@ struct Net::Impl : public detail::NetImplBase
const LayerPin& inputPin = inputLayerIds[i];
int inputLayerId = inputPin.lid;
CV_LOG_DEBUG(NULL, " input[" << i << "] " << inputLayerId << ":" << inputPin.oid << " as [" << layers[inputLayerId].type << "]:(" << layers[inputLayerId].name << ")");
LayersShapesMap::iterator inputIt = layersShapes.find(inputLayerId);
LayersShapesMap::const_iterator inputIt = layersShapes.find(inputLayerId);
if (inputIt == layersShapes.end() || inputIt->second.out.empty())
{
getLayerShapesRecursively(inputLayerId, layersShapes);
@@ -3187,19 +3184,23 @@ struct Net::Impl : public detail::NetImplBase
CV_LOG_DEBUG(NULL, "updateLayersShapes() - DONE");
}
LayerPin getLatestLayerPin(const std::vector<LayerPin>& pins)
LayerPin getLatestLayerPin(const std::vector<LayerPin>& pins) const
{
return *std::max_element(pins.begin(), pins.end());
}
Mat getBlob(const LayerPin& pin)
Mat getBlob(const LayerPin& pin) const
{
CV_TRACE_FUNCTION();
if (!pin.valid())
CV_Error(Error::StsObjectNotFound, "Requested blob not found");
LayerData &ld = layers[pin.lid];
MapIdToLayerData::const_iterator it = layers.find(pin.lid);
if (it == layers.end())
CV_Error_(Error::StsOutOfRange, ("Layer #%d is not valid (output #%d requested)", pin.lid, pin.oid));
const LayerData &ld = it->second;
if ((size_t)pin.oid >= ld.outputBlobs.size())
{
CV_Error(Error::StsOutOfRange, format("Layer \"%s\" produces only %d outputs, "
@@ -3215,6 +3216,7 @@ struct Net::Impl : public detail::NetImplBase
if (ld.outputBlobs[pin.oid].depth() == CV_16S)
{
Mat output_blob;
convertFp16(ld.outputBlobs[pin.oid], output_blob);
return output_blob;
}
@@ -3222,7 +3224,7 @@ struct Net::Impl : public detail::NetImplBase
return ld.outputBlobs[pin.oid];
}
Mat getBlob(String outputName)
Mat getBlob(String outputName) const
{
return getBlob(getPinByAlias(outputName));
}
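The function-local output_blob above replaces the Mat member removed earlier in this file, so concurrent getBlob() calls no longer share one conversion buffer. A sketch of the conversion step, assuming only opencv2/core.hpp:

#include <opencv2/core.hpp>

// fp16 blobs are stored in CV_16S Mats; widen them to CV_32F on demand.
cv::Mat toFp32Sketch(const cv::Mat& blob)
{
    if (blob.depth() == CV_16S)
    {
        cv::Mat out;                 // local buffer: thread-safe by construction
        cv::convertFp16(blob, out);
        return out;
    }
    return blob;
}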
@@ -3282,9 +3284,9 @@ struct Net::Impl : public detail::NetImplBase
Net createNetworkFromModelOptimizer(InferenceEngine::CNNNetwork& ieNet);
#endif
string dump();
string dump() const;
void dumpNetworkToFile()
void dumpNetworkToFile() const
{
#ifndef OPENCV_DNN_DISABLE_NETWORK_AUTO_DUMP
string dumpFileNameBase = getDumpFileNameBase();
@@ -3945,7 +3947,7 @@ void Net::setInput(InputArray blob, const String& name, double scalefactor, const Scalar& mean)
impl->netWasAllocated = impl->netWasAllocated && oldShape;
}
Mat Net::getParam(LayerId layer, int numParam)
Mat Net::getParam(int layer, int numParam) const
{
LayerData &ld = impl->getLayerData(layer);
std::vector<Mat> &layerBlobs = ld.getLayerInstance()->blobs;
@@ -3953,7 +3955,7 @@ Mat Net::getParam(LayerId layer, int numParam)
return layerBlobs[numParam];
}
void Net::setParam(LayerId layer, int numParam, const Mat &blob)
void Net::setParam(int layer, int numParam, const Mat &blob)
{
LayerData &ld = impl->getLayerData(layer);
@@ -3963,7 +3965,7 @@ void Net::setParam(LayerId layer, int numParam, const Mat &blob)
layerBlobs[numParam] = blob;
}
int Net::getLayerId(const String &layer)
int Net::getLayerId(const String &layer) const
{
return impl->getLayerId(layer);
}
@@ -4006,7 +4008,7 @@ String Net::dump()
return impl->dump();
}
string Net::Impl::dump()
string Net::Impl::dump() const
{
bool hasInput = !netInputLayer->inputsData.empty();
@@ -4266,13 +4268,18 @@ void Net::dumpToFile(const String& path) {
file.close();
}
Ptr<Layer> Net::getLayer(LayerId layerId)
Ptr<Layer> Net::getLayer(int layerId) const
{
LayerData &ld = impl->getLayerData(layerId);
return ld.getLayerInstance();
}
Ptr<Layer> Net::getLayer(const LayerId& layerId) const
{
LayerData &ld = impl->getLayerData(layerId);
return ld.getLayerInstance();
}
std::vector<Ptr<Layer> > Net::getLayerInputs(LayerId layerId)
std::vector<Ptr<Layer> > Net::getLayerInputs(int layerId) const
{
LayerData &ld = impl->getLayerData(layerId);
@@ -4291,7 +4298,7 @@ std::vector<String> Net::getLayerNames() const
std::vector<String> res;
res.reserve(impl->layers.size());
Impl::MapIdToLayerData::iterator it;
Impl::MapIdToLayerData::const_iterator it;
for (it = impl->layers.begin(); it != impl->layers.end(); it++)
{
if (it->second.id) //skip Data layer
@@ -4310,11 +4317,11 @@ std::vector<int> Net::getUnconnectedOutLayers() const
{
std::vector<int> layersIds;
Impl::MapIdToLayerData::iterator it;
Impl::MapIdToLayerData::const_iterator it;
for (it = impl->layers.begin(); it != impl->layers.end(); it++)
{
int lid = it->first;
LayerData &ld = it->second;
const LayerData &ld = it->second;
if (ld.requiredOutputs.size() == 0)
layersIds.push_back(lid);
@@ -4414,13 +4421,13 @@ int64 Net::getFLOPS(const MatShape& netInputShape) const
int64 Net::getFLOPS(const int layerId,
const std::vector<MatShape>& netInputShapes) const
{
Impl::MapIdToLayerData::iterator layer = impl->layers.find(layerId);
Impl::MapIdToLayerData::const_iterator layer = impl->layers.find(layerId);
CV_Assert(layer != impl->layers.end());
LayerShapes shapes;
impl->getLayerShapes(netInputShapes, layerId, shapes);
return layer->second.getLayerInstance()->getFLOPS(shapes.in, shapes.out);
return const_cast<LayerData&>(layer->second).getLayerInstance()->getFLOPS(shapes.in, shapes.out);
}
int64 Net::getFLOPS(const int layerId,
@@ -4434,7 +4441,7 @@ void Net::getLayerTypes(std::vector<String>& layersTypes) const
layersTypes.clear();
std::map<String, int> layers;
for (Impl::MapIdToLayerData::iterator it = impl->layers.begin();
for (Impl::MapIdToLayerData::const_iterator it = impl->layers.begin();
it != impl->layers.end(); it++)
{
if (layers.find(it->second.type) == layers.end())
@@ -4442,7 +4449,7 @@ void Net::getLayerTypes(std::vector<String>& layersTypes) const
layers[it->second.type]++;
}
for (std::map<String, int>::iterator it = layers.begin();
for (std::map<String, int>::const_iterator it = layers.begin();
it != layers.end(); it++)
{
layersTypes.push_back(it->first);
@@ -4452,7 +4459,7 @@ void Net::getLayerTypes(std::vector<String>& layersTypes) const
int Net::getLayersCount(const String& layerType) const
{
int count = 0;
for (Impl::MapIdToLayerData::iterator it = impl->layers.begin();
for (Impl::MapIdToLayerData::const_iterator it = impl->layers.begin();
it != impl->layers.end(); it++)
{
if (it->second.type == layerType)
@@ -4467,7 +4474,7 @@ void Net::getMemoryConsumption(const int layerId,
{
CV_TRACE_FUNCTION();
Impl::MapIdToLayerData::iterator layer = impl->layers.find(layerId);
Impl::MapIdToLayerData::const_iterator layer = impl->layers.find(layerId);
CV_Assert(layer != impl->layers.end());
weights = blobs = 0;
@@ -4535,7 +4542,7 @@ void Net::getMemoryConsumption(const std::vector<MatShape>& netInputShapes,
for(int i = 0; i < layerIds.size(); i++)
{
int w = 0, b = 0;
Impl::MapIdToLayerData::iterator layer = impl->layers.find(layerIds[i]);
Impl::MapIdToLayerData::const_iterator layer = impl->layers.find(layerIds[i]);
CV_Assert(layer != impl->layers.end());
for(int j = 0; j < layer->second.params.blobs.size(); j++)


@@ -19,12 +19,12 @@ namespace detail {
struct NetImplBase
{
const int networkId; // network global identifier
int networkDumpCounter; // dump counter
mutable int networkDumpCounter; // dump counter
int dumpLevel; // level of information dumps (initialized through OPENCV_DNN_NETWORK_DUMP parameter)
NetImplBase();
std::string getDumpFileNameBase();
std::string getDumpFileNameBase() const;
};
} // namespace detail
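Marking networkDumpCounter mutable is what lets the now-const getDumpFileNameBase() still advance it. The mechanism in isolation:

struct DumpStateSketch
{
    mutable int counter;                      // writable even from const methods

    DumpStateSketch() : counter(0) {}
    int nextId() const { return counter++; }  // legal only because of 'mutable'
};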


@@ -184,7 +184,7 @@ public:
CV_Assert(!reverse || !bidirectional);
// read activations
DictValue activations = params.get<DictValue>("activations", "");
DictValue activations = params.get<DictValue>("activations", DictValue(String()));
if (activations.size() == 1) // if activations weren't specified, use the defaults
{
f_activation = sigmoid;
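This last hunk is a knock-on effect of making DictValue(const String&) explicit in dict.hpp: the bare "" default can no longer convert implicitly, so the DictValue is constructed explicitly at the call site:

// Before: implicit conversion from a string literal.
// DictValue activations = params.get<DictValue>("activations", "");
// After: the default is wrapped explicitly.
DictValue activations = params.get<DictValue>("activations", DictValue(String()));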