dnn: added trace macros
parent bbb14d3746
commit ed10383359
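This commit instruments the dnn module with the OpenCV trace macros declared in opencv2/core/utils/trace.hpp: CV_TRACE_FUNCTION() at the top of importer, Net and Layer entry points, CV_TRACE_ARG / CV_TRACE_ARG_VALUE to attach arguments such as the backend id or the layer name to the trace record, and CV_TRACE_REGION to mark a named sub-scope. A minimal sketch of the pattern applied throughout the hunks below (the function and its argument are illustrative, not taken from the patch):

    #include <opencv2/core.hpp>
    #include <opencv2/core/utils/trace.hpp>

    void exampleForward(const cv::String& name)
    {
        CV_TRACE_FUNCTION();                            // records entry/exit of this function
        CV_TRACE_ARG_VALUE(name, "name", name.c_str()); // attaches the value of `name` to the record

        {
            CV_TRACE_REGION("inner_work");              // marks a named region inside the function
            // ... actual work would go here ...
        }
    }

When tracing support is disabled at build time, these macros compile to no-ops, so the instrumented code paths are unaffected.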
@@ -43,7 +43,7 @@
using namespace cv;
using namespace cv::dnn;

-#if HAVE_PROTOBUF
+#ifdef HAVE_PROTOBUF
#include "caffe.pb.h"

#include <iostream>

@@ -82,6 +82,8 @@ public:

CaffeImporter(const char *pototxt, const char *caffeModel)
{
+CV_TRACE_FUNCTION();
+
ReadNetParamsFromTextFileOrDie(pototxt, &net);

if (caffeModel && caffeModel[0])

@@ -264,6 +266,8 @@ public:

void populateNet(Net dstNet)
{
+CV_TRACE_FUNCTION();
+
int layersSize = net.layer_size();
layerCounter.clear();
addedBlobs.clear();
@@ -87,7 +87,7 @@
//
//M*/

-#if HAVE_PROTOBUF
+#ifdef HAVE_PROTOBUF
#include <google/protobuf/io/coded_stream.h>
#include <google/protobuf/io/zero_copy_stream_impl.h>
#include <google/protobuf/text_format.h>
@@ -89,7 +89,7 @@

#ifndef __OPENCV_DNN_CAFFE_IO_HPP__
#define __OPENCV_DNN_CAFFE_IO_HPP__
-#if HAVE_PROTOBUF
+#ifdef HAVE_PROTOBUF

#include "caffe.pb.h"
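Note that besides adding trace macros, the hunks above (and the matching TensorFlow ones near the end of the diff) also change the protobuf guard from #if HAVE_PROTOBUF to #ifdef HAVE_PROTOBUF. #if evaluates the macro's value, silently treating an undefined macro as 0, and fails outright if the macro is defined with an empty value, whereas #ifdef only tests whether the macro is defined at all, which is what these guards intend:

    // #if HAVE_PROTOBUF    -> guard skipped when the macro is undefined,
    //                         preprocessor error if it is defined with no value
    // #ifdef HAVE_PROTOBUF -> guard taken whenever the macro is defined
    #ifdef HAVE_PROTOBUF
    #include "caffe.pb.h"
    #endif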
@@ -90,6 +90,7 @@ static String toString(const T &v)
Mat blobFromImage(const Mat& image, double scalefactor, const Size& size,
const Scalar& mean, bool swapRB)
{
+CV_TRACE_FUNCTION();
std::vector<Mat> images(1, image);
return blobFromImages(images, scalefactor, size, mean, swapRB);
}

@@ -97,6 +98,7 @@ Mat blobFromImage(const Mat& image, double scalefactor, const Size& size,
Mat blobFromImages(const std::vector<Mat>& images_, double scalefactor, Size size,
const Scalar& mean_, bool swapRB)
{
+CV_TRACE_FUNCTION();
std::vector<Mat> images = images_;
for (int i = 0; i < images.size(); i++)
{

@@ -207,6 +209,8 @@ class BackendWrapManager
public:
Ptr<BackendWrapper> wrap(const Mat& m, int backendId, int targetId)
{
+CV_TRACE_FUNCTION();
+
CV_Assert(backendId != DNN_BACKEND_DEFAULT);

std::map<void*, Ptr<BackendWrapper> >::iterator hostsIt;

@@ -261,6 +265,8 @@ public:

void reset()
{
+CV_TRACE_FUNCTION();
+
hostWrappers.clear();
extraWrappers.clear();
}
@@ -321,6 +327,8 @@ struct LayerData
LayerData(int _id, const String &_name, const String &_type, LayerParams &_params)
: id(_id), name(_name), type(_type), params(_params)
{
+CV_TRACE_FUNCTION();
+
//add logging info
params.name = name;
params.type = type;

@@ -349,6 +357,9 @@ struct LayerData

Ptr<Layer> getLayerInstance()
{
+CV_TRACE_FUNCTION();
+CV_TRACE_ARG_VALUE(type, "type", type.c_str());
+
if (layerInstance)
return layerInstance;

@@ -500,6 +511,8 @@ public:
void allocateBlobsForLayer(LayerData &ld, const LayerShapes& layerShapes,
std::vector<LayerPin>& pinsForInternalBlobs)
{
+CV_TRACE_FUNCTION();
+
pinsForInternalBlobs.clear();

std::vector<Mat>& outputBlobs = ld.outputBlobs,

@@ -578,6 +591,8 @@ public:
// Clear internal state. Calls before an every reallocation.
void reset()
{
+CV_TRACE_FUNCTION();
+
refCounter.clear();
reuseMap.clear();
memHosts.clear();
@@ -639,6 +654,8 @@ struct Net::Impl

void compileHalide()
{
+CV_TRACE_FUNCTION();
+
CV_Assert(preferableBackend == DNN_BACKEND_HALIDE);

HalideScheduler scheduler(halideConfigFile);

@@ -666,6 +683,8 @@ struct Net::Impl

void clear()
{
+CV_TRACE_FUNCTION();
+
MapIdToLayerData::iterator it;
for (it = layers.begin(); it != layers.end(); it++)
{

@@ -694,6 +713,8 @@ struct Net::Impl

void setUpNet(const std::vector<LayerPin>& blobsToKeep_ = std::vector<LayerPin>())
{
+CV_TRACE_FUNCTION();
+
if (!netWasAllocated || this->blobsToKeep != blobsToKeep_)
{
clear();

@@ -862,6 +883,8 @@ struct Net::Impl

void computeNetOutputLayers()
{
+CV_TRACE_FUNCTION();
+
netOutputs.clear();

MapIdToLayerData::iterator it;

@@ -883,6 +906,8 @@ struct Net::Impl

void initBackend()
{
+CV_TRACE_FUNCTION();
+
backendWrapper.reset();
if (preferableBackend == DNN_BACKEND_DEFAULT)
{
@@ -953,6 +978,8 @@ struct Net::Impl

void allocateLayer(int lid, const LayersShapesMap& layersShapes)
{
+CV_TRACE_FUNCTION();
+
LayerData &ld = layers[lid];

//already allocated

@@ -1026,6 +1053,8 @@ struct Net::Impl

void fuseLayers(const std::vector<LayerPin>& blobsToKeep_)
{
+CV_TRACE_FUNCTION();
+
// scan through all the layers. If there is convolution layer followed by the activation layer,
// we try to embed this activation into the convolution and disable separate execution of the activation
std::vector<String> outnames;

@@ -1094,6 +1123,8 @@ struct Net::Impl

void allocateLayers(const std::vector<LayerPin>& blobsToKeep_)
{
+CV_TRACE_FUNCTION();
+
MapIdToLayerData::iterator it;
for (it = layers.begin(); it != layers.end(); it++)
it->second.flag = 0;

@@ -1131,6 +1162,8 @@ struct Net::Impl

void forwardLayer(LayerData &ld)
{
+CV_TRACE_FUNCTION();
+
Ptr<Layer> layer = ld.layerInstance;

if (preferableBackend == DNN_BACKEND_DEFAULT ||

@@ -1159,6 +1192,8 @@ struct Net::Impl

void forwardToLayer(LayerData &ld, bool clearFlags = true)
{
+CV_TRACE_FUNCTION();
+
if (clearFlags)
{
MapIdToLayerData::iterator it;

@@ -1186,6 +1221,8 @@ struct Net::Impl

void forwardAll()
{
+CV_TRACE_FUNCTION();
+
forwardToLayer(layers.rbegin()->second, true);
}
@@ -1247,6 +1284,8 @@ struct Net::Impl

Mat getBlob(const LayerPin& pin)
{
+CV_TRACE_FUNCTION();
+
if (!pin.valid())
CV_Error(Error::StsObjectNotFound, "Requested blob not found");

@@ -1285,6 +1324,8 @@ Net::~Net()

int Net::addLayer(const String &name, const String &type, LayerParams &params)
{
+CV_TRACE_FUNCTION();
+
if (name.find('.') != String::npos)
{
CV_Error(Error::StsBadArg, "Added layer name \"" + name + "\" must not contain dot symbol");

@@ -1306,6 +1347,8 @@ int Net::addLayer(const String &name, const String &type, LayerParams &params)

int Net::addLayerToPrev(const String &name, const String &type, LayerParams &params)
{
+CV_TRACE_FUNCTION();
+
int prvLid = impl->lastLayerId;
int newLid = this->addLayer(name, type, params);
this->connect(prvLid, 0, newLid, 0);

@@ -1314,11 +1357,15 @@ int Net::addLayerToPrev(const String &name, const String &type, LayerParams &params)

void Net::connect(int outLayerId, int outNum, int inpLayerId, int inpNum)
{
+CV_TRACE_FUNCTION();
+
impl->connect(outLayerId, outNum, inpLayerId, inpNum);
}

void Net::connect(String _outPin, String _inPin)
{
+CV_TRACE_FUNCTION();
+
LayerPin outPin = impl->getPinByAlias(_outPin);
LayerPin inpPin = impl->getPinByAlias(_inPin);
@@ -1329,6 +1376,8 @@ void Net::connect(String _outPin, String _inPin)

Mat Net::forward(const String& outputName)
{
+CV_TRACE_FUNCTION();
+
String layerName = outputName;

if (layerName.empty())

@@ -1342,6 +1391,8 @@ Mat Net::forward(const String& outputName)

void Net::forward(std::vector<Mat>& outputBlobs, const String& outputName)
{
+CV_TRACE_FUNCTION();
+
impl->setUpNet();

String layerName = outputName;

@@ -1359,6 +1410,8 @@ void Net::forward(std::vector<Mat>& outputBlobs, const String& outputName)
void Net::forward(std::vector<Mat>& outputBlobs,
const std::vector<String>& outBlobNames)
{
+CV_TRACE_FUNCTION();
+
std::vector<LayerPin> pins;
for (int i = 0; i < outBlobNames.size(); i++)
{

@@ -1381,6 +1434,8 @@ void Net::forward(std::vector<Mat>& outputBlobs,
void Net::forward(std::vector<std::vector<Mat> >& outputBlobs,
const std::vector<String>& outBlobNames)
{
+CV_TRACE_FUNCTION();
+
std::vector<LayerPin> pins;
for (int i = 0; i < outBlobNames.size(); i++)
{

@@ -1407,6 +1462,9 @@ void Net::forward(std::vector<std::vector<Mat> >& outputBlobs,

void Net::setPreferableBackend(int backendId)
{
+CV_TRACE_FUNCTION();
+CV_TRACE_ARG(backendId);
+
impl->netWasAllocated = impl->netWasAllocated &&
impl->preferableBackend == backendId;
impl->preferableBackend = backendId;
@@ -1414,6 +1472,9 @@ void Net::setPreferableBackend(int backendId)

void Net::setPreferableTarget(int targetId)
{
+CV_TRACE_FUNCTION();
+CV_TRACE_ARG(targetId);
+
impl->netWasAllocated = impl->netWasAllocated &&
impl->preferableTarget == targetId;
impl->preferableTarget = targetId;

@@ -1421,11 +1482,16 @@ void Net::setPreferableTarget(int targetId)

void Net::setInputsNames(const std::vector<String> &inputBlobNames)
{
+CV_TRACE_FUNCTION();
+
impl->netInputLayer->setNames(inputBlobNames);
}

void Net::setInput(const Mat &blob_, const String& name)
{
+CV_TRACE_FUNCTION();
+CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
LayerPin pin;
pin.lid = 0;
pin.oid = impl->resolvePinOutputName(impl->getLayerData(pin.lid), name);

@@ -1595,6 +1661,8 @@ void Net::getLayerShapes(const ShapesVec& netInputShapes,

int64 Net::getFLOPS(const std::vector<MatShape>& netInputShapes) const
{
+CV_TRACE_FUNCTION();
+
int64 flops = 0;
std::vector<int> ids;
std::vector<std::vector<MatShape> > inShapes, outShapes;

@@ -1670,6 +1738,8 @@ void Net::getMemoryConsumption(const int layerId,
const std::vector<MatShape>& netInputShapes,
size_t& weights, size_t& blobs) const
{
+CV_TRACE_FUNCTION();
+
Impl::MapIdToLayerData::iterator layer = impl->layers.find(layerId);
CV_Assert(layer != impl->layers.end());

@@ -1692,6 +1762,8 @@ void Net::getMemoryConsumption(const int layerId,
void Net::getMemoryConsumption(const std::vector<MatShape>& netInputShapes,
size_t& weights, size_t& blobs) const
{
+CV_TRACE_FUNCTION();
+
std::vector<int> layerIds;
std::vector<size_t> w, b;
getMemoryConsumption(netInputShapes, layerIds, w, b);

@@ -1723,6 +1795,8 @@ void Net::getMemoryConsumption(const std::vector<MatShape>& netInputShapes,
std::vector<int>& layerIds, std::vector<size_t>& weights,
std::vector<size_t>& blobs) const
{
+CV_TRACE_FUNCTION();
+
layerIds.clear();
weights.clear();
blobs.clear();

@@ -1762,6 +1836,9 @@ void Net::getMemoryConsumption(const MatShape& netInputShape, std::vector<int>&

void Net::setHalideScheduler(const String& scheduler)
{
+CV_TRACE_FUNCTION();
+CV_TRACE_ARG_VALUE(scheduler, "scheduler", scheduler.c_str());
+
impl->halideConfigFile = scheduler;
}
@@ -1810,6 +1887,8 @@ void Layer::applyHalideScheduler(Ptr<BackendNode>& node, const std::vector<Mat*>
const std::vector<Mat> &outputs, int targetId) const
{
#ifdef HAVE_HALIDE
+CV_TRACE_FUNCTION();
+
Halide::Var x("x"), y("y"), c("c"), n("n"), co("co"), ci("ci"),
xo("xo"), xi("xi"), yo("yo"), yi("yi"), tile("tile");
Halide::Func& top = node.dynamicCast<HalideBackendNode>()->funcs.back();

@@ -1891,6 +1970,8 @@ static void vecToPVec(const std::vector<T> &v, std::vector<T*> &pv)

void Layer::finalize(const std::vector<Mat> &inputs, std::vector<Mat> &outputs)
{
+CV_TRACE_FUNCTION();
+
std::vector<Mat*> inputsp;
vecToPVec(inputs, inputsp);
this->finalize(inputsp, outputs);

@@ -1903,6 +1984,8 @@ void Layer::finalize(const std::vector<Mat*> &input, std::vector<Mat> &output)

std::vector<Mat> Layer::finalize(const std::vector<Mat> &inputs)
{
+CV_TRACE_FUNCTION();
+
std::vector<Mat> outputs;
this->finalize(inputs, outputs);
return outputs;

@@ -1910,6 +1993,8 @@ std::vector<Mat> Layer::finalize(const std::vector<Mat> &inputs)

void Layer::forward(const std::vector<Mat> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
{
+CV_TRACE_FUNCTION();
+
std::vector<Mat*> inputsp;
vecToPVec(inputs, inputsp);
this->forward(inputsp, outputs, internals);

@@ -1917,6 +2002,8 @@ void Layer::forward(const std::vector<Mat> &inputs, std::vector<Mat> &outputs, s

void Layer::run(const std::vector<Mat> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
{
+CV_TRACE_FUNCTION();
+
std::vector<Mat*> inputsp;
vecToPVec(inputs, inputsp);
this->finalize(inputsp, outputs);
@@ -1972,32 +2059,41 @@ static LayerFactory_Impl& getLayerFactoryImpl()
return *instance;
}

-void LayerFactory::registerLayer(const String &_type, Constuctor constructor)
+void LayerFactory::registerLayer(const String &type, Constuctor constructor)
{
+CV_TRACE_FUNCTION();
+CV_TRACE_ARG_VALUE(type, "type", type.c_str());
+
cv::AutoLock lock(getLayerFactoryMutex());
-String type = _type.toLowerCase();
-LayerFactory_Impl::const_iterator it = getLayerFactoryImpl().find(type);
+String type_ = type.toLowerCase();
+LayerFactory_Impl::const_iterator it = getLayerFactoryImpl().find(type_);

if (it != getLayerFactoryImpl().end() && it->second != constructor)
{
-CV_Error(cv::Error::StsBadArg, "Layer \"" + type + "\" already was registered");
+CV_Error(cv::Error::StsBadArg, "Layer \"" + type_ + "\" already was registered");
}

-getLayerFactoryImpl().insert(std::make_pair(type, constructor));
+getLayerFactoryImpl().insert(std::make_pair(type_, constructor));
}

-void LayerFactory::unregisterLayer(const String &_type)
+void LayerFactory::unregisterLayer(const String &type)
{
+CV_TRACE_FUNCTION();
+CV_TRACE_ARG_VALUE(type, "type", type.c_str());
+
cv::AutoLock lock(getLayerFactoryMutex());
-String type = _type.toLowerCase();
-getLayerFactoryImpl().erase(type);
+String type_ = type.toLowerCase();
+getLayerFactoryImpl().erase(type_);
}

-Ptr<Layer> LayerFactory::createLayerInstance(const String &_type, LayerParams& params)
+Ptr<Layer> LayerFactory::createLayerInstance(const String &type, LayerParams& params)
{
+CV_TRACE_FUNCTION();
+CV_TRACE_ARG_VALUE(type, "type", type.c_str());
+
cv::AutoLock lock(getLayerFactoryMutex());
-String type = _type.toLowerCase();
-LayerFactory_Impl::const_iterator it = getLayerFactoryImpl().find(type);
+String type_ = type.toLowerCase();
+LayerFactory_Impl::const_iterator it = getLayerFactoryImpl().find(type_);

if (it != getLayerFactoryImpl().end())
{
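The LayerFactory hunk above does slightly more than insert trace calls: CV_TRACE_ARG_VALUE(type, "type", type.c_str()) records the parameter under the name type, so the parameter is renamed from _type to type and the lowercased copy used as the registry key becomes the local type_ (Constuctor is the existing typedef spelling in the source, not a typo introduced here). Assembled from the hunk, registerLayer reads as follows after the patch; unregisterLayer and createLayerInstance follow the same pattern:

    void LayerFactory::registerLayer(const String &type, Constuctor constructor)
    {
        CV_TRACE_FUNCTION();
        CV_TRACE_ARG_VALUE(type, "type", type.c_str());

        cv::AutoLock lock(getLayerFactoryMutex());
        String type_ = type.toLowerCase();
        LayerFactory_Impl::const_iterator it = getLayerFactoryImpl().find(type_);

        if (it != getLayerFactoryImpl().end() && it->second != constructor)
        {
            CV_Error(cv::Error::StsBadArg, "Layer \"" + type_ + "\" already was registered");
        }

        getLayerFactoryImpl().insert(std::make_pair(type_, constructor));
    }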
@@ -60,6 +60,8 @@ Mutex* __initialization_mutex_initializer = &getInitializationMutex();

void initializeLayerFactory()
{
+CV_TRACE_FUNCTION();
+
CV_DNN_REGISTER_LAYER_CLASS(Slice, SliceLayer);
CV_DNN_REGISTER_LAYER_CLASS(Split, SplitLayer);
CV_DNN_REGISTER_LAYER_CLASS(Concat, ConcatLayer);
@@ -104,6 +104,9 @@ public:

void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
{
+CV_TRACE_FUNCTION();
+CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
CV_Assert(blobs.size() >= 2);
CV_Assert(inputs.size() == 1);

@@ -64,6 +64,9 @@ public:

void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
{
+CV_TRACE_FUNCTION();
+CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
for (int i = 0, n = outputs.size(); i < n; ++i)
if (outputs[i].data != inputs[i]->data)
inputs[i]->copyTo(outputs[i]);

@@ -96,6 +96,9 @@ public:

void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
{
+CV_TRACE_FUNCTION();
+CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
int cAxis = clamp(axis, inputs[0]->dims);
Mat& outMat = outputs[0];
std::vector<Range> ranges(outputs[0].dims, Range::all());

@@ -624,6 +624,9 @@ public:

void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
{
+CV_TRACE_FUNCTION();
+CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
/*printf("conv %s: input (%d x %d x %d x %d), kernel (%d x %d), pad (%d x %d), stride (%d x %d), dilation (%d x %d)\n",
name.c_str(), inputs[0]->size[0], inputs[0]->size[1], inputs[0]->size[2], inputs[0]->size[3],
kernel.width, kernel.height, pad.width, pad.height,

@@ -1006,6 +1009,9 @@ public:

void forward(std::vector<Mat *> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
{
+CV_TRACE_FUNCTION();
+CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
int outCn = blobs[0].size[0];
int inpCn = inputs[0]->size[1];
bool is1x1flag = is1x1();
@@ -135,6 +135,9 @@ public:

void forward(std::vector<Mat *> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
{
+CV_TRACE_FUNCTION();
+CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
Mat &input = *inputs[0];
Mat &output = outputs[0];

@@ -206,6 +206,9 @@ public:

void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
{
+CV_TRACE_FUNCTION();
+CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
const float* locationData = inputs[0]->ptr<float>();
const float* confidenceData = inputs[1]->ptr<float>();
const float* priorData = inputs[2]->ptr<float>();

@@ -156,6 +156,8 @@ public:

void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
{
+CV_TRACE_FUNCTION();
+
for (size_t i = 0; i < inputs.size(); i++)
{
const Mat &src = *inputs[i];

@@ -251,6 +251,9 @@ public:

void forward(std::vector<Mat *> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
{
+CV_TRACE_FUNCTION();
+CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
CV_Assert(outputs.size() == 1);
const int nstripes = getNumThreads();
EltwiseInvoker::run((const Mat**)&inputs[0], (int)inputs.size(), outputs[0],

@@ -106,6 +106,9 @@ public:

void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
{
+CV_TRACE_FUNCTION();
+CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
for (size_t i = 0; i < inputs.size(); i++)
{
MatShape outShape = shape(outputs[i]);

@@ -233,6 +233,9 @@ public:

void forward(std::vector<Mat*> &input, std::vector<Mat> &output, std::vector<Mat> &)
{
+CV_TRACE_FUNCTION();
+CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
int axisCan = clamp(axis, input[0]->dims);
int outerSize = input[0]->total(0, axisCan);
@@ -86,6 +86,9 @@ public:

void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
{
+CV_TRACE_FUNCTION();
+CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
CV_Assert(inputs.size() == outputs.size());
for (int i = 0; i < inputs.size(); i++)
{

@@ -57,6 +57,9 @@ public:

void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
{
+CV_TRACE_FUNCTION();
+CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
CV_Assert(inputs.size() == 2);
Mat& input = *inputs[0];
Mat& indices = *inputs[1];

@@ -62,6 +62,9 @@ public:

void forward(std::vector<Mat *> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
{
+CV_TRACE_FUNCTION();
+CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
for (size_t inpIdx = 0; inpIdx < inputs.size(); inpIdx++)
{
Mat &inpBlob = *inputs[inpIdx];

@@ -142,6 +142,9 @@ public:

void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
{
+CV_TRACE_FUNCTION();
+CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
checkInputs(inputs);

Mat& buffer = internals[0], sumChannelMultiplier = internals[1],

@@ -61,6 +61,9 @@ public:

void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
{
+CV_TRACE_FUNCTION();
+CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
for(int i = 0; i < inputs.size(); i++)
{
outputs[i] = paddingValue;

@@ -245,6 +245,9 @@ public:

void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
{
+CV_TRACE_FUNCTION();
+CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
size_t k, ninputs = inputs.size();
if(!_needsPermute)
{

@@ -106,6 +106,9 @@ public:

void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
{
+CV_TRACE_FUNCTION();
+CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
for (size_t ii = 0; ii < inputs.size(); ii++)
{
switch (type)

@@ -227,6 +227,9 @@ public:

void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
{
+CV_TRACE_FUNCTION();
+CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
int _layerWidth = inputs[0]->size[3];
int _layerHeight = inputs[0]->size[2];
@@ -220,6 +220,9 @@ public:

void forward(std::vector<Mat*> &input, std::vector<Mat> &output, std::vector<Mat> &internals)
{
+CV_TRACE_FUNCTION();
+CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
const Mat &Wh = blobs[0];
const Mat &Wx = blobs[1];
const Mat &bias = blobs[2];

@@ -404,6 +407,9 @@ public:

void forward(std::vector<Mat*> &input, std::vector<Mat> &output, std::vector<Mat> &internals)
{
+CV_TRACE_FUNCTION();
+CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
Mat xTs = input[0]->reshape(1, numSamplesTotal);
Mat oTs = output[0].reshape(1, numSamplesTotal);
Mat hTs = produceH ? output[1].reshape(1, numSamplesTotal) : Mat();

@@ -196,6 +196,9 @@ public:

void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
{
+CV_TRACE_FUNCTION();
+CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
for (size_t i = 0; i < inputs.size(); i++)
{
Mat srcBlob = *inputs[i];

@@ -45,6 +45,9 @@ public:

void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
{
+CV_TRACE_FUNCTION();
+CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
CV_Assert(blobs.size() == 1 + hasBias);

for (size_t ii = 0; ii < outputs.size(); ii++)

@@ -38,6 +38,9 @@ public:

virtual void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
{
+CV_TRACE_FUNCTION();
+CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
CV_Assert(inputs.size() > 0);
CV_Assert(blobs.size() > 0);

@@ -118,6 +118,9 @@ public:

void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
{
+CV_TRACE_FUNCTION();
+CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
const Mat& inpMat = *inputs[0];
std::vector<Range> ranges(inpMat.dims, Range::all());
int cAxis = clamp(axis, inpMat.dims);

@@ -84,6 +84,9 @@ public:

void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
{
+CV_TRACE_FUNCTION();
+CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
const Mat &src = *inputs[0];
Mat &dst = outputs[0];

@@ -80,6 +80,9 @@ public:

void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
{
+CV_TRACE_FUNCTION();
+CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
for (size_t i = 0; i < outputs.size(); i++)
{
CV_Assert(inputs[0]->total() == outputs[i].total());
@@ -40,6 +40,7 @@
//M*/

#include <opencv2/core.hpp>
+#include <opencv2/core/utils/trace.hpp>
#include "cvconfig.h"
#include <opencv2/dnn.hpp>
#include <opencv2/dnn/all_layers.hpp>
@@ -13,7 +13,7 @@ Implementation of Tensorflow models parser
using namespace cv;
using namespace cv::dnn;

-#if HAVE_PROTOBUF
+#ifdef HAVE_PROTOBUF
#include "graph.pb.h"

#include <iostream>

@@ -9,7 +9,7 @@
Implementation of various functions which are related to Tensorflow models reading.
*/

-#if HAVE_PROTOBUF
+#ifdef HAVE_PROTOBUF
#include <google/protobuf/io/coded_stream.h>
#include <google/protobuf/io/zero_copy_stream_impl.h>
#include <google/protobuf/text_format.h>

@@ -11,7 +11,7 @@ Declaration of various functions which are related to Tensorflow models reading.

#ifndef __OPENCV_DNN_TF_IO_HPP__
#define __OPENCV_DNN_TF_IO_HPP__
-#if HAVE_PROTOBUF
+#ifdef HAVE_PROTOBUF

#include "graph.pb.h"
@@ -115,6 +115,8 @@ struct TorchImporter : public ::cv::dnn::Importer

TorchImporter(String filename, bool isBinary)
{
+CV_TRACE_FUNCTION();
+
rootModule = curModule = NULL;
moduleCounter = 0;

@@ -966,6 +968,8 @@ struct TorchImporter : public ::cv::dnn::Importer

void populateNet(Net net_)
{
+CV_TRACE_FUNCTION();
+
if (rootModule == NULL)
{
rootModule = new Module("Sequential");

@@ -1014,6 +1018,8 @@ Mat readTorchBlob(const String&, bool)

Net readNetFromTorch(const String &model, bool isBinary)
{
+CV_TRACE_FUNCTION();
+
Ptr<Importer> importer = createTorchImporter(model, isBinary);
Net net;
if (importer)
@@ -41,6 +41,7 @@
#include <opencv2/dnn.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/highgui.hpp>
+#include <opencv2/core/utils/trace.hpp>
using namespace cv;
using namespace cv::dnn;

@@ -84,6 +85,8 @@ static std::vector<String> readClassNames(const char *filename = "synset_words.t

int main(int argc, char **argv)
{
+CV_TRACE_FUNCTION();
+
String modelTxt = "bvlc_googlenet.prototxt";
String modelBin = "bvlc_googlenet.caffemodel";
String imageFile = (argc > 1) ? argv[1] : "space_shuttle.jpg";

@@ -117,13 +120,20 @@ int main(int argc, char **argv)
Scalar(104, 117, 123)); //Convert Mat to batch of images
//! [Prepare blob]

-//! [Set input blob]
-net.setInput(inputBlob, "data"); //set the network input
-//! [Set input blob]
-
-//! [Make forward pass]
-Mat prob = net.forward("prob"); //compute output
-//! [Make forward pass]
+Mat prob;
+cv::TickMeter t;
+for (int i = 0; i < 10; i++)
+{
+CV_TRACE_REGION("forward");
+//! [Set input blob]
+net.setInput(inputBlob, "data"); //set the network input
+//! [Set input blob]
+t.start();
+//! [Make forward pass]
+prob = net.forward("prob"); //compute output
+//! [Make forward pass]
+t.stop();
+}

//! [Gather output]
int classId;

@@ -136,6 +146,7 @@ int main(int argc, char **argv)
std::cout << "Best class: #" << classId << " '" << classNames.at(classId) << "'" << std::endl;
std::cout << "Probability: " << classProb * 100 << "%" << std::endl;
//! [Print results]
+std::cout << "Time: " << (double)t.getTimeMilli() / t.getCounter() << " ms (average from " << t.getCounter() << " iterations)" << std::endl;

return 0;
} //main
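The sample change above replaces the single forward pass with ten timed iterations. The average is computed with cv::TickMeter, whose getTimeMilli() returns the accumulated time over all start()/stop() pairs and getCounter() the number of measured pairs, so getTimeMilli() / getCounter() is the mean per-iteration time. A self-contained sketch of the same measurement scaffold (the network call itself is elided):

    #include <iostream>
    #include <opencv2/core.hpp>             // cv::TickMeter
    #include <opencv2/core/utils/trace.hpp> // CV_TRACE_REGION

    int main()
    {
        cv::TickMeter t;
        for (int i = 0; i < 10; i++)
        {
            CV_TRACE_REGION("forward"); // named region, visible in the trace output
            t.start();
            // net.forward("prob") would run here in the real sample
            t.stop();
        }
        std::cout << "Time: " << (double)t.getTimeMilli() / t.getCounter()
                  << " ms (average from " << t.getCounter() << " iterations)" << std::endl;
        return 0;
    }

Collecting an actual trace additionally requires enabling the tracing framework at run time; to the best of my knowledge this is done through the OPENCV_TRACE environment variable, but that is an assumption about the core tracing backend rather than something this patch touches.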