2018-02-06 16:57:35 +08:00
|
|
|
// This file is part of OpenCV project.
|
|
|
|
// It is subject to the license terms in the LICENSE file found in the top-level directory
|
|
|
|
// of this distribution and at http://opencv.org/license.html.
|
|
|
|
//
|
|
|
|
// Copyright (C) 2018, Intel Corporation, all rights reserved.
|
|
|
|
// Third party copyrights are property of their respective owners.
|
|
|
|
|
|
|
|
#ifndef __OPENCV_DNN_OP_INF_ENGINE_HPP__
|
|
|
|
#define __OPENCV_DNN_OP_INF_ENGINE_HPP__
|
|
|
|
|
2018-07-20 00:22:23 +08:00
|
|
|
#include "opencv2/core/cvdef.h"
|
2018-07-28 00:56:35 +08:00
|
|
|
#include "opencv2/core/cvstd.hpp"
|
|
|
|
#include "opencv2/dnn.hpp"
|
2018-07-20 00:22:23 +08:00
|
|
|
|
2018-02-06 16:57:35 +08:00
|
|
|
#ifdef HAVE_INF_ENGINE
|
2018-03-15 21:16:56 +08:00
|
|
|
#if defined(__GNUC__) && __GNUC__ >= 5
|
|
|
|
//#pragma GCC diagnostic push
|
|
|
|
#pragma GCC diagnostic ignored "-Wsuggest-override"
|
|
|
|
#endif
|
2018-02-06 16:57:35 +08:00
|
|
|
#include <inference_engine.hpp>
|
2018-03-15 21:16:56 +08:00
|
|
|
#if defined(__GNUC__) && __GNUC__ >= 5
|
|
|
|
//#pragma GCC diagnostic pop
|
|
|
|
#endif
|
2018-07-30 23:21:17 +08:00
|
|
|
|
2018-08-27 20:45:44 +08:00
|
|
|
#define INF_ENGINE_RELEASE_2018R3 2018030000
|
2018-10-23 00:23:50 +08:00
|
|
|
#define INF_ENGINE_RELEASE_2018R4 2018040000
|
2018-12-20 21:41:14 +08:00
|
|
|
#define INF_ENGINE_RELEASE_2018R5 2018050000
|
2018-07-30 23:21:17 +08:00
|
|
|
|
|
|
|
#ifndef INF_ENGINE_RELEASE
|
2018-12-20 21:41:14 +08:00
|
|
|
#warning("IE version have not been provided via command-line. Using 2018R5 by default")
|
|
|
|
#define INF_ENGINE_RELEASE INF_ENGINE_RELEASE_2018R5
|
2018-07-30 23:21:17 +08:00
|
|
|
#endif
|
|
|
|
|
|
|
|
#define INF_ENGINE_VER_MAJOR_GT(ver) (((INF_ENGINE_RELEASE) / 10000) > ((ver) / 10000))
|
2018-08-27 20:45:44 +08:00
|
|
|
#define INF_ENGINE_VER_MAJOR_GE(ver) (((INF_ENGINE_RELEASE) / 10000) >= ((ver) / 10000))
|
2019-01-14 14:55:44 +08:00
|
|
|
#define INF_ENGINE_VER_MAJOR_LT(ver) (((INF_ENGINE_RELEASE) / 10000) < ((ver) / 10000))
|
2019-02-11 22:13:39 +08:00
|
|
|
#define INF_ENGINE_VER_MAJOR_EQ(ver) (((INF_ENGINE_RELEASE) / 10000) == ((ver) / 10000))
|
2019-01-14 14:55:44 +08:00
|
|
|
|
|
|
|
#if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2018R5)
|
|
|
|
#include <ie_builders.hpp>
|
|
|
|
#endif
|
2018-07-30 23:21:17 +08:00
|
|
|
|
2018-02-06 16:57:35 +08:00
|
|
|
#endif // HAVE_INF_ENGINE
|
|
|
|
|
|
|
|
namespace cv { namespace dnn {
|
|
|
|
|
|
|
|
#ifdef HAVE_INF_ENGINE
|
|
|
|
|
2019-01-14 14:55:44 +08:00
|
|
|
#if INF_ENGINE_VER_MAJOR_LT(INF_ENGINE_RELEASE_2018R5)
|
2018-02-06 16:57:35 +08:00
|
|
|
// Adapter that implements InferenceEngine::ICNNNetwork so that a network
// assembled layer-by-layer by OpenCV's dnn module can be handed directly to
// the Inference Engine plugins. Used only for IE releases before 2018R5
// (see the INF_ENGINE_VER_MAJOR_LT guard above); newer releases use the
// Builder-API variant of this class instead.
// NOTE(review): method semantics below are inferred from the ICNNNetwork
// interface they override — confirm against op_inf_engine.cpp.
class InfEngineBackendNet : public InferenceEngine::ICNNNetwork
{
public:
    InfEngineBackendNet();

    // Wraps an already-built network (e.g. one imported from Model Optimizer IR).
    InfEngineBackendNet(InferenceEngine::CNNNetwork& net);

    virtual void Release() CV_NOEXCEPT CV_OVERRIDE;

    void setPrecision(InferenceEngine::Precision p) CV_NOEXCEPT;

    virtual InferenceEngine::Precision getPrecision() CV_NOEXCEPT;

    virtual InferenceEngine::Precision getPrecision() const CV_NOEXCEPT;

    // CV_OVERRIDE is commented out because const-ness of these methods differs
    // across supported IE releases; both const and non-const forms are provided.
    virtual void getOutputsInfo(InferenceEngine::OutputsDataMap &out) CV_NOEXCEPT /*CV_OVERRIDE*/;

    virtual void getOutputsInfo(InferenceEngine::OutputsDataMap &out) const CV_NOEXCEPT /*CV_OVERRIDE*/;

    virtual void getInputsInfo(InferenceEngine::InputsDataMap &inputs) CV_NOEXCEPT /*CV_OVERRIDE*/;

    virtual void getInputsInfo(InferenceEngine::InputsDataMap &inputs) const CV_NOEXCEPT /*CV_OVERRIDE*/;

    virtual InferenceEngine::InputInfo::Ptr getInput(const std::string &inputName) CV_NOEXCEPT;

    virtual InferenceEngine::InputInfo::Ptr getInput(const std::string &inputName) const CV_NOEXCEPT;

    virtual InferenceEngine::StatusCode serialize(const std::string &xmlPath, const std::string &binPath, InferenceEngine::ResponseDesc* resp) const CV_NOEXCEPT;

    virtual void getName(char *pName, size_t len) CV_NOEXCEPT;

    virtual void getName(char *pName, size_t len) const CV_NOEXCEPT;

    virtual const std::string& getName() const CV_NOEXCEPT;

    virtual size_t layerCount() CV_NOEXCEPT;

    virtual size_t layerCount() const CV_NOEXCEPT;

    virtual InferenceEngine::DataPtr& getData(const char *dname) CV_NOEXCEPT CV_OVERRIDE;

    virtual void addLayer(const InferenceEngine::CNNLayerPtr &layer) CV_NOEXCEPT CV_OVERRIDE;

    virtual InferenceEngine::StatusCode addOutput(const std::string &layerName,
                                                  size_t outputIndex = 0,
                                                  InferenceEngine::ResponseDesc *resp = nullptr) CV_NOEXCEPT;

    virtual InferenceEngine::StatusCode getLayerByName(const char *layerName,
                                                       InferenceEngine::CNNLayerPtr &out,
                                                       InferenceEngine::ResponseDesc *resp) CV_NOEXCEPT;

    virtual InferenceEngine::StatusCode getLayerByName(const char *layerName,
                                                       InferenceEngine::CNNLayerPtr &out,
                                                       InferenceEngine::ResponseDesc *resp) const CV_NOEXCEPT;

    virtual void setTargetDevice(InferenceEngine::TargetDevice device) CV_NOEXCEPT CV_OVERRIDE;

    virtual InferenceEngine::TargetDevice getTargetDevice() CV_NOEXCEPT;

    virtual InferenceEngine::TargetDevice getTargetDevice() const CV_NOEXCEPT;

    virtual InferenceEngine::StatusCode setBatchSize(const size_t size) CV_NOEXCEPT CV_OVERRIDE;

    virtual InferenceEngine::StatusCode setBatchSize(size_t size, InferenceEngine::ResponseDesc* responseDesc) CV_NOEXCEPT;

    virtual size_t getBatchSize() const CV_NOEXCEPT CV_OVERRIDE;

// Shape-inference extension points exist only in IE releases after 2018R2.
#if INF_ENGINE_VER_MAJOR_GT(INF_ENGINE_RELEASE_2018R2)
    virtual InferenceEngine::StatusCode AddExtension(const InferenceEngine::IShapeInferExtensionPtr& extension, InferenceEngine::ResponseDesc* resp) CV_NOEXCEPT;
    virtual InferenceEngine::StatusCode reshape(const InputShapes& inputShapes, InferenceEngine::ResponseDesc* resp) CV_NOEXCEPT;
#endif

    // OpenCV-side entry points (not part of ICNNNetwork):

    // Prepares the network for the given DNN_TARGET_* id (loads the plugin,
    // compiles the executable network). TODO confirm exact targetId mapping in the .cpp.
    void init(int targetId);

    // Registers the Mat-backed blobs of the given wrappers with this network.
    void addBlobs(const std::vector<Ptr<BackendWrapper> >& wrappers);

    // Runs one inference request on the currently bound input/output blobs.
    void forward();

    bool isInitialized();

private:
    std::vector<InferenceEngine::CNNLayerPtr> layers;
    InferenceEngine::InputsDataMap inputs;
    InferenceEngine::OutputsDataMap outputs;
    InferenceEngine::BlobMap inpBlobs;
    InferenceEngine::BlobMap outBlobs;
    InferenceEngine::BlobMap allBlobs;   // every blob known to the network, inputs and outputs included
    InferenceEngine::TargetDevice targetDevice;
    InferenceEngine::Precision precision;
    InferenceEngine::InferenceEnginePluginPtr enginePtr;
    InferenceEngine::InferencePlugin plugin;
    InferenceEngine::ExecutableNetwork netExec;
    InferenceEngine::InferRequest infRequest;
    // In case of models from Model Optimizer we need to manage their lifetime.
    InferenceEngine::CNNNetwork netOwner;
    // There is no way to check if netOwner is initialized or not so we use
    // a separate flag to determine if the model has been loaded from IR.
    bool hasNetOwner;

    std::string name;

    void initPlugin(InferenceEngine::ICNNNetwork& net);
};
|
|
|
|
|
2019-01-14 14:55:44 +08:00
|
|
|
#else // IE < R5
|
|
|
|
|
|
|
|
// Builder-API variant of InfEngineBackendNet, used for IE 2018R5 and newer
// (selected by the surrounding #if/#else on INF_ENGINE_RELEASE). Instead of
// implementing ICNNNetwork, it accumulates layers in an
// InferenceEngine::Builder::Network and builds a CNNNetwork from it.
// NOTE(review): behavioral details are inferred from the declarations —
// confirm against op_inf_engine.cpp.
class InfEngineBackendNet
{
public:
    InfEngineBackendNet();

    // Wraps an already-built network (e.g. one imported from Model Optimizer IR).
    InfEngineBackendNet(InferenceEngine::CNNNetwork& net);

    // Appends a layer to the builder graph.
    void addLayer(InferenceEngine::Builder::Layer& layer);

    // Marks the layer with the given name as a requested network output.
    void addOutput(const std::string& name);

    // Connects the given input/output wrappers to the layer with layerName.
    void connect(const std::vector<Ptr<BackendWrapper> >& inputs,
                 const std::vector<Ptr<BackendWrapper> >& outputs,
                 const std::string& layerName);

    bool isInitialized();

    // Prepares the network for the given DNN_TARGET_* id.
    void init(int targetId);

    // Runs one inference request on the currently bound input/output blobs.
    void forward();

    // Loads the IE plugin and compiles the executable network.
    void initPlugin(InferenceEngine::ICNNNetwork& net);

    // Registers the Mat-backed blobs of the given wrappers with this network.
    void addBlobs(const std::vector<Ptr<BackendWrapper> >& ptrs);

private:
    InferenceEngine::Builder::Network netBuilder;

    InferenceEngine::InferenceEnginePluginPtr enginePtr;
    InferenceEngine::InferencePlugin plugin;
    InferenceEngine::ExecutableNetwork netExec;
    InferenceEngine::InferRequest infRequest;
    InferenceEngine::BlobMap allBlobs;
    InferenceEngine::BlobMap inpBlobs;
    InferenceEngine::BlobMap outBlobs;
    InferenceEngine::TargetDevice targetDevice;

    // Network loaded from IR; lifetime must be managed here (see hasNetOwner).
    InferenceEngine::CNNNetwork cnn;
    bool hasNetOwner;

    // Maps a layer name to its id inside netBuilder (presumably — verify in .cpp).
    std::map<std::string, int> layers;
    std::vector<std::string> requestedOutputs;

    std::set<int> unconnectedLayersIds;
};
|
|
|
|
#endif // IE < R5
|
|
|
|
|
2018-02-06 16:57:35 +08:00
|
|
|
// Backend node produced by the dnn graph for a single layer executed through
// Inference Engine. The stored layer type depends on the IE release: a
// Builder::Layer for 2018R5+, a CNNLayerPtr otherwise.
class InfEngineBackendNode : public BackendNode
{
public:
#if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2018R5)
    InfEngineBackendNode(const InferenceEngine::Builder::Layer& layer);
#else
    InfEngineBackendNode(const InferenceEngine::CNNLayerPtr& layer);
#endif

    // Binds the given wrappers as this layer's inputs and outputs.
    void connect(std::vector<Ptr<BackendWrapper> >& inputs,
                 std::vector<Ptr<BackendWrapper> >& outputs);

    // Inference Engine network object that allows to obtain the outputs of this layer.
#if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2018R5)
    InferenceEngine::Builder::Layer layer;
    Ptr<InfEngineBackendNet> net;
#else
    InferenceEngine::CNNLayerPtr layer;
    Ptr<InfEngineBackendNet> net;
#endif
};
|
|
|
|
|
|
|
|
// BackendWrapper that exposes a cv::Mat to Inference Engine as a Blob,
// so tensor memory can be shared between OpenCV and IE without copies
// until copyToHost()/setHostDirty() request synchronization.
class InfEngineBackendWrapper : public BackendWrapper
{
public:
    // Wraps the given Mat for the specified DNN_TARGET_* id.
    InfEngineBackendWrapper(int targetId, const Mat& m);

    // Re-wraps an existing wrapper (shares its underlying data — verify in .cpp).
    InfEngineBackendWrapper(Ptr<BackendWrapper> wrapper);

    ~InfEngineBackendWrapper();

    static Ptr<BackendWrapper> create(Ptr<BackendWrapper> wrapper);

    virtual void copyToHost() CV_OVERRIDE;

    virtual void setHostDirty() CV_OVERRIDE;

    // IE graph node describing this tensor.
    InferenceEngine::DataPtr dataPtr;
    // IE blob sharing (or holding) the tensor data.
    InferenceEngine::Blob::Ptr blob;
};
|
|
|
|
|
2018-06-05 04:51:28 +08:00
|
|
|
// Wraps m's data into an IE blob without copying (presumably zero-copy —
// verify in op_inf_engine.cpp), using m's own dimensions.
InferenceEngine::Blob::Ptr wrapToInfEngineBlob(const Mat& m, InferenceEngine::Layout layout = InferenceEngine::Layout::ANY);

// Same as above but with an explicitly specified blob shape.
InferenceEngine::Blob::Ptr wrapToInfEngineBlob(const Mat& m, const std::vector<size_t>& shape, InferenceEngine::Layout layout);

// Extracts the IE data node from a wrapper created by this backend.
InferenceEngine::DataPtr infEngineDataNode(const Ptr<BackendWrapper>& ptr);

// Creates a Mat view/copy of the blob's data (ownership semantics: see .cpp).
Mat infEngineBlobToMat(const InferenceEngine::Blob::Ptr& blob);

// Convert Inference Engine blob with FP32 precision to FP16 precision.
// Allocates memory for a new blob.
InferenceEngine::Blob::Ptr convertFp16(const InferenceEngine::Blob::Ptr& blob);

#if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2018R5)
// Attaches a constant blob (weights/biases) under the given name to a Builder layer.
void addConstantData(const std::string& name, InferenceEngine::Blob::Ptr data, InferenceEngine::Builder::Layer& l);
#endif
|
|
|
|
|
2018-03-17 00:27:04 +08:00
|
|
|
// This is a fake class to run networks from Model Optimizer. Objects of that
|
|
|
|
// class simulate responses of layers are imported by OpenCV and supported by
|
|
|
|
// Inference Engine. The main difference is that they do not perform forward pass.
|
|
|
|
// This is a fake class to run networks from Model Optimizer. Objects of that
// class simulate responses of layers are imported by OpenCV and supported by
// Inference Engine. The main difference is that they do not perform forward pass.
class InfEngineBackendLayer : public Layer
{
public:
    InfEngineBackendLayer(const InferenceEngine::CNNNetwork &t_net_) : t_net(t_net_) {};

    // Reports output shapes by querying the wrapped IE network rather than
    // computing them (this layer never executes).
    virtual bool getMemoryShapes(const std::vector<MatShape> &inputs,
                                 const int requiredOutputs,
                                 std::vector<MatShape> &outputs,
                                 std::vector<MatShape> &internals) const CV_OVERRIDE;

    // Intentionally a no-op / non-computing stub — the real forward pass is
    // performed by the Inference Engine backend (confirm in .cpp).
    virtual void forward(InputArrayOfArrays inputs, OutputArrayOfArrays outputs,
                         OutputArrayOfArrays internals) CV_OVERRIDE;

    virtual bool supportBackend(int backendId) CV_OVERRIDE;

private:
    // The IE network this stub layer belongs to; used for shape queries.
    InferenceEngine::CNNNetwork t_net;
};
|
|
|
|
|
2018-02-06 16:57:35 +08:00
|
|
|
#endif // HAVE_INF_ENGINE
|
|
|
|
|
|
|
|
// Returns true when OpenCV was built with Inference Engine support
// (HAVE_INF_ENGINE) — declared unconditionally so callers need no #ifdef.
bool haveInfEngine();

// Runs the forward pass of the IE network attached to the given backend node.
void forwardInfEngine(Ptr<BackendNode>& node);
|
|
|
|
|
|
|
|
}} // namespace dnn, namespace cv
|
|
|
|
|
|
|
|
#endif // __OPENCV_DNN_OP_INF_ENGINE_HPP__
|