// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2018, Intel Corporation, all rights reserved.
// Third party copyrights are property of their respective owners.

#ifndef __OPENCV_DNN_OP_INF_ENGINE_HPP__
#define __OPENCV_DNN_OP_INF_ENGINE_HPP__

#ifdef HAVE_INF_ENGINE
#if defined(__GNUC__) && __GNUC__ >= 5
//#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wsuggest-override"
#endif
#include <inference_engine.hpp>
#if defined(__GNUC__) && __GNUC__ >= 5
//#pragma GCC diagnostic pop
#endif
#endif  // HAVE_INF_ENGINE

namespace cv { namespace dnn {

#ifdef HAVE_INF_ENGINE

class InfEngineBackendNet : public InferenceEngine::ICNNNetwork
{
|
|
|
|
public:
|
2018-03-17 00:27:04 +08:00
|
|
|
InfEngineBackendNet();
|
|
|
|
|
|
|
|
InfEngineBackendNet(InferenceEngine::CNNNetwork& net);
|
|
|
|
|
2018-03-15 21:16:56 +08:00
|
|
|
virtual void Release() noexcept CV_OVERRIDE;
|
2018-02-06 16:57:35 +08:00
|
|
|
|
2018-03-12 22:35:28 +08:00
|
|
|
void setPrecision(InferenceEngine::Precision p) noexcept;
|
|
|
|
|
2018-03-15 21:16:56 +08:00
|
|
|
virtual InferenceEngine::Precision getPrecision() noexcept CV_OVERRIDE;
|
2018-02-06 16:57:35 +08:00
|
|
|
|
2018-03-15 21:16:56 +08:00
|
|
|
virtual void getOutputsInfo(InferenceEngine::OutputsDataMap &out) noexcept /*CV_OVERRIDE*/;
|
2018-02-06 16:57:35 +08:00
|
|
|
|
2018-03-15 21:16:56 +08:00
|
|
|
virtual void getOutputsInfo(InferenceEngine::OutputsDataMap &out) const noexcept /*CV_OVERRIDE*/;
|
2018-02-06 16:57:35 +08:00
|
|
|
|
2018-03-15 21:16:56 +08:00
|
|
|
virtual void getInputsInfo(InferenceEngine::InputsDataMap &inputs) noexcept /*CV_OVERRIDE*/;
|
2018-02-14 19:17:44 +08:00
|
|
|
|
2018-03-15 21:16:56 +08:00
|
|
|
virtual void getInputsInfo(InferenceEngine::InputsDataMap &inputs) const noexcept /*CV_OVERRIDE*/;
|
2018-02-06 16:57:35 +08:00
|
|
|
|
2018-03-15 21:16:56 +08:00
|
|
|
virtual InferenceEngine::InputInfo::Ptr getInput(const std::string &inputName) noexcept CV_OVERRIDE;
|
2018-02-06 16:57:35 +08:00
|
|
|
|
2018-05-22 20:18:18 +08:00
|
|
|
virtual void getName(char *pName, size_t len) noexcept;
|
|
|
|
|
|
|
|
virtual void getName(char *pName, size_t len) const noexcept;
|
2018-02-06 16:57:35 +08:00
|
|
|
|
2018-03-15 21:16:56 +08:00
|
|
|
virtual size_t layerCount() noexcept CV_OVERRIDE;
|
2018-02-06 16:57:35 +08:00
|
|
|
|
2018-03-15 21:16:56 +08:00
|
|
|
virtual InferenceEngine::DataPtr& getData(const char *dname) noexcept CV_OVERRIDE;
|
|
|
|
|
|
|
|
virtual void addLayer(const InferenceEngine::CNNLayerPtr &layer) noexcept CV_OVERRIDE;
|
2018-02-06 16:57:35 +08:00
|
|
|
|
|
|
|
virtual InferenceEngine::StatusCode addOutput(const std::string &layerName,
|
|
|
|
size_t outputIndex = 0,
|
2018-03-15 21:16:56 +08:00
|
|
|
InferenceEngine::ResponseDesc *resp = nullptr) noexcept CV_OVERRIDE;
|
2018-02-06 16:57:35 +08:00
|
|
|
|
|
|
|
virtual InferenceEngine::StatusCode getLayerByName(const char *layerName,
|
|
|
|
InferenceEngine::CNNLayerPtr &out,
|
2018-03-15 21:16:56 +08:00
|
|
|
InferenceEngine::ResponseDesc *resp) noexcept CV_OVERRIDE;
|
2018-02-06 16:57:35 +08:00
|
|
|
|
2018-03-15 21:16:56 +08:00
|
|
|
virtual void setTargetDevice(InferenceEngine::TargetDevice device) noexcept CV_OVERRIDE;
|
2018-02-06 16:57:35 +08:00
|
|
|
|
2018-03-15 21:16:56 +08:00
|
|
|
virtual InferenceEngine::TargetDevice getTargetDevice() noexcept CV_OVERRIDE;
|
2018-02-06 16:57:35 +08:00
|
|
|
|
2018-03-15 21:16:56 +08:00
|
|
|
virtual InferenceEngine::StatusCode setBatchSize(const size_t size) noexcept CV_OVERRIDE;
|
2018-02-06 16:57:35 +08:00
|
|
|
|
2018-03-15 21:16:56 +08:00
|
|
|
virtual size_t getBatchSize() const noexcept CV_OVERRIDE;
|
2018-02-06 16:57:35 +08:00
|
|
|
|
2018-03-12 22:35:28 +08:00
|
|
|
void init(int targetId);
|
2018-02-06 16:57:35 +08:00
|
|
|
|
|
|
|
void addBlobs(const std::vector<Ptr<BackendWrapper> >& wrappers);
|
|
|
|
|
|
|
|
void forward();
|
|
|
|
|
|
|
|
bool isInitialized();
|
|
|
|
|
|
|
|
private:
|
|
|
|
std::vector<InferenceEngine::CNNLayerPtr> layers;
|
|
|
|
InferenceEngine::InputsDataMap inputs;
|
|
|
|
InferenceEngine::OutputsDataMap outputs;
|
|
|
|
InferenceEngine::BlobMap inpBlobs;
|
|
|
|
InferenceEngine::BlobMap outBlobs;
|
|
|
|
InferenceEngine::BlobMap allBlobs;
|
2018-03-12 22:35:28 +08:00
|
|
|
InferenceEngine::TargetDevice targetDevice;
|
|
|
|
InferenceEngine::Precision precision;
|
2018-06-01 19:10:32 +08:00
|
|
|
InferenceEngine::InferenceEnginePluginPtr enginePtr;
|
|
|
|
InferenceEngine::InferencePlugin plugin;
|
|
|
|
InferenceEngine::ExecutableNetwork netExec;
|
|
|
|
InferenceEngine::InferRequest infRequest;
|
2018-03-17 00:27:04 +08:00
|
|
|
|
|
|
|
void initPlugin(InferenceEngine::ICNNNetwork& net);
|
2018-02-06 16:57:35 +08:00
|
|
|
};
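
// A minimal lifecycle sketch (illustrative only, kept in a comment so the
// header stays declaration-only). 'inputWrappers' and 'outputWrappers' are
// hypothetical names for wrappers supplied by the caller; DNN_TARGET_CPU is
// one of OpenCV's dnn target ids:
//
//   InfEngineBackendNet net;
//   // ... populate via addLayer() and mark outputs via addOutput() ...
//   net.addBlobs(inputWrappers);   // register input memory
//   net.addBlobs(outputWrappers);  // register output memory
//   net.init(DNN_TARGET_CPU);      // pick a plugin, build the infer request
//   net.forward();                 // run one inference pass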

class InfEngineBackendNode : public BackendNode
{
public:
    InfEngineBackendNode(const InferenceEngine::CNNLayerPtr& layer);

    void connect(std::vector<Ptr<BackendWrapper> >& inputs,
                 std::vector<Ptr<BackendWrapper> >& outputs);

    InferenceEngine::CNNLayerPtr layer;
    // Inference Engine network object used to obtain the outputs of this layer.
    Ptr<InfEngineBackendNet> net;
};
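
// Usage sketch (hypothetical names): a node owns the Inference Engine layer
// built for one OpenCV layer, and connect() binds that layer's inputs and
// outputs to the wrappers of neighbouring layers:
//
//   Ptr<InfEngineBackendNode> node(new InfEngineBackendNode(cnnLayer));
//   node->connect(inputWrappers, outputWrappers);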

class InfEngineBackendWrapper : public BackendWrapper
{
public:
    InfEngineBackendWrapper(int targetId, const Mat& m);

    ~InfEngineBackendWrapper();

    virtual void copyToHost() CV_OVERRIDE;

    virtual void setHostDirty() CV_OVERRIDE;

    InferenceEngine::DataPtr dataPtr;
    InferenceEngine::TBlob<float>::Ptr blob;
};
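
// Synchronization sketch (illustrative): setHostDirty() marks the wrapped
// cv::Mat as the freshest copy so the blob is refreshed before the next run,
// while copyToHost() is expected to make inference results visible in the Mat:
//
//   Ptr<BackendWrapper> w(new InfEngineBackendWrapper(DNN_TARGET_CPU, m));
//   w->setHostDirty();  // 'm' was modified on the host
//   w->copyToHost();    // read back results after a forward pass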

InferenceEngine::TBlob<float>::Ptr wrapToInfEngineBlob(const Mat& m, InferenceEngine::Layout layout = InferenceEngine::Layout::ANY);

InferenceEngine::TBlob<float>::Ptr wrapToInfEngineBlob(const Mat& m, const std::vector<size_t>& shape, InferenceEngine::Layout layout);

InferenceEngine::DataPtr infEngineDataNode(const Ptr<BackendWrapper>& ptr);

Mat infEngineBlobToMat(const InferenceEngine::Blob::Ptr& blob);
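
// Round-trip sketch ('m' is a hypothetical FP32 Mat): wrapToInfEngineBlob()
// is intended to wrap the Mat's buffer without copying it, and
// infEngineBlobToMat() exposes a blob's buffer as a Mat header:
//
//   Mat m({1, 3, 224, 224}, CV_32F);
//   InferenceEngine::TBlob<float>::Ptr blob =
//       wrapToInfEngineBlob(m, InferenceEngine::Layout::NCHW);
//   Mat view = infEngineBlobToMat(blob);  // same data, different header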

// Convert Inference Engine blob with FP32 precision to FP16 precision.
// Allocates memory for a new blob.
InferenceEngine::TBlob<int16_t>::Ptr convertFp16(const InferenceEngine::Blob::Ptr& blob);
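
// Conversion sketch ('weights' is a hypothetical FP32 Mat): unlike the
// wrappers above, convertFp16() allocates fresh storage, so the result does
// not alias its input. TBlob<int16_t> is used because Inference Engine keeps
// FP16 values in 16-bit words:
//
//   InferenceEngine::TBlob<float>::Ptr fp32 = wrapToInfEngineBlob(weights);
//   InferenceEngine::TBlob<int16_t>::Ptr fp16 = convertFp16(fp32);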

// This is a fake class to run networks from Model Optimizer. Objects of this
// class simulate responses of layers that are imported by OpenCV and supported
// by Inference Engine. The main difference is that they do not perform a
// forward pass.
class InfEngineBackendLayer : public Layer
{
public:
    InfEngineBackendLayer(const InferenceEngine::DataPtr& output);

    virtual bool getMemoryShapes(const std::vector<MatShape> &inputs,
                                 const int requiredOutputs,
                                 std::vector<MatShape> &outputs,
                                 std::vector<MatShape> &internals) const CV_OVERRIDE;

    virtual void forward(std::vector<Mat*> &input, std::vector<Mat> &output,
                         std::vector<Mat> &internals) CV_OVERRIDE;

    virtual void forward(InputArrayOfArrays inputs, OutputArrayOfArrays outputs,
                         OutputArrayOfArrays internals) CV_OVERRIDE;

    virtual bool supportBackend(int backendId) CV_OVERRIDE;

private:
    InferenceEngine::DataPtr output;
};
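
// Shape-propagation sketch (hypothetical names): a fake layer stands in for a
// layer executed by Inference Engine so OpenCV can still query output shapes:
//
//   Ptr<Layer> fake(new InfEngineBackendLayer(ieDataNode));
//   std::vector<MatShape> outs, internals;
//   fake->getMemoryShapes(inputShapes, 1, outs, internals);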

#endif  // HAVE_INF_ENGINE

bool haveInfEngine();

void forwardInfEngine(Ptr<BackendNode>& node);
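
// Call-site sketch: both functions are declared outside the HAVE_INF_ENGINE
// guard, so callers can branch at runtime (forwardInfEngine() is expected to
// fail or no-op when the backend is unavailable):
//
//   if (haveInfEngine())
//       forwardInfEngine(node);
//   else
//       CV_Error(Error::StsNotImplemented, "Inference Engine backend is disabled");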

}} // namespace dnn, namespace cv

#endif // __OPENCV_DNN_OP_INF_ENGINE_HPP__