// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2018-2019, Intel Corporation, all rights reserved.
// Third party copyrights are property of their respective owners.
# ifndef __OPENCV_DNN_OP_INF_ENGINE_HPP__
# define __OPENCV_DNN_OP_INF_ENGINE_HPP__

# include "opencv2/core/cvdef.h"
# include "opencv2/core/cvstd.hpp"
# include "opencv2/dnn.hpp"

# include "opencv2/core/async.hpp"
# include "opencv2/core/detail/async_promise.hpp"

# include "opencv2/dnn/utils/inference_engine.hpp"
# ifdef HAVE_INF_ENGINE

# define INF_ENGINE_RELEASE_2020_2 2020020000
# define INF_ENGINE_RELEASE_2020_3 2020030000
# define INF_ENGINE_RELEASE_2020_4 2020040000
# define INF_ENGINE_RELEASE_2021_1 2021010000
# define INF_ENGINE_RELEASE_2021_2 2021020000
# define INF_ENGINE_RELEASE_2021_3 2021030000
# define INF_ENGINE_RELEASE_2021_4 2021040000
# define INF_ENGINE_RELEASE_2022_1 2022010000

# ifndef INF_ENGINE_RELEASE
# warning("IE version has not been provided via the command line. Using 2021.4 by default")
# define INF_ENGINE_RELEASE INF_ENGINE_RELEASE_2021_4
# endif
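// Note: INF_ENGINE_RELEASE encodes an OpenVINO release as YYYYUUPPPP (year, update,
// patch), so 2022010000 corresponds to OpenVINO 2022.1. It is normally set by the build
// system on the compiler command line (for example, -DINF_ENGINE_RELEASE=2022010000);
// the fallback above is used only when no value has been provided.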
# define INF_ENGINE_VER_MAJOR_GT(ver) (((INF_ENGINE_RELEASE) / 10000) > ((ver) / 10000))
# define INF_ENGINE_VER_MAJOR_GE(ver) (((INF_ENGINE_RELEASE) / 10000) >= ((ver) / 10000))
# define INF_ENGINE_VER_MAJOR_LT(ver) (((INF_ENGINE_RELEASE) / 10000) < ((ver) / 10000))
# define INF_ENGINE_VER_MAJOR_LE(ver) (((INF_ENGINE_RELEASE) / 10000) <= ((ver) / 10000))
# define INF_ENGINE_VER_MAJOR_EQ(ver) (((INF_ENGINE_RELEASE) / 10000) == ((ver) / 10000))
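// The *_MAJOR_* comparisons divide by 10000, so only the year/update part of the
// release number is compared and patch-level digits are ignored.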
# if defined(__GNUC__) && __GNUC__ >= 5
//#pragma GCC diagnostic push
# pragma GCC diagnostic ignored "-Wsuggest-override"
# endif

# if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2022_1)
# include <openvino/openvino.hpp>
# include <openvino/pass/serialize.hpp>
# include <openvino/pass/convert_fp32_to_fp16.hpp>
# else
# include <inference_engine.hpp>
# endif

# if defined(__GNUC__) && __GNUC__ >= 5
//#pragma GCC diagnostic pop
# endif

# endif // HAVE_INF_ENGINE

# define CV_ERROR_DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 do { CV_Error(Error::StsNotImplemented, "This OpenCV version is built without Inference Engine NN Builder API support (legacy API is not supported anymore)"); } while (0)

namespace cv { namespace dnn {
CV__DNN_INLINE_NS_BEGIN
namespace openvino {
// TODO: use std::string as parameter
bool checkTarget(Target target);
} // namespace openvino
CV__DNN_INLINE_NS_END
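// Illustrative usage: callers can check whether the OpenVINO backend supports a given
// DNN target before selecting it, e.g.
//   if (openvino::checkTarget(DNN_TARGET_OPENCL)) { /* prefer the OpenVINO backend */ }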
# ifdef HAVE_INF_ENGINE
Backend& getInferenceEngineBackendTypeParam();
# if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2022_1)
Mat infEngineBlobToMat(const ov::Tensor& blob);
void infEngineBlobsToMats(const ov::TensorVector& blobs,
                          std::vector<Mat>& mats);
# else
Mat infEngineBlobToMat(const InferenceEngine::Blob::Ptr& blob);
void infEngineBlobsToMats(const std::vector<InferenceEngine::Blob::Ptr>& blobs,
                          std::vector<Mat>& mats);
# endif // OpenVINO >= 2022.1
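// Illustrative example (assumes the OpenVINO >= 2022.1 path and an already-created
// ov::InferRequest named req): wrap an inference output in a cv::Mat:
//   ov::Tensor t = req.get_output_tensor();
//   Mat m = infEngineBlobToMat(t);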
CV__DNN_INLINE_NS_BEGIN
void switchToOpenVINOBackend(Net& net);

bool isMyriadX();

bool isArmComputePlugin();
CV__DNN_INLINE_NS_END
// Thin wrappers around classes from the OpenVINO API 2.0, kept only to reduce
// the amount of conditional compilation in the rest of the code.
# if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2022_1)
namespace InferenceEngine {

class CNNNetwork {
public:
    CNNNetwork();
    CNNNetwork(std::shared_ptr<ov::Model> model);

    std::shared_ptr<ov::Model> getFunction() const;

    void serialize(const std::string& xmlPath, const std::string& binPath);

    void reshape(const std::map<std::string, std::vector<size_t> >& shapes);

private:
    std::shared_ptr<ov::Model> model = nullptr;
};

typedef ov::InferRequest InferRequest;

class ExecutableNetwork : public ov::CompiledModel {
public:
    ExecutableNetwork();
    ExecutableNetwork(const ov::CompiledModel& copy);

    ov::InferRequest CreateInferRequest();
};

class Core : public ov::Core {
public:
    std::vector<std::string> GetAvailableDevices();
    void UnregisterPlugin(const std::string& id);
    CNNNetwork ReadNetwork(const std::string& xmlPath, const std::string& binPath);
    ExecutableNetwork LoadNetwork(CNNNetwork net, const std::string& device,
                                  const std::map<std::string, std::string>& config);
};

}  // namespace InferenceEngine
# endif // OpenVINO >= 2022.1
InferenceEngine::Core& getCore(const std::string& id);
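// Illustrative sketch: with the wrappers above, calling code can keep the pre-2022.1
// Inference Engine style regardless of the OpenVINO version. The model paths and the
// "CPU" device name below are placeholders:
//   InferenceEngine::Core& core = getCore("CPU");
//   InferenceEngine::CNNNetwork net = core.ReadNetwork("model.xml", "model.bin");
//   InferenceEngine::ExecutableNetwork exec = core.LoadNetwork(net, "CPU", {});
//   InferenceEngine::InferRequest req = exec.CreateInferRequest();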
template <typename T = size_t>
static inline std::vector<T> getShape(const Mat& mat)
{
    std::vector<T> result(mat.dims);
    for (int i = 0; i < mat.dims; i++)
        result[i] = (T)mat.size[i];
    return result;
}
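// Example: for a 4-dimensional blob Mat with sizes 1x3x224x224, getShape(mat)
// returns {1, 3, 224, 224}.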
# endif // HAVE_INF_ENGINE
}}  // namespace dnn, namespace cv
# endif // __OPENCV_DNN_OP_INF_ENGINE_HPP__