Merge pull request #21745 from alalek:dnn_plugin_openvino

commit 347246901e
@@ -1094,6 +1094,18 @@ macro(ocv_list_filterout lst regex)
   endforeach()
 endmacro()
 
+# Usage: ocv_list_filterout_ex(list_name regex1 regex2 ...)
+macro(ocv_list_filterout_ex lst)
+  foreach(regex ${ARGN})
+    foreach(item ${${lst}})
+      if(item MATCHES "${regex}")
+        list(REMOVE_ITEM ${lst} "${item}")
+      endif()
+    endforeach()
+  endforeach()
+endmacro()
+
+
 # filter matching elements from the list
 macro(ocv_list_filter lst regex)
   set(dst ${ARGN})
@@ -62,7 +62,7 @@ CV_EXPORTS void glob_relative(const cv::String& directory, const cv::String& pat
 CV_EXPORTS bool createDirectory(const cv::String& path);
 CV_EXPORTS bool createDirectories(const cv::String& path);
 
-#ifdef __OPENCV_BUILD
+#if defined(__OPENCV_BUILD) || defined(BUILD_PLUGIN)
 // TODO
 //CV_EXPORTS cv::String getTempDirectory();
@@ -13,18 +13,22 @@ ocv_add_dispatched_file_force_all("int8layers/layers_common" AVX2 AVX512_SKX LAS
 
 ocv_add_module(dnn opencv_core opencv_imgproc WRAP python java objc js)
 
+include(${CMAKE_CURRENT_LIST_DIR}/cmake/plugin.cmake)
+
+
 ocv_option(OPENCV_DNN_OPENCL "Build with OpenCL support" HAVE_OPENCL AND NOT APPLE)
 
 if(OPENCV_DNN_OPENCL AND HAVE_OPENCL)
-  add_definitions(-DCV_OCL4DNN=1)
+  ocv_target_compile_definitions(${the_module} PRIVATE "CV_OCL4DNN=1")
 endif()
 
 if(WITH_WEBNN AND HAVE_WEBNN)
-  add_definitions(-DHAVE_WEBNN=1)
+  ocv_target_compile_definitions(${the_module} PRIVATE "HAVE_WEBNN=1")
 endif()
 
 if(HAVE_TIMVX)
-  add_definitions(-DHAVE_TIMVX=1)
+  ocv_target_compile_definitions(${the_module} PRIVATE "HAVE_TIMVX=1")
 endif()
 
 ocv_option(OPENCV_DNN_CUDA "Build with CUDA support"
@@ -35,7 +39,7 @@ ocv_option(OPENCV_DNN_CUDA "Build with CUDA support"
 
 if(OPENCV_DNN_CUDA)
   if(HAVE_CUDA AND HAVE_CUBLAS AND HAVE_CUDNN)
-    add_definitions(-DCV_CUDA4DNN=1)
+    ocv_target_compile_definitions(${the_module} PRIVATE "CV_CUDA4DNN=1")
   else()
     if(NOT HAVE_CUDA)
       message(SEND_ERROR "DNN: CUDA backend requires CUDA Toolkit. Please resolve dependency or disable OPENCV_DNN_CUDA=OFF")
@@ -47,12 +51,15 @@ if(OPENCV_DNN_CUDA)
   endif()
 endif()
 
 ocv_cmake_hook_append(INIT_MODULE_SOURCES_opencv_dnn "${CMAKE_CURRENT_LIST_DIR}/cmake/hooks/INIT_MODULE_SOURCES_opencv_dnn.cmake")
 
 if(HAVE_TENGINE)
-  add_definitions(-DHAVE_TENGINE=1)
+  ocv_target_compile_definitions(${the_module} PRIVATE "HAVE_TENGINE=1")
 endif()
 
 if(MSVC)
   add_definitions( -D_CRT_SECURE_NO_WARNINGS=1 )
   ocv_warnings_disable(CMAKE_CXX_FLAGS /wd4244 /wd4267 /wd4018 /wd4355 /wd4800 /wd4251 /wd4996 /wd4146
@@ -87,10 +94,10 @@ if(ANDROID)
 endif()
 
 if(NOT BUILD_PROTOBUF)
-  add_definitions(-DOPENCV_DNN_EXTERNAL_PROTOBUF=1)
+  ocv_target_compile_definitions(${the_module} PRIVATE "OPENCV_DNN_EXTERNAL_PROTOBUF=1")
 endif()
 
-add_definitions(-DHAVE_PROTOBUF=1)
+ocv_target_compile_definitions(${the_module} PRIVATE "HAVE_PROTOBUF=1")
 
 #suppress warnings in autogenerated caffe.pb.* files
 ocv_warnings_disable(CMAKE_CXX_FLAGS
@@ -175,12 +182,34 @@ endif()
 
 set(dnn_runtime_libs "")
 
+file(GLOB_RECURSE dnn_srcs
+    "${CMAKE_CURRENT_LIST_DIR}/src/*.cpp"
+)
+file(GLOB_RECURSE dnn_int_hdrs
+    "${CMAKE_CURRENT_LIST_DIR}/src/*.hpp"
+    "${CMAKE_CURRENT_LIST_DIR}/src/*.h"
+)
+set(dnn_plugin_srcs ${dnn_srcs} ${dnn_int_hdrs})
+ocv_list_filterout_ex(dnn_plugin_srcs
+    "/src/dnn.cpp$|/src/dnn_utils.cpp$|/src/dnn_read.cpp$|/src/registry.cpp$|/src/backend.cpp$"
+    # importers
+    "/src/(caffe|darknet|onnx|tensorflow|torch)/"
+    # executors
+    "/src/(cuda|cuda4dnn|ocl4dnn|vkcom|webnn)/"
+)
+
 ocv_option(OPENCV_DNN_OPENVINO "Build with OpenVINO support (2021.4+)" (TARGET ocv.3rdparty.openvino))
 if(TARGET ocv.3rdparty.openvino AND OPENCV_DNN_OPENVINO)
   if(NOT HAVE_OPENVINO AND NOT HAVE_NGRAPH)
     message(FATAL_ERROR "DNN: Inference Engine is not supported without enabled 'nGraph'. Check build configuration.")
   endif()
+  if("openvino" IN_LIST DNN_PLUGIN_LIST OR DNN_PLUGIN_LIST STREQUAL "all")
+    # plugin doesn't support PCH, separate directory scope is necessary
+    # opencv_world requires absolute path
+    add_subdirectory("${CMAKE_CURRENT_LIST_DIR}/misc/plugin/openvino" "${CMAKE_CURRENT_BINARY_DIR}/dnn_plugin_openvino")
+  elseif(NOT OPENCV_DNN_BUILTIN_BACKEND)
     list(APPEND dnn_runtime_libs ocv.3rdparty.openvino)
+  endif()
 endif()
 
 ocv_glob_module_sources(${sources_options} SOURCES ${fw_srcs} ${webnn_srcs})
@@ -205,7 +234,7 @@ if(BUILD_PERF_TESTS)
   )
     find_package(Caffe QUIET)
     if (Caffe_FOUND)
-      add_definitions(-DHAVE_CAFFE=1)
+      ocv_target_compile_definitions(opencv_perf_dnn PRIVATE "HAVE_CAFFE=1")
       ocv_target_link_libraries(opencv_perf_dnn caffe)
     endif()
   elseif(OPENCV_DNN_PERF_CLCAFFE
@@ -213,8 +242,25 @@ if(BUILD_PERF_TESTS)
   )
     find_package(Caffe QUIET)
     if (Caffe_FOUND)
-      add_definitions(-DHAVE_CLCAFFE=1)
+      ocv_target_compile_definitions(opencv_perf_dnn PRIVATE "HAVE_CLCAFFE=1")
      ocv_target_link_libraries(opencv_perf_dnn caffe)
    endif()
  endif()
 endif()
 
+if(DNN_ENABLE_PLUGINS)
+  ocv_target_compile_definitions(${the_module} PRIVATE ENABLE_PLUGINS)
+  if(TARGET opencv_test_dnn)
+    ocv_target_compile_definitions(opencv_test_dnn PRIVATE ENABLE_PLUGINS)
+  endif()
+  if(OPENCV_DEBUG_POSTFIX)
+    ocv_append_source_file_compile_definitions("${CMAKE_CURRENT_LIST_DIR}/src/backend.cpp" "DEBUG_POSTFIX=${OPENCV_DEBUG_POSTFIX}")
+  endif()
+endif()
+
 ocv_option(OPENCV_TEST_DNN_OPENVINO "Build test with OpenVINO code" (TARGET ocv.3rdparty.openvino))
 if(TARGET ocv.3rdparty.openvino AND OPENCV_TEST_DNN_OPENVINO)
   if(TARGET opencv_test_dnn)
     ocv_target_link_libraries(opencv_test_dnn ocv.3rdparty.openvino)
   endif()
 endif()
modules/dnn/cmake/init.cmake (new file, 29 lines)
@@ -0,0 +1,29 @@
if(PROJECT_NAME STREQUAL "OpenCV")
  set(ENABLE_PLUGINS_DEFAULT ON)
  if(EMSCRIPTEN OR IOS OR WINRT)
    set(ENABLE_PLUGINS_DEFAULT OFF)
  endif()
  set(DNN_PLUGIN_LIST "" CACHE STRING "List of DNN backends to be compiled as plugins (openvino, etc or special value 'all')")
  set(DNN_ENABLE_PLUGINS "${ENABLE_PLUGINS_DEFAULT}" CACHE BOOL "Allow building and using of DNN plugins")
  mark_as_advanced(DNN_PLUGIN_LIST DNN_ENABLE_PLUGINS)

  string(REPLACE "," ";" DNN_PLUGIN_LIST "${DNN_PLUGIN_LIST}")  # support comma-separated list (,) too
  string(TOLOWER "${DNN_PLUGIN_LIST}" DNN_PLUGIN_LIST)
  if(NOT DNN_ENABLE_PLUGINS)
    if(DNN_PLUGIN_LIST)
      message(WARNING "DNN: plugins are disabled through DNN_ENABLE_PLUGINS, so DNN_PLUGIN_LIST='${DNN_PLUGIN_LIST}' is ignored")
      set(DNN_PLUGIN_LIST "")
    endif()
  else()
    # Make virtual plugins target
    if(NOT TARGET opencv_dnn_plugins)
      add_custom_target(opencv_dnn_plugins ALL)
    endif()
  endif()
endif()

#
# Detect available dependencies
#

# OpenVINO - detected by main CMake scripts (shared with G-API)
modules/dnn/cmake/plugin.cmake (new file, 80 lines)
@@ -0,0 +1,80 @@
function(ocv_create_builtin_dnn_plugin name target)

  ocv_debug_message("ocv_create_builtin_dnn_plugin(${ARGV})")

  if(NOT TARGET ${target})
    message(FATAL_ERROR "${target} does not exist!")
  endif()
  if(NOT OpenCV_SOURCE_DIR)
    message(FATAL_ERROR "OpenCV_SOURCE_DIR must be set to build the plugin!")
  endif()

  message(STATUS "DNN: add builtin plugin '${name}'")

  set(ENABLE_PRECOMPILED_HEADERS OFF)  # no support for PCH in plugins, conflicts with module's source files

  # TODO: update CPU optimizations scripts to support plugins
  add_definitions(-D__OPENCV_BUILD=1)
  add_definitions(-DBUILD_PLUGIN=1)
  include_directories("${OPENCV_MODULE_opencv_dnn_BINARY_DIR}")  # Cannot open include file: 'layers/layers_common.simd_declarations.hpp'

  foreach(src ${ARGN})
    if(EXISTS "${CMAKE_CURRENT_LIST_DIR}/src/${src}")
      list(APPEND sources "${CMAKE_CURRENT_LIST_DIR}/src/${src}")
    elseif(IS_ABSOLUTE "${src}")
      list(APPEND sources "${src}")
    else()
      message(FATAL_ERROR "Unknown source: ${src}")
    endif()
  endforeach()

  if(OPENCV_MODULE_${the_module}_SOURCES_DISPATCHED)
    list(APPEND sources ${OPENCV_MODULE_${the_module}_SOURCES_DISPATCHED})
  endif()

  set(__${name}_DEPS_EXT "")
  ocv_compiler_optimization_process_sources(sources __${name}_DEPS_EXT ${name})

  add_library(${name} MODULE ${sources})
  target_include_directories(${name} PRIVATE "${CMAKE_CURRENT_BINARY_DIR}")
  target_link_libraries(${name} PRIVATE ${target} ${__${name}_DEPS_EXT})
  target_link_libraries(${name} PRIVATE ${__plugin_libs})

  foreach(mod opencv_dnn
      opencv_core
      opencv_imgproc
      opencv_dnn
  )
    ocv_target_link_libraries(${name} LINK_PRIVATE ${mod})
    ocv_target_include_directories(${name} "${OPENCV_MODULE_${mod}_LOCATION}/include")
  endforeach()

  if(WIN32)
    set(OPENCV_PLUGIN_VERSION "${OPENCV_DLLVERSION}" CACHE STRING "")
    if(CMAKE_CXX_SIZEOF_DATA_PTR EQUAL 8)
      set(OPENCV_PLUGIN_ARCH "_64" CACHE STRING "")
    else()
      set(OPENCV_PLUGIN_ARCH "" CACHE STRING "")
    endif()
  else()
    set(OPENCV_PLUGIN_VERSION "" CACHE STRING "")
    set(OPENCV_PLUGIN_ARCH "" CACHE STRING "")
  endif()

  set_target_properties(${name} PROPERTIES
    CXX_STANDARD 11
    CXX_VISIBILITY_PRESET hidden
    DEBUG_POSTFIX "${OPENCV_DEBUG_POSTFIX}"
    OUTPUT_NAME "${name}${OPENCV_PLUGIN_VERSION}${OPENCV_PLUGIN_ARCH}"
  )

  if(WIN32)
    set_target_properties(${name} PROPERTIES LIBRARY_OUTPUT_DIRECTORY ${EXECUTABLE_OUTPUT_PATH})
    install(TARGETS ${name} OPTIONAL LIBRARY DESTINATION ${OPENCV_BIN_INSTALL_PATH} COMPONENT plugins)
  else()
    install(TARGETS ${name} OPTIONAL LIBRARY DESTINATION ${OPENCV_LIB_INSTALL_PATH} COMPONENT plugins)
  endif()

  add_dependencies(opencv_dnn_plugins ${name})

endfunction()
@@ -52,6 +52,11 @@
 namespace cv {
 namespace dnn {
 
+namespace accessor {
+class DnnNetAccessor;  // forward declaration
+}
+
 CV__DNN_INLINE_NS_BEGIN
 //! @addtogroup dnn
 //! @{
@@ -76,9 +81,11 @@ CV__DNN_INLINE_NS_BEGIN
         DNN_BACKEND_CUDA,
         DNN_BACKEND_WEBNN,
         DNN_BACKEND_TIMVX,
-#ifdef __OPENCV_BUILD
+#if defined(__OPENCV_BUILD) || defined(BUILD_PLUGIN)
 #if !defined(OPENCV_BINDING_PARSER)
         DNN_BACKEND_INFERENCE_ENGINE_NGRAPH = 1000000,     // internal - use DNN_BACKEND_INFERENCE_ENGINE + setInferenceEngineBackendType()
         DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019,      // internal - use DNN_BACKEND_INFERENCE_ENGINE + setInferenceEngineBackendType()
 #endif
 #endif
     };
@@ -840,8 +847,12 @@ CV__DNN_INLINE_NS_BEGIN
          */
         CV_WRAP int64 getPerfProfile(CV_OUT std::vector<double>& timings);
 
+    private:
+
         struct Impl;
+        inline Impl* getImpl() const { return impl.get(); }
+        inline Impl& getImplRef() const { CV_DbgAssert(impl); return *impl.get(); }
+        friend class accessor::DnnNetAccessor;
     protected:
         Ptr<Impl> impl;
     };
modules/dnn/misc/plugin/openvino/CMakeLists.txt (new file, 2 lines)
@@ -0,0 +1,2 @@
#include_directories("${OPENCV_MODULE_opencv_dnn_BINARY_DIR}")  # Cannot open include file: 'layers/layers_common.simd_declarations.hpp'
ocv_create_builtin_dnn_plugin(opencv_dnn_openvino ocv.3rdparty.openvino ${dnn_plugin_srcs})
@@ -113,10 +113,10 @@ class dnn_test(NewOpenCVTests):
         proto = self.find_dnn_file('dnn/layers/layer_convolution.prototxt')
         model = self.find_dnn_file('dnn/layers/layer_convolution.caffemodel')
         net = cv.dnn.readNet(proto, model)
+        try:
             net.setPreferableBackend(backend)
             net.setPreferableTarget(target)
             inp = np.random.standard_normal([1, 2, 10, 11]).astype(np.float32)
-        try:
             net.setInput(inp)
             net.forward()
         except BaseException as e:
modules/dnn/src/backend.cpp (new file, 31 lines)
@@ -0,0 +1,31 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
#include "precomp.hpp"
#include "backend.hpp"

#include <opencv2/core/private.hpp>

#include <opencv2/core/utils/configuration.private.hpp>
#include <opencv2/core/utils/logger.defines.hpp>
#ifdef NDEBUG
#define CV_LOG_STRIP_LEVEL CV_LOG_LEVEL_DEBUG + 1
#else
#define CV_LOG_STRIP_LEVEL CV_LOG_LEVEL_VERBOSE + 1
#endif
#include <opencv2/core/utils/logger.hpp>

#include "factory.hpp"

#include "plugin_api.hpp"
#include "plugin_wrapper.impl.hpp"


namespace cv { namespace dnn_backend {

NetworkBackend::~NetworkBackend()
{
    // nothing
}

}}  // namespace cv::dnn_backend
modules/dnn/src/backend.hpp (new file, 43 lines)
@@ -0,0 +1,43 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
#ifndef OPENCV_DNN_BACKEND_HPP
#define OPENCV_DNN_BACKEND_HPP

#include <memory>
#include <map>

namespace cv { namespace dnn_backend {

using namespace cv::dnn;

class CV_EXPORTS NetworkBackend
{
public:
    virtual ~NetworkBackend();

    virtual void switchBackend(Net& net) = 0;

    /**
     @param loaderID use empty "" for auto
     @param model see cv::dnn::readNetwork
     @param config see cv::dnn::readNetwork
     */
    virtual Net readNetwork(const std::string& loaderID, const std::string& model, const std::string& config) = 0;

    /** @overload */
    virtual Net readNetwork(
        const std::string& loaderID,
        const uchar* bufferModelConfigPtr, size_t bufferModelConfigSize,
        const uchar* bufferWeightsPtr, size_t bufferWeightsSize
    ) = 0;

    // TODO: target as string + configuration
    virtual bool checkTarget(Target target) = 0;
};


}  // namespace dnn_backend
}  // namespace cv

#endif  // OPENCV_DNN_BACKEND_HPP
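
The interface above is the entire surface a DNN engine plugin exposes to the core module. For illustration, a minimal concrete backend might look like the following sketch (the StubBackend class is hypothetical and not part of this patch):

#include <opencv2/dnn.hpp>
#include "backend.hpp"

namespace cv { namespace dnn_backend {

// Hypothetical stub backend: demonstrates the minimal set of methods a
// concrete NetworkBackend has to implement; not part of the patch.
class StubBackend CV_FINAL : public NetworkBackend
{
public:
    void switchBackend(Net& net) CV_OVERRIDE
    {
        CV_UNUSED(net);
        CV_Error(Error::StsNotImplemented, "StubBackend: switchBackend() is a stub");
    }
    Net readNetwork(const std::string& loaderID, const std::string& model, const std::string& config) CV_OVERRIDE
    {
        CV_Assert(loaderID.empty());  // this sketch supports only the "auto" loader
        return cv::dnn::readNet(model, config);  // delegate to the generic reader
    }
    Net readNetwork(
            const std::string& loaderID,
            const uchar* bufferModelConfigPtr, size_t bufferModelConfigSize,
            const uchar* bufferWeightsPtr, size_t bufferWeightsSize
    ) CV_OVERRIDE
    {
        CV_UNUSED(loaderID); CV_UNUSED(bufferModelConfigPtr); CV_UNUSED(bufferModelConfigSize);
        CV_UNUSED(bufferWeightsPtr); CV_UNUSED(bufferWeightsSize);
        CV_Error(Error::StsNotImplemented, "StubBackend: in-memory loading is a stub");
    }
    bool checkTarget(Target target) CV_OVERRIDE
    {
        return target == DNN_TARGET_CPU;  // claim CPU support only
    }
};

}}  // namespace cv::dnn_backend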
@@ -156,6 +156,18 @@ static inline std::string toString(const Mat& blob, const std::string& name = st
 
 
 CV__DNN_INLINE_NS_END
 
+namespace accessor {
+class DnnNetAccessor
+{
+public:
+    static inline Ptr<Net::Impl>& getImplPtrRef(Net& net)
+    {
+        return net.impl;
+    }
+};
+}
+
 }}  // namespace
 
 #endif  // __OPENCV_DNN_COMMON_HPP__
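
The accessor gives code outside the class, including the OpenVINO implementation introduced further down, a controlled way to read and replace the Ptr<Net::Impl> member while user code keeps holding the same Net handle. A condensed sketch of the swap pattern (MyNetImpl is a hypothetical Net::Impl subclass; NetImplOpenVINO below is the real user):

// Sketch of the implementation-swap pattern enabled by DnnNetAccessor.
void switchToMyBackend(cv::dnn::Net& net)
{
    using namespace cv::dnn;
    Ptr<Net::Impl>& implRef = accessor::DnnNetAccessor::getImplPtrRef(net);
    CV_Assert(implRef);
    // Derive a new execution implementation from the current one and swap it in.
    implRef = makePtr<MyNetImpl>(implRef);  // MyNetImpl: hypothetical Net::Impl subclass
}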
@@ -36,11 +36,7 @@ bool getParam_DNN_OPENCL_ALLOW_ALL_DEVICES()
 int getParam_DNN_BACKEND_DEFAULT()
 {
     static int PARAM_DNN_BACKEND_DEFAULT = (int)utils::getConfigurationParameterSizeT("OPENCV_DNN_BACKEND_DEFAULT",
-#ifdef HAVE_INF_ENGINE
-        (size_t)DNN_BACKEND_INFERENCE_ENGINE
-#else
         (size_t)DNN_BACKEND_OPENCV
-#endif
     );
     return PARAM_DNN_BACKEND_DEFAULT;
 }
modules/dnn/src/factory.hpp (new file, 30 lines)
@@ -0,0 +1,30 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.

#ifndef OPENCV_DNN_FACTORY_HPP
#define OPENCV_DNN_FACTORY_HPP

#include "backend.hpp"

namespace cv { namespace dnn_backend {

class IDNNBackendFactory
{
public:
    virtual ~IDNNBackendFactory() {}
    virtual std::shared_ptr<cv::dnn_backend::NetworkBackend> createNetworkBackend() const = 0;
};

//
// PluginDNNBackendFactory is implemented in plugin_wrapper
//

std::shared_ptr<IDNNBackendFactory> createPluginDNNBackendFactory(const std::string& baseName);

/// @brief Returns createPluginDNNBackendFactory()->createNetworkBackend()
cv::dnn_backend::NetworkBackend& createPluginDNNNetworkBackend(const std::string& baseName);

}}  // namespace

#endif  // OPENCV_DNN_FACTORY_HPP
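
A sketch of how a caller resolves and uses a plugin through this factory API ("openvino" is the base name used elsewhere in this patch; the model paths are placeholders):

#include "factory.hpp"

cv::dnn::Net loadViaOpenVINOPlugin()
{
    using namespace cv::dnn_backend;
    // Resolves the dynamically loaded plugin; presumably fails with cv::Exception
    // if the plugin library cannot be found or does not match the ABI/API version.
    NetworkBackend& backend = createPluginDNNNetworkBackend("openvino");
    CV_Assert(backend.checkTarget(cv::dnn::DNN_TARGET_CPU));
    return backend.readNetwork(/*loaderID=*/"", "model.xml", "model.bin");  // placeholder paths
}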
@@ -988,14 +988,6 @@ InferenceEngine::DataPtr ngraphDataOutputNode(
     return w.dataPtr;
 }
 
-void forwardNgraph(const std::vector<Ptr<BackendWrapper> >& outBlobsWrappers,
-                   Ptr<BackendNode>& node, bool isAsync)
-{
-    CV_Assert(!node.empty());
-    Ptr<InfEngineNgraphNode> ieNode = node.dynamicCast<InfEngineNgraphNode>();
-    CV_Assert(!ieNode.empty());
-    ieNode->net->forward(outBlobsWrappers, isAsync);
-}
-
 void InfEngineNgraphNet::reset()
 {
@@ -1192,12 +1184,6 @@ void InfEngineNgraphNet::forward(const std::vector<Ptr<BackendWrapper> >& outBlo
     }
 }
 
-#else
-void forwardNgraph(const std::vector<Ptr<BackendWrapper> >& outBlobsWrappers,
-                   Ptr<BackendNode>& node, bool isAsync)
-{
-    CV_Assert(false && "nGraph is not enabled in this OpenCV build");
-}
 #endif
 
 }}
@@ -158,9 +158,6 @@ private:
 
 #endif  // HAVE_DNN_NGRAPH
 
-void forwardNgraph(const std::vector<Ptr<BackendWrapper> >& outBlobsWrappers,
-                   Ptr<BackendNode>& node, bool isAsync);
-
 }}  // namespace cv::dnn
 
 
@@ -42,7 +42,9 @@
 #include "precomp.hpp"
 #include <opencv2/dnn/layer.details.hpp>
 
+#if !defined(BUILD_PLUGIN)
 #include <google/protobuf/stubs/common.h>
+#endif
 
 namespace cv {
 namespace dnn {
@@ -58,6 +60,7 @@ Mutex& getInitializationMutex()
 // force initialization (single-threaded environment)
 Mutex* __initialization_mutex_initializer = &getInitializationMutex();
 
+#if !defined(BUILD_PLUGIN)
 namespace {
 using namespace google::protobuf;
 class ProtobufShutdown {
@@ -71,12 +74,15 @@ public:
     }
 };
 }  // namespace
+#endif
 
 void initializeLayerFactory()
 {
     CV_TRACE_FUNCTION();
 
+#if !defined(BUILD_PLUGIN)
     static ProtobufShutdown protobufShutdown; CV_UNUSED(protobufShutdown);
+#endif
 
     CV_DNN_REGISTER_LAYER_CLASS(Slice, SliceLayer);
     CV_DNN_REGISTER_LAYER_CLASS(Split, SplitLayer);
@@ -4,8 +4,6 @@
 
 #include "precomp.hpp"
 
 #include <opencv2/imgproc.hpp>
 
 #include <opencv2/dnn/layer_reg.private.hpp>  // getLayerFactoryImpl
 
@@ -96,21 +96,29 @@ struct LayerData
 
     int flag;
 
-    Ptr<Layer> getLayerInstance()
+    void resetAllocation()
     {
         CV_TRACE_FUNCTION();
         CV_TRACE_ARG_VALUE(type, "type", type.c_str());
+        if (id == 0)
+            return;  // skip "input" layer (assertion in Net::Impl::allocateLayers)
 
-        if (layerInstance)
-            return layerInstance;
+        layerInstance.release();
+        outputBlobs.clear();
+        inputBlobs.clear();
+        internals.clear();
 
-        layerInstance = LayerFactory::createLayerInstance(type, params);
-        if (!layerInstance)
-        {
-            CV_Error(Error::StsError, "Can't create layer \"" + name + "\" of type \"" + type + "\"");
-        }
+        outputBlobsWrappers.clear();
+        inputBlobsWrappers.clear();
+        internalBlobsWrappers.clear();
 
-        return layerInstance;
+        backendNodes.clear();
+
+        skip = false;
+        flag = 0;
+
+#ifdef HAVE_CUDA
+        cudaD2HBackgroundTransfers.clear();
+#endif
     }
 };
@@ -75,11 +75,7 @@ Ptr<BackendWrapper> wrapMat(int backendId, int targetId, cv::Mat& m)
     }
     else if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
     {
-#ifdef HAVE_DNN_NGRAPH
-        return Ptr<BackendWrapper>(new NgraphBackendWrapper(targetId, m));
-#else
-        CV_Error(Error::StsNotImplemented, "This OpenCV version is built without support of OpenVINO / Inference Engine + nGraph");
-#endif
+        CV_Assert(0 && "Internal error: DNN_BACKEND_INFERENCE_ENGINE_NGRAPH must be implemented through inheritance");
     }
     else if (backendId == DNN_BACKEND_WEBNN)
     {
@@ -120,7 +120,7 @@ Net Net::quantize(InputArrayOfArrays calibData, int inputsDtype, int outputsDtyp
     CV_TRACE_FUNCTION();
     CV_Assert(impl);
     CV_Assert(!empty());
-    return impl->quantize(calibData, inputsDtype, outputsDtype, perChannel);
+    return impl->quantize(*this, calibData, inputsDtype, outputsDtype, perChannel);
 }
 
 // FIXIT drop from inference API
@@ -146,7 +146,7 @@ void Net::setPreferableBackend(int backendId)
     CV_TRACE_FUNCTION();
     CV_TRACE_ARG(backendId);
     CV_Assert(impl);
-    return impl->setPreferableBackend(backendId);
+    return impl->setPreferableBackend(*this, backendId);
 }
 
 void Net::setPreferableTarget(int targetId)
@@ -30,6 +30,12 @@ std::string detail::NetImplBase::getDumpFileNameBase() const
 }
 
 
+Net::Impl::~Impl()
+{
+    // nothing
+}
+
+
 Net::Impl::Impl()
 {
     // allocate fake net input layer
@@ -46,9 +52,8 @@ Net::Impl::Impl()
     netWasQuantized = false;
     fusion = true;
     isAsync = false;
-    preferableBackend = DNN_BACKEND_DEFAULT;
+    preferableBackend = (Backend)getParam_DNN_BACKEND_DEFAULT();
     preferableTarget = DNN_TARGET_CPU;
-    skipInfEngineInit = false;
     hasDynamicShapes = false;
 }
 
@@ -86,22 +91,10 @@ void Net::Impl::clear()
 }
 
 
-void Net::Impl::setUpNet(const std::vector<LayerPin>& blobsToKeep_)
+void Net::Impl::validateBackendAndTarget()
 {
     CV_TRACE_FUNCTION();
 
-    if (dumpLevel && networkDumpCounter == 0)
-    {
-        dumpNetworkToFile();
-    }
-
-    if (preferableBackend == DNN_BACKEND_DEFAULT)
-        preferableBackend = (Backend)getParam_DNN_BACKEND_DEFAULT();
-#ifdef HAVE_INF_ENGINE
-    if (preferableBackend == DNN_BACKEND_INFERENCE_ENGINE)
-        preferableBackend = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH;  // = getInferenceEngineBackendTypeParam();
-#endif
-
     CV_Assert(preferableBackend != DNN_BACKEND_OPENCV ||
               preferableTarget == DNN_TARGET_CPU ||
               preferableTarget == DNN_TARGET_OPENCL ||
@@ -109,19 +102,6 @@ void Net::Impl::setUpNet(const std::vector<LayerPin>& blobsToKeep_)
     CV_Assert(preferableBackend != DNN_BACKEND_HALIDE ||
               preferableTarget == DNN_TARGET_CPU ||
               preferableTarget == DNN_TARGET_OPENCL);
-#ifdef HAVE_INF_ENGINE
-    if (preferableBackend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
-    {
-        CV_Assert(
-              (preferableTarget == DNN_TARGET_CPU && (!isArmComputePlugin() || preferableBackend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)) ||
-              preferableTarget == DNN_TARGET_OPENCL ||
-              preferableTarget == DNN_TARGET_OPENCL_FP16 ||
-              preferableTarget == DNN_TARGET_MYRIAD ||
-              preferableTarget == DNN_TARGET_HDDL ||
-              preferableTarget == DNN_TARGET_FPGA
-        );
-    }
-#endif
 #ifdef HAVE_WEBNN
     if (preferableBackend == DNN_BACKEND_WEBNN)
     {
@@ -136,6 +116,20 @@ void Net::Impl::setUpNet(const std::vector<LayerPin>& blobsToKeep_)
     CV_Assert(preferableBackend != DNN_BACKEND_TIMVX ||
               preferableTarget == DNN_TARGET_NPU);
 
+    CV_Assert(preferableBackend != DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && "Inheritance internal error");
+}
+
+void Net::Impl::setUpNet(const std::vector<LayerPin>& blobsToKeep_)
+{
+    CV_TRACE_FUNCTION();
+
+    if (dumpLevel && networkDumpCounter == 0)
+    {
+        dumpNetworkToFile();
+    }
+
+    validateBackendAndTarget();
+
     if (!netWasAllocated || this->blobsToKeep != blobsToKeep_)
     {
         if (preferableBackend == DNN_BACKEND_OPENCV && IS_DNN_OPENCL_TARGET(preferableTarget))
@@ -228,14 +222,14 @@ void Net::Impl::setUpNet(const std::vector<LayerPin>& blobsToKeep_)
 Ptr<Layer> Net::Impl::getLayer(int layerId) const
 {
     LayerData& ld = getLayerData(layerId);
-    return ld.getLayerInstance();
+    return getLayerInstance(ld);
 }
 
 
 Ptr<Layer> Net::Impl::getLayer(const LayerId& layerId) const
 {
     LayerData& ld = getLayerData(layerId);
-    return ld.getLayerInstance();
+    return getLayerInstance(ld);
 }
 
 
@@ -327,7 +321,7 @@ int Net::Impl::resolvePinOutputName(LayerData& ld, const String& outName) const
 {
     if (outName.empty())
         return 0;
-    return ld.getLayerInstance()->outputNameToIndex(outName);
+    return getLayerInstance(ld)->outputNameToIndex(outName);
 }
 
 
@@ -528,7 +522,7 @@ void Net::Impl::allocateLayer(int lid, const LayersShapesMap& layersShapes)
     for (int i = 0; i < ld.internalBlobsWrappers.size(); ++i)
         ld.internalBlobsWrappers[i] = wrap(ld.internals[i]);
 
-    Ptr<Layer> layerPtr = ld.getLayerInstance();
+    Ptr<Layer> layerPtr = getLayerInstance(ld);
     {
         std::vector<Mat> inps(ld.inputBlobs.size());
         for (int i = 0; i < ld.inputBlobs.size(); ++i)
@@ -813,12 +807,10 @@ void Net::Impl::forwardLayer(LayerData& ld)
     {
         forwardHalide(ld.outputBlobsWrappers, node);
     }
-#ifdef HAVE_INF_ENGINE
     else if (preferableBackend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
     {
-        forwardNgraph(ld.outputBlobsWrappers, node, isAsync);
+        CV_Assert(preferableBackend != DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && "Inheritance internal error");
     }
-#endif
     else if (preferableBackend == DNN_BACKEND_WEBNN)
     {
         forwardWebnn(ld.outputBlobsWrappers, node, isAsync);
@@ -844,7 +836,7 @@ void Net::Impl::forwardLayer(LayerData& ld)
 #endif
     else
     {
-        CV_Error(Error::StsNotImplemented, "Unknown backend identifier");
+        CV_Error(Error::StsNotImplemented, cv::format("Unknown backend identifier: %d", preferableBackend));
     }
 }
 
@@ -1156,7 +1148,7 @@ void Net::Impl::getLayerShapesRecursively(int id, LayersShapesMap& inOutShapes)
     ShapesVec& os = layerShapes.out;
     ShapesVec& ints = layerShapes.internal;
     int requiredOutputs = layerData.requiredOutputs.size();
-    Ptr<Layer> l = layerData.getLayerInstance();
+    const Ptr<Layer>& l = getLayerInstance(layerData);
     CV_Assert(l);
     bool layerSupportInPlace = false;
     try
@@ -1310,7 +1302,7 @@ void Net::Impl::updateLayersShapes()
             const MatShape& shape = layersShapes[inputLayerId].out[inputPin.oid];
             layerShapes.in.push_back(shape);
         }
-        layerData.getLayerInstance()->updateMemoryShapes(layerShapes.in);
+        getLayerInstance(layerData)->updateMemoryShapes(layerShapes.in);
     }
     CV_LOG_DEBUG(NULL, "Layer " << layerId << ": " << toString(layerShapes.in, "input shapes"));
     CV_LOG_IF_DEBUG(NULL, !layerShapes.out.empty(), "Layer " << layerId << ": " << toString(layerShapes.out, "output shapes"));
@@ -1369,30 +1361,7 @@ Mat Net::Impl::getBlob(String outputName) const
 AsyncArray Net::Impl::getBlobAsync(const LayerPin& pin)
 {
     CV_TRACE_FUNCTION();
-#ifdef HAVE_INF_ENGINE
-    if (!pin.valid())
-        CV_Error(Error::StsObjectNotFound, "Requested blob not found");
-
-    LayerData& ld = layers[pin.lid];
-    if ((size_t)pin.oid >= ld.outputBlobs.size())
-    {
-        CV_Error(Error::StsOutOfRange, format("Layer \"%s\" produce only %d outputs, "
-                                              "the #%d was requested",
-                                              ld.name.c_str(), (int)ld.outputBlobs.size(), (int)pin.oid));
-    }
-    if (preferableTarget != DNN_TARGET_CPU)
-    {
-        CV_Assert(!ld.outputBlobsWrappers.empty() && !ld.outputBlobsWrappers[pin.oid].empty());
-        // Transfer data to CPU if it's required.
-        ld.outputBlobsWrappers[pin.oid]->copyToHost();
-    }
-    CV_Assert(preferableBackend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH);
-
-    Ptr<NgraphBackendWrapper> wrapper = ld.outputBlobsWrappers[pin.oid].dynamicCast<NgraphBackendWrapper>();
-    return std::move(wrapper->futureMat);
-#else
     CV_Error(Error::StsNotImplemented, "DNN: OpenVINO/nGraph backend is required");
-#endif  // HAVE_INF_ENGINE
 }
 
 
@@ -1482,7 +1451,7 @@ void Net::Impl::setInput(InputArray blob, const String& name, double scalefactor
 Mat Net::Impl::getParam(int layer, int numParam) const
 {
     LayerData& ld = getLayerData(layer);
-    std::vector<Mat>& layerBlobs = ld.getLayerInstance()->blobs;
+    std::vector<Mat>& layerBlobs = getLayerInstance(ld)->blobs;
     CV_Assert(numParam < (int)layerBlobs.size());
     return layerBlobs[numParam];
 }
@@ -1491,7 +1460,8 @@ void Net::Impl::setParam(int layer, int numParam, const Mat& blob)
 {
     LayerData& ld = getLayerData(layer);
 
-    std::vector<Mat>& layerBlobs = ld.getLayerInstance()->blobs;
+    // FIXIT we should not modify "execution" instance
+    std::vector<Mat>& layerBlobs = getLayerInstance(ld)->blobs;
     CV_Assert(numParam < (int)layerBlobs.size());
     // we don't make strong checks, use this function carefully
     layerBlobs[numParam] = blob;
@@ -1958,7 +1928,7 @@ int64 Net::Impl::getFLOPS(const std::vector<MatShape>& netInputShapes) /*const*/
 
     for (int i = 0; i < ids.size(); i++)
    {
-        flops += layers[ids[i]].getLayerInstance()->getFLOPS(inShapes[i], outShapes[i]);
+        flops += getLayerInstance(layers[ids[i]])->getFLOPS(inShapes[i], outShapes[i]);
    }
 
     return flops;
@@ -1975,7 +1945,7 @@ int64 Net::Impl::getFLOPS(
     LayerShapes shapes;
     getLayerShapes(netInputShapes, layerId, shapes);
 
-    return const_cast<LayerData&>(layer->second).getLayerInstance()->getFLOPS(shapes.in, shapes.out);
+    return getLayerInstance(const_cast<LayerData&>(layer->second))->getFLOPS(shapes.in, shapes.out);
 }
@@ -38,7 +38,12 @@ struct Net::Impl : public detail::NetImplBase
     typedef std::map<int, LayerShapes> LayersShapesMap;
     typedef std::map<int, LayerData> MapIdToLayerData;
 
+    virtual ~Impl();
     Impl();
+    Impl(const Impl&) = delete;
+
+    // Inheritance support
+    Ptr<Net::Impl> basePtr_;
 
     Ptr<DataLayer> netInputLayer;
     std::vector<LayerPin> blobsToKeep;
@@ -49,7 +54,6 @@ struct Net::Impl : public detail::NetImplBase
     int preferableBackend;
     int preferableTarget;
     String halideConfigFile;
-    bool skipInfEngineInit;
     bool hasDynamicShapes;
     // Map host data to backend specific wrapper.
     std::map<void*, Ptr<BackendWrapper>> backendWrappers;
@@ -59,23 +63,52 @@ struct Net::Impl : public detail::NetImplBase
     bool netWasAllocated;
     bool netWasQuantized;
     bool fusion;
-    bool isAsync;
+    bool isAsync;  // FIXIT: drop
     std::vector<int64> layersTimings;
 
 
-    bool empty() const;
-    void setPreferableBackend(int backendId);
-    void setPreferableTarget(int targetId);
+    virtual bool empty() const;
+    virtual void setPreferableBackend(Net& net, int backendId);
+    virtual void setPreferableTarget(int targetId);
 
-    // FIXIT use inheritance
-    Ptr<BackendWrapper> wrap(Mat& host);
+    virtual Ptr<BackendWrapper> wrap(Mat& host);
 
 
-    void clear();
+    virtual void clear();
 
 
+    virtual void validateBackendAndTarget();
+
     void setUpNet(const std::vector<LayerPin>& blobsToKeep_ = std::vector<LayerPin>());
 
 
+    virtual Ptr<Layer> createLayerInstance(const LayerData& ld) const
+    {
+        return LayerFactory::createLayerInstance(ld.type, const_cast<LayerParams&>(ld.params));
+    }
+    Ptr<Layer> getLayerInstance(LayerData& ld) const
+    {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(type, "type", ld.type.c_str());
+
+        if (ld.layerInstance)
+            return ld.layerInstance;
+
+        ld.layerInstance = createLayerInstance(ld);
+        if (!ld.layerInstance && basePtr_)
+        {
+            ld.layerInstance = basePtr_->createLayerInstance(ld);
+            CV_LOG_IF_DEBUG(NULL, ld.layerInstance, "Created layer \"" + ld.name + "\" of type \"" + ld.type + "\" from upstream layers registry");
+        }
+        if (!ld.layerInstance)
+        {
+            CV_Error(Error::StsError, "Can't create layer \"" + ld.name + "\" of type \"" + ld.type + "\"");
+        }
+
+        return ld.layerInstance;
+    }
+
     Ptr<Layer> getLayer(int layerId) const;
     Ptr<Layer> getLayer(const LayerId& layerId) const;
 
@@ -118,7 +151,7 @@ struct Net::Impl : public detail::NetImplBase
 
     void setInputsNames(const std::vector<String>& inputBlobNames);
     void setInputShape(const String& inputName, const MatShape& shape);
-    void setInput(InputArray blob, const String& name, double scalefactor, const Scalar& mean);
+    virtual void setInput(InputArray blob, const String& name, double scalefactor, const Scalar& mean);
     Mat getParam(int layer, int numParam) const;
     void setParam(int layer, int numParam, const Mat& blob);
     std::vector<Ptr<Layer>> getLayerInputs(int layerId) const;
@@ -130,8 +163,7 @@ struct Net::Impl : public detail::NetImplBase
     int getLayersCount(const String& layerType) const;
 
 
-    // FIXIT use inheritance
-    void initBackend(const std::vector<LayerPin>& blobsToKeep_);
+    virtual void initBackend(const std::vector<LayerPin>& blobsToKeep_);
 
     void setHalideScheduler(const String& scheduler);
 #ifdef HAVE_HALIDE
@@ -139,11 +171,6 @@ struct Net::Impl : public detail::NetImplBase
     void initHalideBackend();
 #endif
 
-#ifdef HAVE_DNN_NGRAPH
-    void addNgraphOutputs(LayerData& ld);
-    void initNgraphBackend(const std::vector<LayerPin>& blobsToKeep_);
-#endif
-
 #ifdef HAVE_WEBNN
     void addWebnnOutputs(LayerData& ld);
     void initWebnnBackend(const std::vector<LayerPin>& blobsToKeep_);
@@ -183,11 +210,11 @@ struct Net::Impl : public detail::NetImplBase
     // TODO add getter
     void enableFusion(bool fusion_);
 
-    void fuseLayers(const std::vector<LayerPin>& blobsToKeep_);
+    virtual void fuseLayers(const std::vector<LayerPin>& blobsToKeep_);
 
     void allocateLayers(const std::vector<LayerPin>& blobsToKeep_);
 
-    void forwardLayer(LayerData& ld);
+    virtual void forwardLayer(LayerData& ld);
 
     void forwardToLayer(LayerData& ld, bool clearFlags = true);
 
@@ -243,22 +270,17 @@ struct Net::Impl : public detail::NetImplBase
     Mat getBlob(String outputName) const;
 
 #ifdef CV_CXX11
-    AsyncArray getBlobAsync(const LayerPin& pin);
+    virtual AsyncArray getBlobAsync(const LayerPin& pin);
 
     AsyncArray getBlobAsync(String outputName);
 #endif  // CV_CXX11
 
-#ifdef HAVE_INF_ENGINE
-    static
-    Net createNetworkFromModelOptimizer(InferenceEngine::CNNNetwork& ieNet);
-#endif
-
     string dump(bool forceAllocation = false) const;
 
     void dumpNetworkToFile() const;
 
     // FIXIT drop from inference API
-    Net quantize(InputArrayOfArrays calibData, int inputsDtype, int outputsDtype, bool perChannel) /*const*/;
+    Net quantize(Net& net, InputArrayOfArrays calibData, int inputsDtype, int outputsDtype, bool perChannel) /*const*/;
     void getInputDetails(std::vector<float>& scales, std::vector<int>& zeropoints) /*const*/;
     void getOutputDetails(std::vector<float>& scales, std::vector<int>& zeropoints) /*const*/;
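
These virtual hooks are the heart of the patch: an execution backend becomes a Net::Impl subclass instead of a set of #ifdef branches, and getLayerInstance() falls back to basePtr_ when the derived registry cannot create a layer. Schematically (a condensed, hypothetical subclass; NetImplOpenVINO further down is the real one):

// Condensed sketch of the inheritance pattern; MyNetImpl and its helpers
// (myRegistryCreate, isSupported, runOnMyBackend) are hypothetical.
struct MyNetImpl : public cv::dnn::Net::Impl
{
    typedef cv::dnn::Net::Impl Base;

    Ptr<cv::dnn::Layer> createLayerInstance(const cv::dnn::LayerData& ld) const override
    {
        Ptr<cv::dnn::Layer> l = myRegistryCreate(ld.type, ld.params);  // backend-specific pool first
        return l ? l : Base::createLayerInstance(ld);                  // then the generic factory
    }
    void forwardLayer(cv::dnn::LayerData& ld) override
    {
        if (!isSupported(ld))
            return Base::forwardLayer(ld);  // delegate unsupported nodes upstream
        runOnMyBackend(ld);
    }
};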
@@ -7,6 +7,9 @@
 #include "net_impl.hpp"
 #include "legacy_backend.hpp"
 
+#include "backend.hpp"
+#include "factory.hpp"
+
 namespace cv {
 namespace dnn {
 CV__DNN_INLINE_NS_BEGIN
@@ -109,11 +112,7 @@ void Net::Impl::initBackend(const std::vector<LayerPin>& blobsToKeep_)
     }
     else if (preferableBackend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
     {
-#ifdef HAVE_DNN_NGRAPH
-        initNgraphBackend(blobsToKeep_);
-#else
-        CV_Error(Error::StsNotImplemented, "This OpenCV version is built without support of OpenVINO");
-#endif
+        CV_Assert(0 && "Inheritance must be used with OpenVINO backend");
     }
     else if (preferableBackend == DNN_BACKEND_WEBNN)
     {
@@ -154,26 +153,38 @@ void Net::Impl::initBackend(const std::vector<LayerPin>& blobsToKeep_)
 }
 
 
-void Net::Impl::setPreferableBackend(int backendId)
+void Net::Impl::setPreferableBackend(Net& net, int backendId)
 {
     if (backendId == DNN_BACKEND_DEFAULT)
         backendId = (Backend)getParam_DNN_BACKEND_DEFAULT();
 
-    if (backendId == DNN_BACKEND_INFERENCE_ENGINE)
-        backendId = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH;  // = getInferenceEngineBackendTypeParam();
-
     if (netWasQuantized && backendId != DNN_BACKEND_OPENCV && backendId != DNN_BACKEND_TIMVX)
     {
         CV_LOG_WARNING(NULL, "DNN: Only default and TIMVX backends support quantized networks");
         backendId = DNN_BACKEND_OPENCV;
     }
 
+#ifdef HAVE_INF_ENGINE
+    if (backendId == DNN_BACKEND_INFERENCE_ENGINE)
+        backendId = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH;
+#endif
+
     if (preferableBackend != backendId)
     {
-        preferableBackend = backendId;
         clear();
+        if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
+        {
+#if defined(HAVE_INF_ENGINE)
+            switchToOpenVINOBackend(net);
+#elif defined(ENABLE_PLUGINS)
+            auto& networkBackend = dnn_backend::createPluginDNNNetworkBackend("openvino");
+            networkBackend.switchBackend(net);
+#else
+            CV_Error(Error::StsNotImplemented, "OpenVINO backend is not available in the current OpenCV build");
+#endif
+        }
+        else
+        {
+            preferableBackend = backendId;
+        }
     }
 }
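
From the user's point of view nothing changes: requesting the Inference Engine backend on a Net now swaps in the OpenVINO implementation object behind the scenes, either the built-in one or the plugin-provided one. For instance ("model.onnx" is a placeholder path):

#include <opencv2/dnn.hpp>

cv::dnn::Net net = cv::dnn::readNet("model.onnx");
// Routed through Net::Impl::setPreferableBackend(net, id) above: with
// HAVE_INF_ENGINE it calls switchToOpenVINOBackend(net); with ENABLE_PLUGINS
// it loads the "openvino" plugin and calls its switchBackend(net).
net.setPreferableBackend(cv::dnn::DNN_BACKEND_INFERENCE_ENGINE);
net.setPreferableTarget(cv::dnn::DNN_TARGET_CPU);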
@@ -634,7 +634,7 @@ void Net::Impl::fuseLayers(const std::vector<LayerPin>& blobsToKeep_)
                     pin = inp_i_data->inputBlobsId[0];
                     inp_i_data = &layers[pin.lid];
                 }
-                conv_layer = conv_layer && (inp_i_data->getLayerInstance()->type == "Convolution");
+                conv_layer = conv_layer && (getLayerInstance(*inp_i_data)->type == "Convolution");
             }
             if (!conv_layer)
                 continue;
@@ -11,17 +11,217 @@
 
 #include "net_impl.hpp"
 
+#include "backend.hpp"
+#include "factory.hpp"
+
 namespace cv {
 namespace dnn {
 CV__DNN_INLINE_NS_BEGIN
 
 #ifdef HAVE_INF_ENGINE
 
+// TODO: use "string" target specifier
+class NetImplOpenVINO CV_FINAL : public Net::Impl
+{
+public:
+    typedef Net::Impl Base;
+
+    // this default constructor is used with OpenVINO native loader
+    // TODO: dedicated Impl?
+    NetImplOpenVINO()
+        : Net::Impl()
+    {
+        preferableBackend = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH;
+    }
+
+    // constructor to derive execution implementation from the loaded network
+    explicit NetImplOpenVINO(const Ptr<Net::Impl>& basePtr)
+        : Net::Impl()
+    {
+        basePtr_ = basePtr;
+        init();
+    }
+
+    void init()
+    {
+        CV_TRACE_FUNCTION();
+        CV_Assert(basePtr_);
+        Net::Impl& base = *basePtr_;
+        CV_Assert(!base.netWasAllocated);
+        CV_Assert(!base.netWasQuantized);
+        netInputLayer = base.netInputLayer;
+        blobsToKeep = base.blobsToKeep;
+        layers = base.layers;
+        for (MapIdToLayerData::iterator it = layers.begin(); it != layers.end(); it++)
+        {
+            LayerData& ld = it->second;
+            ld.resetAllocation();
+        }
+        layerNameToId = base.layerNameToId;
+        outputNameToId = base.outputNameToId;
+        //blobManager = base.blobManager;
+        preferableBackend = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH;  //base.preferableBackend;
+        preferableTarget = base.preferableTarget;
+        hasDynamicShapes = base.hasDynamicShapes;
+        CV_Assert(base.backendWrappers.empty());  //backendWrappers = base.backendWrappers;
+        lastLayerId = base.lastLayerId;
+        netWasAllocated = base.netWasAllocated;
+        netWasQuantized = base.netWasQuantized;
+        fusion = base.fusion;
+    }
+
+
+    //bool isAsync;  // FIXIT: drop
+
+
+    bool empty() const override
+    {
+        return Base::empty();
+    }
+    void setPreferableBackend(Net& net, int backendId) override
+    {
+        if (backendId == DNN_BACKEND_INFERENCE_ENGINE || backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
+            return;  // no-op
+        if (!basePtr_)
+            CV_Error(Error::StsError, "DNN: Can't switch backend of network created by OpenVINO native loader");
+        Ptr<Net::Impl>& impl_ptr_ref = accessor::DnnNetAccessor::getImplPtrRef(net);
+        impl_ptr_ref = basePtr_;
+        basePtr_->setPreferableBackend(net, backendId);
+    }
+
+    void setPreferableTarget(int targetId) override
+    {
+        if (preferableTarget != targetId)
+        {
+            preferableTarget = targetId;
+            clear();
+        }
+    }
+
+    Ptr<BackendWrapper> wrap(Mat& host) override
+    {
+        return Ptr<BackendWrapper>(new NgraphBackendWrapper(preferableTarget, host));
+    }
+
+
+    void clear() override
+    {
+        Base::clear();
+    }
+
+    void validateBackendAndTarget() override
+    {
+        CV_TRACE_FUNCTION();
+
+        CV_Assert(preferableBackend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH);
+        CV_Check((int)preferableTarget,
+              preferableTarget == DNN_TARGET_CPU ||
+              preferableTarget == DNN_TARGET_OPENCL ||
+              preferableTarget == DNN_TARGET_OPENCL_FP16 ||
+              preferableTarget == DNN_TARGET_MYRIAD ||
+              preferableTarget == DNN_TARGET_HDDL ||
+              preferableTarget == DNN_TARGET_FPGA,
+              "Unknown OpenVINO target"
+        );
+    }
+
+    Ptr<Layer> createLayerInstance(const LayerData& ld) const override
+    {
+        // try to create layer instance from backend-specific pool (e.g., plugin)
+        Ptr<Layer> instance = LayerFactory::createLayerInstance(ld.type, const_cast<LayerParams&>(ld.params));
+        if (!instance)
+            instance = Base::createLayerInstance(ld);
+        return instance;
+    }
+
+    void addNgraphOutputs(LayerData& ld);
+
+    void initBackend(const std::vector<LayerPin>& blobsToKeep_) override;
+
+    void fuseLayers(const std::vector<LayerPin>& blobsToKeep_) override;
+
+    void forwardLayer(LayerData& ld) override;
+
+    AsyncArray getBlobAsync(const LayerPin& pin) override;
+
+    //string dump(bool forceAllocation = false) const override;
+
+    static
+    Net createNetworkFromModelOptimizer(InferenceEngine::CNNNetwork& ieNet);
+
+};  // NetImplOpenVINO
+
+
+void NetImplOpenVINO::forwardLayer(LayerData& ld)
+{
+    CV_TRACE_FUNCTION();
+
+    Ptr<Layer> layer = ld.layerInstance;
+
+    if (!ld.skip)
+    {
+        auto it = ld.backendNodes.find(preferableBackend);
+        if (ld.id == 0 ||  // input layer
+            it == ld.backendNodes.end()  // non-supported layer or its mode
+        )
+        {
+            return Base::forwardLayer(ld);
+        }
+
+        CV_Assert(it != ld.backendNodes.end());
+        const Ptr<BackendNode>& node = it->second;
+        CV_Assert(!node.empty());
+        Ptr<InfEngineNgraphNode> ieNode = node.dynamicCast<InfEngineNgraphNode>();
+        CV_Assert(!ieNode.empty());
+        CV_Assert(ieNode->net);
+
+        TickMeter tm;
+        tm.start();
+
+        ieNode->net->forward(ld.outputBlobsWrappers, isAsync);
+
+        tm.stop();
+        int64 t = tm.getTimeTicks();
+        layersTimings[ld.id] = (t > 0) ? t : 1;  // zero for skipped layers only
+    }
+    else
+    {
+        layersTimings[ld.id] = 0;
+    }
+
+    ld.flag = 1;
+}
+
+AsyncArray NetImplOpenVINO::getBlobAsync(const LayerPin& pin)
+{
+    CV_TRACE_FUNCTION();
+    if (!pin.valid())
+        CV_Error(Error::StsObjectNotFound, "Requested blob not found");
+
+    LayerData& ld = layers[pin.lid];
+    if ((size_t)pin.oid >= ld.outputBlobs.size())
+    {
+        CV_Error(Error::StsOutOfRange, format("Layer \"%s\" produce only %d outputs, "
+                                              "the #%d was requested",
+                                              ld.name.c_str(), (int)ld.outputBlobs.size(), (int)pin.oid));
+    }
+    if (preferableTarget != DNN_TARGET_CPU)
+    {
+        CV_Assert(!ld.outputBlobsWrappers.empty() && !ld.outputBlobsWrappers[pin.oid].empty());
+        // Transfer data to CPU if it's required.
+        ld.outputBlobsWrappers[pin.oid]->copyToHost();
+    }
+    CV_Assert(preferableBackend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH);
+
+    Ptr<NgraphBackendWrapper> wrapper = ld.outputBlobsWrappers[pin.oid].dynamicCast<NgraphBackendWrapper>();
+    return std::move(wrapper->futureMat);
+}
+
+
 /** mark input pins as outputs from other subnetworks
  * FIXIT must be done by DNN engine not ngraph.
  */
-void Net::Impl::addNgraphOutputs(LayerData& ld)
+void NetImplOpenVINO::addNgraphOutputs(LayerData& ld)
 {
     CV_TRACE_FUNCTION();
 
@@ -59,7 +259,7 @@ void Net::Impl::addNgraphOutputs(LayerData& ld)
     }
 }
 
-void Net::Impl::initNgraphBackend(const std::vector<LayerPin>& blobsToKeep_)
+void NetImplOpenVINO::initBackend(const std::vector<LayerPin>& blobsToKeep_)
 {
     CV_TRACE_FUNCTION();
     CV_CheckEQ(preferableBackend, DNN_BACKEND_INFERENCE_ENGINE_NGRAPH, "");
@@ -92,7 +292,7 @@ void Net::Impl::initNgraphBackend(const std::vector<LayerPin>& blobsToKeep_)
         }
     }
 
-    if (skipInfEngineInit)
+    if (!basePtr_)  // model is loaded by OpenVINO
     {
         Ptr<BackendNode> node = layers[lastLayerId].backendNodes[preferableBackend];
         CV_Assert(!node.empty());
@@ -399,10 +599,104 @@ void Net::Impl::initNgraphBackend(const std::vector<LayerPin>& blobsToKeep_)
     }
 }
 
+//} // Net::Impl
+
+#if 0
+#define printf_(args) printf args
+#else
+#define printf_(args)
+#endif
+
+void NetImplOpenVINO::fuseLayers(const std::vector<LayerPin>& blobsToKeep_)
+{
+    CV_TRACE_FUNCTION();
+
+    if (!fusion)
+        return;
+
+    CV_Check((int)preferableBackend, preferableBackend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH, "");
+
+#if 0  // FIXIT mode without fusion is broken due to unsupported layers and handling of "custom" nodes
+    return;
+#endif
+
+    // scan through all the layers. If there is convolution layer followed by the activation layer,
+    // we try to embed this activation into the convolution and disable separate execution of the activation
+
+    // FIXIT replace by layersToKeep to avoid hacks like "LayerPin(lid, 0)"
+    std::set<LayerPin> pinsToKeep(blobsToKeep_.begin(),
+                                  blobsToKeep_.end());
+    for (MapIdToLayerData::const_iterator it = layers.begin(); it != layers.end(); it++)
+    {
+        int lid = it->first;
+        LayerData& ld = layers[lid];
+        if (ld.skip)
+        {
+            printf_(("skipped %s: %s\n", ld.layerInstance->name.c_str(), ld.layerInstance->type.c_str()));
+            continue;
+        }
+        printf_(("analyzing %s: %s\n", ld.layerInstance->name.c_str(), ld.layerInstance->type.c_str()));
+
+        // the optimization #1. try to fuse batch norm, scaling and/or activation layers
+        // with the current layer if they follow it. Normally, they are fused with the convolution layer,
+        // but some of them (like activation) may be fused with fully-connected, elemwise (+) and
+        // some other layers.
+        Ptr<Layer>& currLayer = ld.layerInstance;
+        if (ld.consumers.size() == 1 && pinsToKeep.count(LayerPin(lid, 0)) == 0)
+        {
+            LayerData* nextData = &layers[ld.consumers[0].lid];
+            LayerPin lpNext(ld.consumers[0].lid, 0);
+            while (nextData)
+            {
+                if (preferableBackend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && pinsToKeep.count(lpNext) != 0)
+                {
+                    CV_LOG_DEBUG(NULL, "DNN/IE: skip fusing with 'output' node: " << nextData->name << "@" << nextData->type);
+                    break;
+                }
+
+                /* we use `tryFuse` member of convolution layer to fuse eltwise later
+                 * it's not intended to be fused here; hence, we stop when we encounter eltwise
+                 */
+                Ptr<Layer> nextLayer = nextData->layerInstance;
+                if (currLayer->tryFuse(nextLayer))
+                {
+                    printf_(("\tfused with %s\n", nextLayer->name.c_str()));
+                    nextData->skip = true;
+                    ld.outputBlobs = layers[lpNext.lid].outputBlobs;
+                    ld.outputBlobsWrappers = layers[lpNext.lid].outputBlobsWrappers;
+                    if (nextData->consumers.size() == 1)
+                    {
+                        int nextLayerId = nextData->consumers[0].lid;
+                        nextData = &layers[nextLayerId];
+                        lpNext = LayerPin(nextLayerId, 0);
+                    }
+                    else
+                    {
+                        nextData = 0;
+                        break;
+                    }
+                }
+                else
+                    break;
+            }
+        }
+    }
+}
+
+
+void switchToOpenVINOBackend(Net& net)
+{
+    CV_TRACE_FUNCTION();
+    Ptr<Net::Impl>& impl_ptr_ref = accessor::DnnNetAccessor::getImplPtrRef(net);
+    CV_Assert(impl_ptr_ref);
+    CV_LOG_INFO(NULL, "DNN: switching to OpenVINO backend... (networkID=" << impl_ptr_ref->networkId << ")");
+    Ptr<NetImplOpenVINO> openvino_impl_ptr = makePtr<NetImplOpenVINO>(impl_ptr_ref);
+    impl_ptr_ref = openvino_impl_ptr;
+}
 /*static*/
-Net Net::Impl::createNetworkFromModelOptimizer(InferenceEngine::CNNNetwork& ieNet)
+Net NetImplOpenVINO::createNetworkFromModelOptimizer(InferenceEngine::CNNNetwork& ieNet)
 {
     CV_TRACE_FUNCTION();
 
@@ -418,6 +712,10 @@ Net Net::Impl::createNetworkFromModelOptimizer(InferenceEngine::CNNNetwork& ieNe
     }
 
     Net cvNet;
+    Ptr<NetImplOpenVINO> openvino_impl_ptr = makePtr<NetImplOpenVINO>();
+    NetImplOpenVINO& openvino_impl = *openvino_impl_ptr;
+    accessor::DnnNetAccessor::getImplPtrRef(cvNet) = openvino_impl_ptr;
+
     cvNet.setInputsNames(inputsNames);
 
     // set empty input to determine input shapes
@@ -432,7 +730,7 @@ Net Net::Impl::createNetworkFromModelOptimizer(InferenceEngine::CNNNetwork& ieNe
     {
         auto fake_node = std::make_shared<ngraph::op::Parameter>(ngraph::element::f32, ngraph::Shape {});
         Ptr<InfEngineNgraphNode> backendNodeNGraph(new InfEngineNgraphNode(fake_node));
-        backendNodeNGraph->net = Ptr<InfEngineNgraphNet>(new InfEngineNgraphNet(*(cvNet.impl), ieNet));
+        backendNodeNGraph->net = Ptr<InfEngineNgraphNet>(new InfEngineNgraphNet(openvino_impl, ieNet));
        backendNode = backendNodeNGraph;
     }
 
@@ -450,7 +748,7 @@ Net Net::Impl::createNetworkFromModelOptimizer(InferenceEngine::CNNNetwork& ieNe
         LayerParams lp;
         int lid = cvNet.addLayer(it.first, "", lp);
 
-        LayerData& ld = cvNet.impl->layers[lid];
+        LayerData& ld = openvino_impl.layers[lid];
 
         {
             Ptr<Layer> cvLayer(new NgraphBackendLayer(ieNet));
@@ -498,26 +796,72 @@ Net Net::Impl::createNetworkFromModelOptimizer(InferenceEngine::CNNNetwork& ieNe
 
     cvNet.setPreferableBackend(DNN_BACKEND_INFERENCE_ENGINE_NGRAPH);
 
-    cvNet.impl->skipInfEngineInit = true;
     return cvNet;
 }
 
 
+static
+Net openvino_readNetwork(const String& modelPath, const String& binPath)
+{
+    FPDenormalsIgnoreHintScope fp_denormals_ignore_scope;
+
+    InferenceEngine::Core& ie = getCore("");
+    InferenceEngine::CNNNetwork ieNet;
+    try
+    {
+        ieNet = ie.ReadNetwork(modelPath, binPath);
+    }
+    catch (const std::exception& e)
+    {
+        CV_Error(Error::StsError, std::string("DNN: OpenVINO failed to read model '") + modelPath + "': " + e.what());
+    }
+
+    return NetImplOpenVINO::createNetworkFromModelOptimizer(ieNet);
+}
+
+
+static
+Net openvino_readNetwork(
+        const uchar* bufferModelConfigPtr, size_t bufferModelConfigSize,
+        const uchar* bufferWeightsPtr, size_t bufferWeightsSize
+)
+{
+    FPDenormalsIgnoreHintScope fp_denormals_ignore_scope;
+
+    InferenceEngine::Core& ie = getCore("");
+
+    std::string model; model.assign((char*)bufferModelConfigPtr, bufferModelConfigSize);
+
+    InferenceEngine::CNNNetwork ieNet;
+    try
+    {
+        InferenceEngine::TensorDesc tensorDesc(InferenceEngine::Precision::U8, { bufferWeightsSize }, InferenceEngine::Layout::C);
+        InferenceEngine::Blob::CPtr weights_blob = InferenceEngine::make_shared_blob<uint8_t>(tensorDesc, (uint8_t*)bufferWeightsPtr, bufferWeightsSize);
+
+        ieNet = ie.ReadNetwork(model, weights_blob);
+    }
+    catch (const std::exception& e)
+    {
+        CV_Error(Error::StsError, std::string("DNN: OpenVINO failed to read model: ") + e.what());
+    }
+
+    return NetImplOpenVINO::createNetworkFromModelOptimizer(ieNet);
+}
+
 #endif  // HAVE_INF_ENGINE
 
 Net Net::readFromModelOptimizer(const String& xml, const String& bin)
 {
     CV_TRACE_FUNCTION();
-#ifndef HAVE_INF_ENGINE
+#if defined(HAVE_INF_ENGINE)
+    return openvino_readNetwork(xml, bin);
+#elif defined(ENABLE_PLUGINS)
+    auto& networkBackend = dnn_backend::createPluginDNNNetworkBackend("openvino");
+    return networkBackend.readNetwork(std::string(), xml, bin);
+#else
     CV_UNUSED(xml); CV_UNUSED(bin);
     CV_Error(Error::StsError, "Build OpenCV with Inference Engine to enable loading models from Model Optimizer.");
-#else
-
-    FPDenormalsIgnoreHintScope fp_denormals_ignore_scope;
-
-    InferenceEngine::Core& ie = getCore("");
-    InferenceEngine::CNNNetwork ieNet = ie.ReadNetwork(xml, bin);
-
-    return Impl::createNetworkFromModelOptimizer(ieNet);
-#endif  // HAVE_INF_ENGINE
+#endif
 }
 
 Net Net::readFromModelOptimizer(const std::vector<uchar>& bufferModelConfig, const std::vector<uchar>& bufferWeights)
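
Usage is the same in both build modes; the dispatch above decides whether the native OpenVINO path or the plugin serves the request ("model.xml"/"model.bin" are placeholder paths):

#include <opencv2/dnn.hpp>

// Loads an OpenVINO IR pair: with HAVE_INF_ENGINE this goes through
// openvino_readNetwork(), otherwise through the "openvino" plugin backend.
cv::dnn::Net net = cv::dnn::Net::readFromModelOptimizer("model.xml", "model.bin");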
@ -535,34 +879,112 @@ Net Net::readFromModelOptimizer(
|
||||
)
|
||||
{
|
||||
CV_TRACE_FUNCTION();
|
||||
#ifndef HAVE_INF_ENGINE
|
||||
#if defined(HAVE_INF_ENGINE)
|
||||
return openvino_readNetwork(bufferModelConfigPtr, bufferModelConfigSize, bufferWeightsPtr, bufferWeightsSize);
|
||||
#elif defined(ENABLE_PLUGINS)
|
||||
auto& networkBackend = dnn_backend::createPluginDNNNetworkBackend("openvino");
|
||||
return networkBackend.readNetwork(std::string(), bufferModelConfigPtr, bufferModelConfigSize, bufferWeightsPtr, bufferWeightsSize);
|
||||
#else
|
||||
CV_UNUSED(bufferModelConfigPtr); CV_UNUSED(bufferWeightsPtr);
|
||||
CV_UNUSED(bufferModelConfigSize); CV_UNUSED(bufferModelConfigSize);
    CV_Error(Error::StsError, "Build OpenCV with Inference Engine to enable loading models from Model Optimizer.");
#else

    FPDenormalsIgnoreHintScope fp_denormals_ignore_scope;

    InferenceEngine::Core& ie = getCore("");

    std::string model; model.assign((char*)bufferModelConfigPtr, bufferModelConfigSize);

    InferenceEngine::CNNNetwork ieNet;
    try
    {
        InferenceEngine::TensorDesc tensorDesc(InferenceEngine::Precision::U8, { bufferWeightsSize }, InferenceEngine::Layout::C);
        InferenceEngine::Blob::CPtr weights_blob = InferenceEngine::make_shared_blob<uint8_t>(tensorDesc, (uint8_t*)bufferWeightsPtr, bufferWeightsSize);

        ieNet = ie.ReadNetwork(model, weights_blob);
    }
    catch (const std::exception& e)
    {
        CV_Error(Error::StsError, std::string("DNN: IE failed to load model: ") + e.what());
    }

    return Impl::createNetworkFromModelOptimizer(ieNet);
#endif // HAVE_INF_ENGINE
#endif
}


CV__DNN_INLINE_NS_END
}} // namespace cv::dnn


#ifdef BUILD_PLUGIN

#define ABI_VERSION 0
#define API_VERSION 0
#include "plugin_api.hpp"


namespace cv { namespace dnn_backend {

using namespace cv::dnn;

class NetworkBackendOpenVINO : public NetworkBackend
{
public:
    void switchBackend(Net& net) CV_OVERRIDE
    {
        cv::dnn::switchToOpenVINOBackend(net);
    }
    Net readNetwork(const std::string& loaderID, const std::string& model, const std::string& config) CV_OVERRIDE
    {
        if (!loaderID.empty())  // only auto ("") is supported
        {
            CV_Error(Error::StsError, "DNN/OpenVINO: unsupported network loader ID: " + loaderID);
        }
        return openvino_readNetwork(model, config);
    }
    Net readNetwork(
            const std::string& loaderID,
            const uchar* bufferModelConfigPtr, size_t bufferModelConfigSize,
            const uchar* bufferWeightsPtr, size_t bufferWeightsSize
    ) CV_OVERRIDE
    {
        if (!loaderID.empty())  // only auto ("") is supported
        {
            CV_Error(Error::StsError, "DNN/OpenVINO: unsupported network loader ID: " + loaderID);
        }
        return openvino_readNetwork(bufferModelConfigPtr, bufferModelConfigSize, bufferWeightsPtr, bufferWeightsSize);
    }
    bool checkTarget(Target target) CV_OVERRIDE
    {
        return openvino::checkTarget(target);
    }
};

static
std::shared_ptr<NetworkBackendOpenVINO>& getInstanceNetworkBackendOpenVINO()
{
    static std::shared_ptr<NetworkBackendOpenVINO> g_instance = std::make_shared<NetworkBackendOpenVINO>();
    return g_instance;
}


}} // namespace


static
CvResult cv_getInstanceNetworkBackend(CV_OUT CvPluginDNNNetworkBackend* handle) CV_NOEXCEPT
{
    try
    {
        if (!handle)
            return CV_ERROR_FAIL;
        *handle = cv::dnn_backend::getInstanceNetworkBackendOpenVINO().get();
        return CV_ERROR_OK;
    }
    catch (...)
    {
        return CV_ERROR_FAIL;
    }
}

static const OpenCV_DNN_Plugin_API plugin_api =
{
    {
        sizeof(OpenCV_DNN_Plugin_API), ABI_VERSION, API_VERSION,
        CV_VERSION_MAJOR, CV_VERSION_MINOR, CV_VERSION_REVISION, CV_VERSION_STATUS,
        "OpenVINO OpenCV DNN plugin (" CVAUX_STR(INF_ENGINE_RELEASE) ")"
    },
    {
        /* 1*/cv_getInstanceNetworkBackend
    }
};

const OpenCV_DNN_Plugin_API* CV_API_CALL opencv_dnn_plugin_init_v0(int requested_abi_version, int requested_api_version, void* /*reserved=NULL*/) CV_NOEXCEPT
{
    if (requested_abi_version == ABI_VERSION && requested_api_version <= API_VERSION)
        return &plugin_api;
    return NULL;
}

#endif // BUILD_PLUGIN
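The exported opencv_dnn_plugin_init_v0 symbol is what the host-side loader (plugin_wrapper.impl.hpp, added below) resolves at runtime. A minimal sketch of that handshake done by hand, assuming a POSIX host and an example library name:

    // Sketch only: manual resolution of the plugin entry point (POSIX dlopen/dlsym).
    #include <dlfcn.h>
    typedef const OpenCV_DNN_Plugin_API* (*init_fn_t)(int, int, void*);
    static const OpenCV_DNN_Plugin_API* load_plugin_api_by_hand()
    {
        void* lib = dlopen("libopencv_dnn_openvino.so", RTLD_LAZY);  // library name is an example
        if (!lib)
            return NULL;
        init_fn_t fn = (init_fn_t)dlsym(lib, "opencv_dnn_plugin_init_v0");
        return fn ? fn(/*requested_abi_version=*/0, /*requested_api_version=*/0, NULL) : NULL;
    }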
@ -33,7 +33,7 @@ void getQuantizationParams(const Mat& src, std::vector<float>& scales, std::vect
}

// FIXIT drop from inference API
Net Net::Impl::quantize(InputArrayOfArrays calibData, int inputsDtype, int outputsDtype, bool perChannel)
Net Net::Impl::quantize(Net& net, InputArrayOfArrays calibData, int inputsDtype, int outputsDtype, bool perChannel)
{
    // Net can be quantized only once.
    if (netWasQuantized)
@ -47,7 +47,8 @@ Net Net::Impl::quantize(InputArrayOfArrays calibData, int inputsDtype, int outpu
    int prefTarget = preferableTarget;

    // Disable fusions and use CPU backend to quantize net
    setPreferableBackend(DNN_BACKEND_OPENCV);
    // FIXIT: we should not modify original network!
    setPreferableBackend(net, DNN_BACKEND_OPENCV);
    setPreferableTarget(DNN_TARGET_CPU);
    enableFusion(false);

@ -163,7 +164,7 @@ Net Net::Impl::quantize(InputArrayOfArrays calibData, int inputsDtype, int outpu
    Net::Impl& dstNet = *(dstNet_.impl);
    dstNet.netWasQuantized = true;
    dstNet.setInputsNames(netInputLayer->outNames);
    dstNet.setPreferableBackend(prefBackend);
    dstNet.setPreferableBackend(dstNet_, prefBackend);
    dstNet.setPreferableTarget(prefTarget);
    dstNet.enableFusion(originalFusion);

@ -253,7 +254,7 @@ Net Net::Impl::quantize(InputArrayOfArrays calibData, int inputsDtype, int outpu
        }
    }
    // Restore FP32 Net's backend, target and fusion
    setPreferableBackend(prefBackend);
    setPreferableBackend(net, prefBackend);
    setPreferableTarget(prefTarget);
    enableFusion(originalFusion);
    return dstNet_;
@ -11,7 +11,11 @@

#ifdef HAVE_INF_ENGINE
#include <ie_extension.h>
#endif // HAVE_INF_ENGINE
#elif defined(ENABLE_PLUGINS)
// using plugin API
#include "backend.hpp"
#include "factory.hpp"
#endif

#include <opencv2/core/utils/configuration.private.hpp>
#include <opencv2/core/utils/logger.hpp>
@ -155,7 +159,6 @@ static bool detectMyriadX_(const std::string& device)
}
#endif // !defined(OPENCV_DNN_IE_VPU_TYPE_DEFAULT)


#endif // HAVE_INF_ENGINE


@ -281,24 +284,100 @@ bool checkTarget(Target target)

#else // HAVE_INF_ENGINE


namespace openvino {

bool checkTarget(Target target)
{
#if defined(ENABLE_PLUGINS)
    try
    {
        auto& networkBackend = dnn_backend::createPluginDNNNetworkBackend("openvino");
        return networkBackend.checkTarget(target);
    }
    catch (const std::exception& e)
    {
        CV_LOG_INFO(NULL, "DNN/OpenVINO: checkTarget failed: " << e.what())
    }
#endif
    return false;
}

} // namespace openvino


cv::String getInferenceEngineBackendType()
{
#if defined(ENABLE_PLUGINS)
    try
    {
        auto& networkBackend = dnn_backend::createPluginDNNNetworkBackend("openvino");
        CV_UNUSED(networkBackend);
        return CV_DNN_BACKEND_INFERENCE_ENGINE_NGRAPH;
    }
    catch (const std::exception& e)
    {
        CV_LOG_INFO(NULL, "DNN/OpenVINO: plugin is not available: " << e.what())
    }
#endif
    CV_Error(Error::StsNotImplemented, "This OpenCV build doesn't include InferenceEngine support");
}
cv::String setInferenceEngineBackendType(const cv::String& newBackendType)
{
#if defined(ENABLE_PLUGINS)
    try
    {
        auto& networkBackend = dnn_backend::createPluginDNNNetworkBackend("openvino");
        CV_UNUSED(networkBackend);
        CV_Assert(newBackendType == CV_DNN_BACKEND_INFERENCE_ENGINE_NGRAPH);
    }
    catch (const std::exception& e)
    {
        CV_LOG_INFO(NULL, "DNN/OpenVINO: plugin is not available: " << e.what())
    }
#endif
    CV_UNUSED(newBackendType);
    CV_Error(Error::StsNotImplemented, "This OpenCV build doesn't include InferenceEngine support");
}
cv::String getInferenceEngineVPUType()
{
#if defined(ENABLE_PLUGINS)
    try
    {
        auto& networkBackend = dnn_backend::createPluginDNNNetworkBackend("openvino");
        if (networkBackend.checkTarget(DNN_TARGET_MYRIAD))
            return CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X;  // 2021.4 supports NCS2 only
        CV_Error(Error::StsError, "DNN/OpenVINO: DNN_TARGET_MYRIAD is not available");
    }
    catch (const std::exception& e)
    {
        CV_LOG_INFO(NULL, "DNN/OpenVINO: plugin is not available: " << e.what())
    }
#endif
    CV_Error(Error::StsNotImplemented, "This OpenCV build doesn't include InferenceEngine support");
}

cv::String getInferenceEngineCPUType()
{
#if defined(ENABLE_PLUGINS)
    try
    {
        auto& networkBackend = dnn_backend::createPluginDNNNetworkBackend("openvino");
        CV_UNUSED(networkBackend);
#if defined(__arm__) || defined(__aarch64__) || defined(_M_ARM64)
        return CV_DNN_INFERENCE_ENGINE_CPU_TYPE_ARM_COMPUTE;
#else
        return CV_DNN_INFERENCE_ENGINE_CPU_TYPE_X86;
#endif
    }
    catch (const std::exception& e)
    {
        CV_LOG_INFO(NULL, "DNN/OpenVINO: plugin is not available: " << e.what())
    }
#endif
    CV_Error(Error::StsNotImplemented, "This OpenCV build doesn't include InferenceEngine support");
}

#endif // HAVE_INF_ENGINE
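In plugin-only builds these query helpers either return a value obtained through the loaded plugin or throw, so callers that merely probe availability should guard them. A short sketch:

    // Sketch: probe OpenVINO availability without terminating on error.
    #include <opencv2/dnn/utils/inference_engine.hpp>
    static bool openvinoUsable()
    {
        try
        {
            (void)cv::dnn::getInferenceEngineCPUType();
            return true;   // built-in engine or plugin responded
        }
        catch (const cv::Exception&)
        {
            return false;  // fall back to DNN_BACKEND_OPENCV
        }
    }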

@ -60,6 +60,15 @@

namespace cv { namespace dnn {

CV__DNN_INLINE_NS_BEGIN
namespace openvino {

// TODO: use std::string as parameter
bool checkTarget(Target target);

} // namespace openvino
CV__DNN_INLINE_NS_END

#ifdef HAVE_INF_ENGINE

Backend& getInferenceEngineBackendTypeParam();
@ -73,12 +82,7 @@ void infEngineBlobsToMats(const std::vector<InferenceEngine::Blob::Ptr>& blobs,

CV__DNN_INLINE_NS_BEGIN

namespace openvino {

// TODO: use std::string as parameter
bool checkTarget(Target target);

} // namespace openvino
void switchToOpenVINOBackend(Net& net);

bool isMyriadX();

modules/dnn/src/plugin_api.hpp (new file, 72 lines)
@ -0,0 +1,72 @@

// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.

#ifndef DNN_PLUGIN_API_HPP
#define DNN_PLUGIN_API_HPP

#include <opencv2/core/cvdef.h>
#include <opencv2/core/llapi/llapi.h>

#include "backend.hpp"

#if !defined(BUILD_PLUGIN)

/// increased for backward-compatible changes, e.g. add new function
/// Caller API <= Plugin API -> plugin is fully compatible
/// Caller API > Plugin API -> plugin is not fully compatible, caller should use extra checks to use plugins with older API
#define API_VERSION 0 // preview

/// increased for incompatible changes, e.g. remove function argument
/// Caller ABI == Plugin ABI -> plugin is compatible
/// Caller ABI > Plugin ABI -> plugin is not compatible, caller should use shim code to use old ABI plugins (caller may know how lower ABI works, so it is possible)
/// Caller ABI < Plugin ABI -> plugin can't be used (plugin should provide interface with lower ABI to handle that)
#define ABI_VERSION 0 // preview
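// Illustrative reading of the rules above (editorial note, not part of the original header):
// a caller built with API_VERSION 1 first calls fn_init(ABI=0, API=1); a plugin that only
// implements API 0 returns NULL, so the caller retries with API=0 and proceeds with reduced
// functionality. An ABI mismatch is never negotiated; the plugin is simply rejected.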

#else // !defined(BUILD_PLUGIN)

#if !defined(ABI_VERSION) || !defined(API_VERSION)
#error "Plugin must define ABI_VERSION and API_VERSION before including plugin_api.hpp"
#endif

#endif // !defined(BUILD_PLUGIN)

typedef cv::dnn_backend::NetworkBackend* CvPluginDNNNetworkBackend;

struct OpenCV_DNN_Plugin_API_v0_0_api_entries
{
    /** @brief Get backend API instance

    @param[out] handle pointer to the inference backend API handle

    @note API-CALL 1, API-Version == 0
    */
    CvResult (CV_API_CALL *getInstance)(CV_OUT CvPluginDNNNetworkBackend* handle) CV_NOEXCEPT;
}; // OpenCV_DNN_Plugin_API_v0_0_api_entries

typedef struct OpenCV_DNN_Plugin_API_v0
{
    OpenCV_API_Header api_header;
    struct OpenCV_DNN_Plugin_API_v0_0_api_entries v0;
} OpenCV_DNN_Plugin_API_v0;

#if ABI_VERSION == 0 && API_VERSION == 0
typedef OpenCV_DNN_Plugin_API_v0 OpenCV_DNN_Plugin_API;
#else
#error "Not supported configuration: check ABI_VERSION/API_VERSION"
#endif

#ifdef BUILD_PLUGIN
extern "C" {

CV_PLUGIN_EXPORTS
const OpenCV_DNN_Plugin_API* CV_API_CALL opencv_dnn_plugin_init_v0
        (int requested_abi_version, int requested_api_version, void* reserved /*NULL*/) CV_NOEXCEPT;

} // extern "C"
#else // BUILD_PLUGIN
typedef const OpenCV_DNN_Plugin_API* (CV_API_CALL *FN_opencv_dnn_plugin_init_t)
        (int requested_abi_version, int requested_api_version, void* reserved /*NULL*/);
#endif // BUILD_PLUGIN

#endif // DNN_PLUGIN_API_HPP
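For orientation, a skeletal out-of-tree plugin built against this header might look as follows; all names are hypothetical, and it assumes NetworkBackend's virtual interface is exactly the four methods exercised by the in-tree OpenVINO plugin above (which is the real instance of this pattern):

    // Hypothetical plugin translation unit.
    #define ABI_VERSION 0
    #define API_VERSION 0
    #include "plugin_api.hpp"

    class MyBackend : public cv::dnn_backend::NetworkBackend
    {
    public:
        void switchBackend(cv::dnn::Net& net) CV_OVERRIDE
        { CV_UNUSED(net); CV_Error(cv::Error::StsNotImplemented, ""); }
        cv::dnn::Net readNetwork(const std::string& loaderID, const std::string& model, const std::string& config) CV_OVERRIDE
        { CV_UNUSED(loaderID); CV_UNUSED(model); CV_UNUSED(config); return cv::dnn::Net(); }
        cv::dnn::Net readNetwork(const std::string& loaderID,
                                 const uchar* modelPtr, size_t modelSize,
                                 const uchar* weightsPtr, size_t weightsSize) CV_OVERRIDE
        { CV_UNUSED(loaderID); CV_UNUSED(modelPtr); CV_UNUSED(modelSize);
          CV_UNUSED(weightsPtr); CV_UNUSED(weightsSize); return cv::dnn::Net(); }
        bool checkTarget(cv::dnn::Target target) CV_OVERRIDE
        { CV_UNUSED(target); return false; }
    };
    static MyBackend g_backend;

    static CvResult CV_API_CALL my_getInstance(CV_OUT CvPluginDNNNetworkBackend* handle) CV_NOEXCEPT
    {
        if (!handle)
            return CV_ERROR_FAIL;
        *handle = &g_backend;
        return CV_ERROR_OK;
    }

    static const OpenCV_DNN_Plugin_API my_plugin_api =
    {
        {
            sizeof(OpenCV_DNN_Plugin_API), ABI_VERSION, API_VERSION,
            CV_VERSION_MAJOR, CV_VERSION_MINOR, CV_VERSION_REVISION, CV_VERSION_STATUS,
            "Example DNN plugin"
        },
        { my_getInstance }
    };

    extern "C" CV_PLUGIN_EXPORTS
    const OpenCV_DNN_Plugin_API* CV_API_CALL opencv_dnn_plugin_init_v0(
            int requested_abi_version, int requested_api_version, void* /*reserved*/) CV_NOEXCEPT
    {
        if (requested_abi_version == ABI_VERSION && requested_api_version <= API_VERSION)
            return &my_plugin_api;
        return NULL;
    }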
modules/dnn/src/plugin_wrapper.impl.hpp (new file, 319 lines)
@ -0,0 +1,319 @@

// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.

//
// Not a standalone header, part of backend.cpp
//

//==================================================================================================
// Dynamic backend implementation

#include "opencv2/core/utils/plugin_loader.private.hpp"

namespace cv { namespace impl {

using namespace cv::dnn_backend;

#if OPENCV_HAVE_FILESYSTEM_SUPPORT && defined(ENABLE_PLUGINS)

using namespace cv::plugin::impl;  // plugin_loader.hpp

class PluginDNNBackend CV_FINAL : public std::enable_shared_from_this<PluginDNNBackend>
{
protected:
    void initPluginAPI()
    {
        const char* init_name = "opencv_dnn_plugin_init_v0";
        FN_opencv_dnn_plugin_init_t fn_init = reinterpret_cast<FN_opencv_dnn_plugin_init_t>(lib_->getSymbol(init_name));
        if (fn_init)
        {
            CV_LOG_DEBUG(NULL, "Found entry: '" << init_name << "'");
            for (int supported_api_version = API_VERSION; supported_api_version >= 0; supported_api_version--)
            {
                plugin_api_ = fn_init(ABI_VERSION, supported_api_version, NULL);
                if (plugin_api_)
                    break;
            }
            if (!plugin_api_)
            {
                CV_LOG_INFO(NULL, "DNN: plugin is incompatible (can't be initialized): " << lib_->getName());
                return;
            }
            // NB: force strict minor version check (ABI is not preserved for now)
            if (!checkCompatibility(plugin_api_->api_header, ABI_VERSION, API_VERSION, true))
            {
                plugin_api_ = NULL;
                return;
            }
            CV_LOG_INFO(NULL, "DNN: plugin is ready to use '" << plugin_api_->api_header.api_description << "'");
        }
        else
        {
            CV_LOG_INFO(NULL, "DNN: plugin is incompatible, missing init function: '" << init_name << "', file: " << lib_->getName());
        }
    }


    bool checkCompatibility(const OpenCV_API_Header& api_header, unsigned int abi_version, unsigned int api_version, bool checkMinorOpenCVVersion)
    {
        if (api_header.opencv_version_major != CV_VERSION_MAJOR)
        {
            CV_LOG_ERROR(NULL, "DNN: wrong OpenCV major version used by plugin '" << api_header.api_description << "': " <<
                cv::format("%d.%d, OpenCV version is '" CV_VERSION "'", api_header.opencv_version_major, api_header.opencv_version_minor))
            return false;
        }
        if (!checkMinorOpenCVVersion)
        {
            // no checks for OpenCV minor version
        }
        else if (api_header.opencv_version_minor != CV_VERSION_MINOR)
        {
            CV_LOG_ERROR(NULL, "DNN: wrong OpenCV minor version used by plugin '" << api_header.api_description << "': " <<
                cv::format("%d.%d, OpenCV version is '" CV_VERSION "'", api_header.opencv_version_major, api_header.opencv_version_minor))
            return false;
        }
        CV_LOG_DEBUG(NULL, "DNN: initialized '" << api_header.api_description << "': built with "
            << cv::format("OpenCV %d.%d (ABI/API = %d/%d)",
                api_header.opencv_version_major, api_header.opencv_version_minor,
                api_header.min_api_version, api_header.api_version)
            << ", current OpenCV version is '" CV_VERSION "' (ABI/API = " << abi_version << "/" << api_version << ")"
        );
        if (api_header.min_api_version != abi_version)  // future: range can be here
        {
            // actually this should never happen due to checks in plugin's init() function
            CV_LOG_ERROR(NULL, "DNN: plugin is not supported due to incompatible ABI = " << api_header.min_api_version);
            return false;
        }
        if (api_header.api_version != api_version)
        {
CV_LOG_INFO(NULL, "DNN: NOTE: plugin is supported, but there is API version mismath: "
                << cv::format("plugin API level (%d) != OpenCV API level (%d)", api_header.api_version, api_version));
            if (api_header.api_version < api_version)
            {
                CV_LOG_INFO(NULL, "DNN: NOTE: some functionality may be unavailable due to lack of support by plugin implementation");
            }
        }
        return true;
    }

public:
    std::shared_ptr<cv::plugin::impl::DynamicLib> lib_;
    const OpenCV_DNN_Plugin_API* plugin_api_;

    PluginDNNBackend(const std::shared_ptr<cv::plugin::impl::DynamicLib>& lib)
        : lib_(lib)
        , plugin_api_(NULL)
    {
        initPluginAPI();
    }

    std::shared_ptr<cv::dnn_backend::NetworkBackend> createNetworkBackend() const
    {
        CV_Assert(plugin_api_);

        CvPluginDNNNetworkBackend instancePtr = NULL;

        if (plugin_api_->v0.getInstance)
        {
            if (CV_ERROR_OK == plugin_api_->v0.getInstance(&instancePtr))
            {
                CV_Assert(instancePtr);
                // TODO C++20 "aliasing constructor"
                return std::shared_ptr<cv::dnn_backend::NetworkBackend>(instancePtr, [](cv::dnn_backend::NetworkBackend*){});  // empty deleter
            }
        }
        return std::shared_ptr<cv::dnn_backend::NetworkBackend>();
    }

};  // class PluginDNNBackend


class PluginDNNBackendFactory CV_FINAL : public IDNNBackendFactory
{
public:
    std::string baseName_;
    std::shared_ptr<PluginDNNBackend> backend;
    bool initialized;
public:
    PluginDNNBackendFactory(const std::string& baseName)
        : baseName_(baseName)
        , initialized(false)
    {
        // nothing, plugins are loaded on demand
    }

    std::shared_ptr<cv::dnn_backend::NetworkBackend> createNetworkBackend() const CV_OVERRIDE
    {
        if (!initialized)
        {
            const_cast<PluginDNNBackendFactory*>(this)->initBackend();
        }
        if (backend)
            return backend->createNetworkBackend();
        return std::shared_ptr<cv::dnn_backend::NetworkBackend>();
    }

protected:
    void initBackend()
    {
        AutoLock lock(getInitializationMutex());
        try
        {
            if (!initialized)
                loadPlugin();
        }
        catch (...)
        {
            CV_LOG_INFO(NULL, "DNN: exception during plugin loading: " << baseName_ << ". SKIP");
        }
        initialized = true;
    }
    void loadPlugin();
};

static
std::vector<FileSystemPath_t> getPluginCandidates(const std::string& baseName)
{
    using namespace cv::utils;
    using namespace cv::utils::fs;
    const std::string baseName_l = toLowerCase(baseName);
    const std::string baseName_u = toUpperCase(baseName);
    const FileSystemPath_t baseName_l_fs = toFileSystemPath(baseName_l);
    std::vector<FileSystemPath_t> paths;
    // TODO OPENCV_PLUGIN_PATH
    const std::vector<std::string> paths_ = getConfigurationParameterPaths("OPENCV_DNN_PLUGIN_PATH", std::vector<std::string>());
    if (paths_.size() != 0)
    {
        for (size_t i = 0; i < paths_.size(); i++)
        {
            paths.push_back(toFileSystemPath(paths_[i]));
        }
    }
    else
    {
        FileSystemPath_t binaryLocation;
        if (getBinLocation(binaryLocation))
        {
            binaryLocation = getParent(binaryLocation);
#ifndef CV_DNN_PLUGIN_SUBDIRECTORY
            paths.push_back(binaryLocation);
#else
            paths.push_back(binaryLocation + toFileSystemPath("/") + toFileSystemPath(CV_DNN_PLUGIN_SUBDIRECTORY_STR));
#endif
        }
    }
    const std::string default_expr = libraryPrefix() + "opencv_dnn_" + baseName_l + "*" + librarySuffix();
    const std::string plugin_expr = getConfigurationParameterString((std::string("OPENCV_DNN_PLUGIN_") + baseName_u).c_str(), default_expr.c_str());
    std::vector<FileSystemPath_t> results;
#ifdef _WIN32
    FileSystemPath_t moduleName = toFileSystemPath(libraryPrefix() + "opencv_dnn_" + baseName_l + librarySuffix());
    if (plugin_expr != default_expr)
    {
        moduleName = toFileSystemPath(plugin_expr);
        results.push_back(moduleName);
    }
    for (const FileSystemPath_t& path : paths)
    {
        results.push_back(path + L"\\" + moduleName);
    }
    results.push_back(moduleName);
#else
    CV_LOG_DEBUG(NULL, "DNN: " << baseName << " plugin's glob is '" << plugin_expr << "', " << paths.size() << " location(s)");
    for (const std::string& path : paths)
    {
        if (path.empty())
            continue;
        std::vector<std::string> candidates;
        cv::glob(utils::fs::join(path, plugin_expr), candidates);
        // Prefer candidates with higher versions
        // TODO: implement an accurate version-based comparator
        std::sort(candidates.begin(), candidates.end(), std::greater<std::string>());
        CV_LOG_DEBUG(NULL, " - " << path << ": " << candidates.size());
        copy(candidates.begin(), candidates.end(), back_inserter(results));
    }
#endif
    CV_LOG_DEBUG(NULL, "Found " << results.size() << " plugin(s) for " << baseName);
    return results;
}
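Plugin discovery is thus driven by two environment variables read above; an illustrative configuration (values are placeholders, POSIX setenv shown):

    // Example: point the loader at a custom plugin build before first use.
    #include <cstdlib>
    int main()
    {
        setenv("OPENCV_DNN_PLUGIN_PATH", "/opt/opencv/plugins", 1);             // explicit search path(s)
        setenv("OPENCV_DNN_PLUGIN_OPENVINO", "libopencv_dnn_openvino*.so", 1);  // override the default glob
        // ... create a cv::dnn::Net afterwards; the first plugin query honors these settings
        return 0;
    }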

void PluginDNNBackendFactory::loadPlugin()
{
    for (const FileSystemPath_t& plugin : getPluginCandidates(baseName_))
    {
        auto lib = std::make_shared<cv::plugin::impl::DynamicLib>(plugin);
        if (!lib->isLoaded())
        {
            continue;
        }
        try
        {
            auto pluginBackend = std::make_shared<PluginDNNBackend>(lib);
            if (!pluginBackend)
            {
                continue;
            }
            if (pluginBackend->plugin_api_ == NULL)
            {
                CV_LOG_ERROR(NULL, "DNN: no compatible plugin API for backend: " << baseName_ << " in " << toPrintablePath(plugin));
                continue;
            }
            // NB: we are going to use backend, so prevent automatic library unloading
            lib->disableAutomaticLibraryUnloading();
            backend = pluginBackend;
            return;
        }
        catch (...)
        {
            CV_LOG_WARNING(NULL, "DNN: exception during plugin initialization: " << toPrintablePath(plugin) << ". SKIP");
        }
    }
}

#endif // OPENCV_HAVE_FILESYSTEM_SUPPORT && defined(ENABLE_PLUGINS)

} // namespace



namespace dnn_backend {


std::shared_ptr<IDNNBackendFactory> createPluginDNNBackendFactory(const std::string& baseName)
{
#if OPENCV_HAVE_FILESYSTEM_SUPPORT && defined(ENABLE_PLUGINS)
    const std::string baseName_u = toUpperCase(baseName);
    AutoLock lock(getInitializationMutex());
    static std::map<std::string, std::shared_ptr<IDNNBackendFactory>> g_plugins_cache;
    auto it = g_plugins_cache.find(baseName_u);
    if (it == g_plugins_cache.end())
    {
        auto factory = std::make_shared<impl::PluginDNNBackendFactory>(baseName);
        g_plugins_cache.insert(std::pair<std::string, std::shared_ptr<IDNNBackendFactory>>(baseName_u, factory));
        return factory;
    }
    return it->second;
#else
    CV_UNUSED(baseName);
    return std::shared_ptr<IDNNBackendFactory>();
#endif
}


cv::dnn_backend::NetworkBackend& createPluginDNNNetworkBackend(const std::string& baseName)
{
    auto factory = dnn_backend::createPluginDNNBackendFactory(baseName);
    if (!factory)
    {
        CV_Error(Error::StsNotImplemented, cv::format("Plugin factory is not available: '%s'", baseName.c_str()));
    }
    auto backend = factory->createNetworkBackend();
    if (!backend)
    {
        CV_Error(Error::StsNotImplemented, cv::format("Backend (plugin) is not available: '%s'", baseName.c_str()));
    }
    return *backend;
}


}} // namespace
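A caller-side sketch of the factory entry points defined above (model paths are placeholders):

    // Sketch: obtain the OpenVINO backend through the plugin factory.
    void pluginExample()  // hypothetical
    {
        try
        {
            cv::dnn_backend::NetworkBackend& be = cv::dnn_backend::createPluginDNNNetworkBackend("openvino");
            cv::dnn::Net net = be.readNetwork(std::string(), "model.xml", "model.bin");
        }
        catch (const cv::Exception&)
        {
            // no compatible plugin found; use another DNN backend
        }
    }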
@ -39,8 +39,14 @@
//
//M*/

#include <opencv2/core.hpp>
#if !defined(BUILD_PLUGIN)
#include "cvconfig.h"
#else
#include <opencv2/core/cvdef.h>
#undef __OPENCV_BUILD  // allow public API only
#endif

#include <opencv2/core.hpp>

#ifndef CV_OCL4DNN
#define CV_OCL4DNN 0
@ -14,6 +14,8 @@

#include "halide_scheduler.hpp"

#include "backend.hpp"
#include "factory.hpp"

namespace cv {
namespace dnn {
@ -43,43 +45,46 @@ private:
#endif
#endif // HAVE_HALIDE

    bool haveBackendOpenVINO = false;
#ifdef HAVE_INF_ENGINE
    if (openvino::checkTarget(DNN_TARGET_CPU))
        haveBackendOpenVINO = true;
#elif defined(ENABLE_PLUGINS)
    {
        auto factory = dnn_backend::createPluginDNNBackendFactory("openvino");
        if (factory)
        {
            auto backend = factory->createNetworkBackend();
            if (backend)
                haveBackendOpenVINO = true;
        }
    }
#endif

    if (haveBackendOpenVINO && openvino::checkTarget(DNN_TARGET_CPU))
    {
#ifdef HAVE_DNN_NGRAPH
        backends.push_back(std::make_pair(DNN_BACKEND_INFERENCE_ENGINE_NGRAPH, DNN_TARGET_CPU));
#endif
    }
    if (openvino::checkTarget(DNN_TARGET_MYRIAD))
    if (haveBackendOpenVINO && openvino::checkTarget(DNN_TARGET_MYRIAD))
    {
#ifdef HAVE_DNN_NGRAPH
        backends.push_back(std::make_pair(DNN_BACKEND_INFERENCE_ENGINE_NGRAPH, DNN_TARGET_MYRIAD));
#endif
    }
    if (openvino::checkTarget(DNN_TARGET_HDDL))
    if (haveBackendOpenVINO && openvino::checkTarget(DNN_TARGET_HDDL))
    {
#ifdef HAVE_DNN_NGRAPH
        backends.push_back(std::make_pair(DNN_BACKEND_INFERENCE_ENGINE_NGRAPH, DNN_TARGET_HDDL));
#endif
    }
#ifdef HAVE_OPENCL
    if (cv::ocl::useOpenCL() && ocl::Device::getDefault().isIntel())
    {
        if (openvino::checkTarget(DNN_TARGET_OPENCL))
        if (haveBackendOpenVINO && openvino::checkTarget(DNN_TARGET_OPENCL))
        {
#ifdef HAVE_DNN_NGRAPH
            backends.push_back(std::make_pair(DNN_BACKEND_INFERENCE_ENGINE_NGRAPH, DNN_TARGET_OPENCL));
#endif
        }
        if (openvino::checkTarget(DNN_TARGET_OPENCL_FP16))
        if (haveBackendOpenVINO && openvino::checkTarget(DNN_TARGET_OPENCL_FP16))
        {
#ifdef HAVE_DNN_NGRAPH
            backends.push_back(std::make_pair(DNN_BACKEND_INFERENCE_ENGINE_NGRAPH, DNN_TARGET_OPENCL_FP16));
#endif
        }
    }
#endif
#endif // HAVE_INF_ENGINE
#endif // HAVE_OPENCL

#ifdef HAVE_WEBNN
    if (haveWebnn())
@ -132,10 +137,9 @@ std::vector<Target> getAvailableTargets(Backend be)
{
    if (be == DNN_BACKEND_DEFAULT)
        be = (Backend)getParam_DNN_BACKEND_DEFAULT();
#ifdef HAVE_INF_ENGINE

    if (be == DNN_BACKEND_INFERENCE_ENGINE)
        be = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH;
#endif

    std::vector<Target> result;
    const BackendRegistry::BackendsList all_backends = getAvailableBackends();
@ -130,9 +130,7 @@ void normAssertTextDetections(

void readFileContent(const std::string& filename, CV_OUT std::vector<char>& content);

#ifdef HAVE_INF_ENGINE
bool validateVPUType();
#endif

testing::internal::ParamGenerator< tuple<Backend, Target> > dnnBackendsAndTargets(
    bool withInferenceEngine = true,
@ -254,9 +254,7 @@ testing::internal::ParamGenerator< tuple<Backend, Target> > dnnBackendsAndTarget
    bool withWebnn /*= false*/
)
{
#ifdef HAVE_INF_ENGINE
    bool withVPU = validateVPUType();
#endif

    std::vector< tuple<Backend, Target> > targets;
    std::vector< Target > available;
@ -266,7 +264,6 @@ testing::internal::ParamGenerator< tuple<Backend, Target> > dnnBackendsAndTarget
        for (std::vector< Target >::const_iterator i = available.begin(); i != available.end(); ++i)
            targets.push_back(make_tuple(DNN_BACKEND_HALIDE, *i));
    }
#ifdef HAVE_INF_ENGINE
    if (withInferenceEngine)
    {
        available = getAvailableTargets(DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019);
@ -288,9 +285,6 @@ testing::internal::ParamGenerator< tuple<Backend, Target> > dnnBackendsAndTarget
        }

    }
#else
    CV_UNUSED(withInferenceEngine);
#endif
    if (withVkCom)
    {
        available = getAvailableTargets(DNN_BACKEND_VKCOM);
@ -356,7 +350,6 @@ testing::internal::ParamGenerator< tuple<Backend, Target> > dnnBackendsAndTarget
#endif
}

#ifdef HAVE_INF_ENGINE
static std::string getTestInferenceEngineVPUType()
{
    static std::string param_vpu_type = utils::getConfigurationParameterString("OPENCV_TEST_DNN_IE_VPU_TYPE", "");
@ -419,7 +412,6 @@ bool validateVPUType()
    static bool result = validateVPUType_();
    return result;
}
#endif // HAVE_INF_ENGINE


void initDNNTests()