diff --git a/cmake/OpenCVUtils.cmake b/cmake/OpenCVUtils.cmake
index 579c5ce54c..3a81ada6ae 100644
--- a/cmake/OpenCVUtils.cmake
+++ b/cmake/OpenCVUtils.cmake
@@ -1094,6 +1094,18 @@ macro(ocv_list_filterout lst regex)
  endforeach()
endmacro()

+# Usage: ocv_list_filterout_ex(list_name regex1 regex2 ...)
+macro(ocv_list_filterout_ex lst)
+  foreach(regex ${ARGN})
+    foreach(item ${${lst}})
+      if(item MATCHES "${regex}")
+        list(REMOVE_ITEM ${lst} "${item}")
+      endif()
+    endforeach()
+  endforeach()
+endmacro()
+
+
# filter matching elements from the list
macro(ocv_list_filter lst regex)
  set(dst ${ARGN})
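Unlike the existing ocv_list_filterout(), which takes a single regex, the new helper accepts several patterns in one call. A minimal usage sketch (file names are illustrative, not from this patch):

  set(plugin_srcs
      "${CMAKE_CURRENT_LIST_DIR}/src/dnn.cpp"
      "${CMAKE_CURRENT_LIST_DIR}/src/layers/conv.cpp"
      "${CMAKE_CURRENT_LIST_DIR}/src/cuda/ptx.cpp"
  )
  ocv_list_filterout_ex(plugin_srcs
      "/src/dnn.cpp$"   # drop one file
      "/src/cuda/"      # drop a whole subtree
  )
  # plugin_srcs now contains only .../src/layers/conv.cpp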
diff --git a/modules/core/include/opencv2/core/utils/filesystem.hpp b/modules/core/include/opencv2/core/utils/filesystem.hpp
index a98d2202fc..8619ae4d1a 100644
--- a/modules/core/include/opencv2/core/utils/filesystem.hpp
+++ b/modules/core/include/opencv2/core/utils/filesystem.hpp
@@ -62,7 +62,7 @@ CV_EXPORTS void glob_relative(const cv::String& directory, const cv::String& pat
CV_EXPORTS bool createDirectory(const cv::String& path);
CV_EXPORTS bool createDirectories(const cv::String& path);

-#ifdef __OPENCV_BUILD
+#if defined(__OPENCV_BUILD) || defined(BUILD_PLUGIN)
// TODO
//CV_EXPORTS cv::String getTempDirectory();

diff --git a/modules/dnn/CMakeLists.txt b/modules/dnn/CMakeLists.txt
index 08cda81819..6333646a5a 100644
--- a/modules/dnn/CMakeLists.txt
+++ b/modules/dnn/CMakeLists.txt
@@ -13,18 +13,22 @@ ocv_add_dispatched_file_force_all("int8layers/layers_common" AVX2 AVX512_SKX LAS
ocv_add_module(dnn opencv_core opencv_imgproc WRAP python java objc js)

+
+include(${CMAKE_CURRENT_LIST_DIR}/cmake/plugin.cmake)
+
+
ocv_option(OPENCV_DNN_OPENCL "Build with OpenCL support" HAVE_OPENCL AND NOT APPLE)

if(OPENCV_DNN_OPENCL AND HAVE_OPENCL)
-  add_definitions(-DCV_OCL4DNN=1)
+  ocv_target_compile_definitions(${the_module} PRIVATE "CV_OCL4DNN=1")
endif()

if(WITH_WEBNN AND HAVE_WEBNN)
-  add_definitions(-DHAVE_WEBNN=1)
+  ocv_target_compile_definitions(${the_module} PRIVATE "HAVE_WEBNN=1")
endif()

if(HAVE_TIMVX)
-  add_definitions(-DHAVE_TIMVX=1)
+  ocv_target_compile_definitions(${the_module} PRIVATE "HAVE_TIMVX=1")
endif()

ocv_option(OPENCV_DNN_CUDA "Build with CUDA support"
@@ -35,7 +39,7 @@ ocv_option(OPENCV_DNN_CUDA "Build with CUDA support"

if(OPENCV_DNN_CUDA)
  if(HAVE_CUDA AND HAVE_CUBLAS AND HAVE_CUDNN)
-    add_definitions(-DCV_CUDA4DNN=1)
+    ocv_target_compile_definitions(${the_module} PRIVATE "CV_CUDA4DNN=1")
  else()
    if(NOT HAVE_CUDA)
      message(SEND_ERROR "DNN: CUDA backend requires CUDA Toolkit. Please resolve dependency or disable OPENCV_DNN_CUDA=OFF")
@@ -47,12 +51,15 @@ if(OPENCV_DNN_CUDA)
  endif()
endif()

+ocv_cmake_hook_append(INIT_MODULE_SOURCES_opencv_dnn "${CMAKE_CURRENT_LIST_DIR}/cmake/hooks/INIT_MODULE_SOURCES_opencv_dnn.cmake")
+
if(HAVE_TENGINE)
-  add_definitions(-DHAVE_TENGINE=1)
+  ocv_target_compile_definitions(${the_module} PRIVATE "HAVE_TENGINE=1")
endif()

+
if(MSVC)
  add_definitions( -D_CRT_SECURE_NO_WARNINGS=1 )
  ocv_warnings_disable(CMAKE_CXX_FLAGS /wd4244 /wd4267 /wd4018 /wd4355 /wd4800 /wd4251 /wd4996 /wd4146
@@ -87,10 +94,10 @@ if(ANDROID)
endif()

if(NOT BUILD_PROTOBUF)
-  add_definitions(-DOPENCV_DNN_EXTERNAL_PROTOBUF=1)
+  ocv_target_compile_definitions(${the_module} PRIVATE "OPENCV_DNN_EXTERNAL_PROTOBUF=1")
endif()

-add_definitions(-DHAVE_PROTOBUF=1)
+ocv_target_compile_definitions(${the_module} PRIVATE "HAVE_PROTOBUF=1")

#suppress warnings in autogenerated caffe.pb.* files
ocv_warnings_disable(CMAKE_CXX_FLAGS
@@ -175,12 +182,34 @@ endif()

set(dnn_runtime_libs "")

+file(GLOB_RECURSE dnn_srcs
+    "${CMAKE_CURRENT_LIST_DIR}/src/*.cpp"
+)
+file(GLOB_RECURSE dnn_int_hdrs
+    "${CMAKE_CURRENT_LIST_DIR}/src/*.hpp"
+    "${CMAKE_CURRENT_LIST_DIR}/src/*.h"
+)
+set(dnn_plugin_srcs ${dnn_srcs} ${dnn_int_hdrs})
+ocv_list_filterout_ex(dnn_plugin_srcs
+    "/src/dnn.cpp$|/src/dnn_utils.cpp$|/src/dnn_read.cpp$|/src/registry.cpp$|/src/backend.cpp$"
+    # importers
+    "/src/(caffe|darknet|onnx|tensorflow|torch)/"
+    # executors
+    "/src/(cuda|cuda4dnn|ocl4dnn|vkcom|webnn)/"
+)
+
ocv_option(OPENCV_DNN_OPENVINO "Build with OpenVINO support (2021.4+)" (TARGET ocv.3rdparty.openvino))
if(TARGET ocv.3rdparty.openvino AND OPENCV_DNN_OPENVINO)
  if(NOT HAVE_OPENVINO AND NOT HAVE_NGRAPH)
    message(FATAL_ERROR "DNN: Inference Engine is not supported without enabled 'nGraph'. Check build configuration.")
  endif()
-  list(APPEND dnn_runtime_libs ocv.3rdparty.openvino)
+  if("openvino" IN_LIST DNN_PLUGIN_LIST OR DNN_PLUGIN_LIST STREQUAL "all")
+    # plugin doesn't support PCH, separate directory scope is necessary
+    # opencv_world requires absolute path
+    add_subdirectory("${CMAKE_CURRENT_LIST_DIR}/misc/plugin/openvino" "${CMAKE_CURRENT_BINARY_DIR}/dnn_plugin_openvino")
+  elseif(NOT OPENCV_DNN_BUILTIN_BACKEND)
+    list(APPEND dnn_runtime_libs ocv.3rdparty.openvino)
+  endif()
endif()

ocv_glob_module_sources(${sources_options} SOURCES ${fw_srcs} ${webnn_srcs})
@@ -205,7 +234,7 @@ if(BUILD_PERF_TESTS)
  )
  find_package(Caffe QUIET)
  if (Caffe_FOUND)
-    add_definitions(-DHAVE_CAFFE=1)
+    ocv_target_compile_definitions(opencv_perf_dnn PRIVATE "HAVE_CAFFE=1")
    ocv_target_link_libraries(opencv_perf_dnn caffe)
  endif()
elseif(OPENCV_DNN_PERF_CLCAFFE
@@ -213,8 +242,25 @@ if(BUILD_PERF_TESTS)
  )
  find_package(Caffe QUIET)
  if (Caffe_FOUND)
-    add_definitions(-DHAVE_CLCAFFE=1)
+    ocv_target_compile_definitions(opencv_perf_dnn PRIVATE "HAVE_CLCAFFE=1")
    ocv_target_link_libraries(opencv_perf_dnn caffe)
  endif()
endif()
endif()
+
+if(DNN_ENABLE_PLUGINS)
+  ocv_target_compile_definitions(${the_module} PRIVATE ENABLE_PLUGINS)
+  if(TARGET opencv_test_dnn)
+    ocv_target_compile_definitions(opencv_test_dnn PRIVATE ENABLE_PLUGINS)
+  endif()
+  if(OPENCV_DEBUG_POSTFIX)
+    ocv_append_source_file_compile_definitions("${CMAKE_CURRENT_LIST_DIR}/src/backend.cpp" "DEBUG_POSTFIX=${OPENCV_DEBUG_POSTFIX}")
+  endif()
+endif()
+
+ocv_option(OPENCV_TEST_DNN_OPENVINO "Build test with OpenVINO code" (TARGET ocv.3rdparty.openvino))
+if(TARGET ocv.3rdparty.openvino AND OPENCV_TEST_DNN_OPENVINO)
+  if(TARGET opencv_test_dnn)
+    ocv_target_link_libraries(opencv_test_dnn ocv.3rdparty.openvino)
+  endif()
+endif()

diff --git a/modules/dnn/cmake/init.cmake b/modules/dnn/cmake/init.cmake
new file mode 100644
index 0000000000..f4493c53e9
--- /dev/null
+++ b/modules/dnn/cmake/init.cmake
@@ -0,0 +1,29 @@
+if(PROJECT_NAME STREQUAL "OpenCV")
+  set(ENABLE_PLUGINS_DEFAULT ON)
+  if(EMSCRIPTEN OR IOS OR WINRT)
+    set(ENABLE_PLUGINS_DEFAULT OFF)
+  endif()
+  set(DNN_PLUGIN_LIST "" CACHE STRING "List of DNN backends to be compiled as plugins (openvino, etc. or the special value 'all')")
+  set(DNN_ENABLE_PLUGINS "${ENABLE_PLUGINS_DEFAULT}" CACHE BOOL "Allow building and using of DNN plugins")
+  mark_as_advanced(DNN_PLUGIN_LIST DNN_ENABLE_PLUGINS)
+
+  string(REPLACE "," ";" DNN_PLUGIN_LIST "${DNN_PLUGIN_LIST}")  # support comma-separated list (,) too
+  string(TOLOWER "${DNN_PLUGIN_LIST}" DNN_PLUGIN_LIST)
+  if(NOT DNN_ENABLE_PLUGINS)
+    if(DNN_PLUGIN_LIST)
+      message(WARNING "DNN: plugins are disabled through DNN_ENABLE_PLUGINS, so DNN_PLUGIN_LIST='${DNN_PLUGIN_LIST}' is ignored")
+      set(DNN_PLUGIN_LIST "")
+    endif()
+  else()
+    # Make a virtual plugins target
+    if(NOT TARGET opencv_dnn_plugins)
+      add_custom_target(opencv_dnn_plugins ALL)
+    endif()
+  endif()
+endif()
+
+#
+# Detect available dependencies
+#
+
+# OpenVINO - detected by main CMake scripts (shared with G-API)
plugin!") + endif() + + message(STATUS "DNN: add builtin plugin '${name}'") + + set(ENABLE_PRECOMPILED_HEADERS OFF) # no support for PCH in plugins, conflicts with module's source files + + # TODO: update CPU optimizations scripts to support plugins + add_definitions(-D__OPENCV_BUILD=1) + add_definitions(-DBUILD_PLUGIN=1) + include_directories("${OPENCV_MODULE_opencv_dnn_BINARY_DIR}") # Cannot open include file: 'layers/layers_common.simd_declarations.hpp' + + foreach(src ${ARGN}) + if(EXISTS "${CMAKE_CURRENT_LIST_DIR}/src/${src}") + list(APPEND sources "${CMAKE_CURRENT_LIST_DIR}/src/${src}") + elseif(IS_ABSOLUTE "${src}") + list(APPEND sources "${src}") + else() + message(FATAL_ERROR "Unknown source: ${src}") + endif() + endforeach() + + if(OPENCV_MODULE_${the_module}_SOURCES_DISPATCHED) + list(APPEND sources ${OPENCV_MODULE_${the_module}_SOURCES_DISPATCHED}) + endif() + + set(__${name}_DEPS_EXT "") + ocv_compiler_optimization_process_sources(sources __${name}_DEPS_EXT ${name}) + + add_library(${name} MODULE ${sources}) + target_include_directories(${name} PRIVATE "${CMAKE_CURRENT_BINARY_DIR}") + target_link_libraries(${name} PRIVATE ${target} ${__${name}_DEPS_EXT}) + target_link_libraries(${name} PRIVATE ${__plugin_libs}) + + foreach(mod opencv_dnn + opencv_core + opencv_imgproc + opencv_dnn + ) + ocv_target_link_libraries(${name} LINK_PRIVATE ${mod}) + ocv_target_include_directories(${name} "${OPENCV_MODULE_${mod}_LOCATION}/include") + endforeach() + + if(WIN32) + set(OPENCV_PLUGIN_VERSION "${OPENCV_DLLVERSION}" CACHE STRING "") + if(CMAKE_CXX_SIZEOF_DATA_PTR EQUAL 8) + set(OPENCV_PLUGIN_ARCH "_64" CACHE STRING "") + else() + set(OPENCV_PLUGIN_ARCH "" CACHE STRING "") + endif() + else() + set(OPENCV_PLUGIN_VERSION "" CACHE STRING "") + set(OPENCV_PLUGIN_ARCH "" CACHE STRING "") + endif() + + set_target_properties(${name} PROPERTIES + CXX_STANDARD 11 + CXX_VISIBILITY_PRESET hidden + DEBUG_POSTFIX "${OPENCV_DEBUG_POSTFIX}" + OUTPUT_NAME "${name}${OPENCV_PLUGIN_VERSION}${OPENCV_PLUGIN_ARCH}" + ) + + if(WIN32) + set_target_properties(${name} PROPERTIES LIBRARY_OUTPUT_DIRECTORY ${EXECUTABLE_OUTPUT_PATH}) + install(TARGETS ${name} OPTIONAL LIBRARY DESTINATION ${OPENCV_BIN_INSTALL_PATH} COMPONENT plugins) + else() + install(TARGETS ${name} OPTIONAL LIBRARY DESTINATION ${OPENCV_LIB_INSTALL_PATH} COMPONENT plugins) + endif() + + add_dependencies(opencv_dnn_plugins ${name}) + +endfunction() diff --git a/modules/dnn/include/opencv2/dnn/dnn.hpp b/modules/dnn/include/opencv2/dnn/dnn.hpp index 6ed0d6e70c..78c18c15b7 100644 --- a/modules/dnn/include/opencv2/dnn/dnn.hpp +++ b/modules/dnn/include/opencv2/dnn/dnn.hpp @@ -52,6 +52,11 @@ namespace cv { namespace dnn { + +namespace accessor { +class DnnNetAccessor; // forward declaration +} + CV__DNN_INLINE_NS_BEGIN //! @addtogroup dnn //! 
@{ @@ -76,9 +81,11 @@ CV__DNN_INLINE_NS_BEGIN DNN_BACKEND_CUDA, DNN_BACKEND_WEBNN, DNN_BACKEND_TIMVX, -#ifdef __OPENCV_BUILD +#if defined(__OPENCV_BUILD) || defined(BUILD_PLUGIN) +#if !defined(OPENCV_BINDING_PARSER) DNN_BACKEND_INFERENCE_ENGINE_NGRAPH = 1000000, // internal - use DNN_BACKEND_INFERENCE_ENGINE + setInferenceEngineBackendType() DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019, // internal - use DNN_BACKEND_INFERENCE_ENGINE + setInferenceEngineBackendType() +#endif #endif }; @@ -840,8 +847,12 @@ CV__DNN_INLINE_NS_BEGIN */ CV_WRAP int64 getPerfProfile(CV_OUT std::vector& timings); - private: + struct Impl; + inline Impl* getImpl() const { return impl.get(); } + inline Impl& getImplRef() const { CV_DbgAssert(impl); return *impl.get(); } + friend class accessor::DnnNetAccessor; + protected: Ptr impl; }; diff --git a/modules/dnn/misc/plugin/openvino/CMakeLists.txt b/modules/dnn/misc/plugin/openvino/CMakeLists.txt new file mode 100644 index 0000000000..398218484e --- /dev/null +++ b/modules/dnn/misc/plugin/openvino/CMakeLists.txt @@ -0,0 +1,2 @@ +#include_directories("${OPENCV_MODULE_opencv_dnn_BINARY_DIR}") # Cannot open include file: 'layers/layers_common.simd_declarations.hpp' +ocv_create_builtin_dnn_plugin(opencv_dnn_openvino ocv.3rdparty.openvino ${dnn_plugin_srcs}) diff --git a/modules/dnn/misc/python/test/test_dnn.py b/modules/dnn/misc/python/test/test_dnn.py index 272121ba36..82d07f402b 100644 --- a/modules/dnn/misc/python/test/test_dnn.py +++ b/modules/dnn/misc/python/test/test_dnn.py @@ -113,10 +113,10 @@ class dnn_test(NewOpenCVTests): proto = self.find_dnn_file('dnn/layers/layer_convolution.prototxt') model = self.find_dnn_file('dnn/layers/layer_convolution.caffemodel') net = cv.dnn.readNet(proto, model) - net.setPreferableBackend(backend) - net.setPreferableTarget(target) - inp = np.random.standard_normal([1, 2, 10, 11]).astype(np.float32) try: + net.setPreferableBackend(backend) + net.setPreferableTarget(target) + inp = np.random.standard_normal([1, 2, 10, 11]).astype(np.float32) net.setInput(inp) net.forward() except BaseException as e: diff --git a/modules/dnn/src/backend.cpp b/modules/dnn/src/backend.cpp new file mode 100644 index 0000000000..f6c6fecdad --- /dev/null +++ b/modules/dnn/src/backend.cpp @@ -0,0 +1,31 @@ +// This file is part of OpenCV project. +// It is subject to the license terms in the LICENSE file found in the top-level directory +// of this distribution and at http://opencv.org/license.html. +#include "precomp.hpp" +#include "backend.hpp" + +#include + +#include +#include +#ifdef NDEBUG +#define CV_LOG_STRIP_LEVEL CV_LOG_LEVEL_DEBUG + 1 +#else +#define CV_LOG_STRIP_LEVEL CV_LOG_LEVEL_VERBOSE + 1 +#endif +#include + +#include "factory.hpp" + +#include "plugin_api.hpp" +#include "plugin_wrapper.impl.hpp" + + +namespace cv { namespace dnn_backend { + +NetworkBackend::~NetworkBackend() +{ + // nothing +} + +}} // namespace cv::dnn_backend diff --git a/modules/dnn/src/backend.hpp b/modules/dnn/src/backend.hpp new file mode 100644 index 0000000000..37cc8a3cc0 --- /dev/null +++ b/modules/dnn/src/backend.hpp @@ -0,0 +1,43 @@ +// This file is part of OpenCV project. +// It is subject to the license terms in the LICENSE file found in the top-level directory +// of this distribution and at http://opencv.org/license.html. 
+#ifndef OPENCV_DNN_BACKEND_HPP +#define OPENCV_DNN_BACKEND_HPP + +#include +#include + +namespace cv { namespace dnn_backend { + +using namespace cv::dnn; + +class CV_EXPORTS NetworkBackend +{ +public: + virtual ~NetworkBackend(); + + virtual void switchBackend(Net& net) = 0; + + /** + @param loaderID use empty "" for auto + @param model see cv::dnn::readNetwork + @param config see cv::dnn::readNetwork + */ + virtual Net readNetwork(const std::string& loaderID, const std::string& model, const std::string& config) = 0; + + /** @overload */ + virtual Net readNetwork( + const std::string& loaderID, + const uchar* bufferModelConfigPtr, size_t bufferModelConfigSize, + const uchar* bufferWeightsPtr, size_t bufferWeightsSize + ) = 0; + + // TODO: target as string + configuration + virtual bool checkTarget(Target target) = 0; +}; + + +} // namespace dnn_backend +} // namespace cv + +#endif // OPENCV_DNN_BACKEND_HPP diff --git a/modules/dnn/src/dnn_common.hpp b/modules/dnn/src/dnn_common.hpp index b580b9f74b..f5c3cce7ca 100644 --- a/modules/dnn/src/dnn_common.hpp +++ b/modules/dnn/src/dnn_common.hpp @@ -156,6 +156,18 @@ static inline std::string toString(const Mat& blob, const std::string& name = st CV__DNN_INLINE_NS_END + +namespace accessor { +class DnnNetAccessor +{ +public: + static inline Ptr& getImplPtrRef(Net& net) + { + return net.impl; + } +}; +} + }} // namespace #endif // __OPENCV_DNN_COMMON_HPP__ diff --git a/modules/dnn/src/dnn_params.cpp b/modules/dnn/src/dnn_params.cpp index 48e89c6fac..86a43db757 100644 --- a/modules/dnn/src/dnn_params.cpp +++ b/modules/dnn/src/dnn_params.cpp @@ -36,11 +36,7 @@ bool getParam_DNN_OPENCL_ALLOW_ALL_DEVICES() int getParam_DNN_BACKEND_DEFAULT() { static int PARAM_DNN_BACKEND_DEFAULT = (int)utils::getConfigurationParameterSizeT("OPENCV_DNN_BACKEND_DEFAULT", -#ifdef HAVE_INF_ENGINE - (size_t)DNN_BACKEND_INFERENCE_ENGINE -#else (size_t)DNN_BACKEND_OPENCV -#endif ); return PARAM_DNN_BACKEND_DEFAULT; } diff --git a/modules/dnn/src/factory.hpp b/modules/dnn/src/factory.hpp new file mode 100644 index 0000000000..0d8750a8c0 --- /dev/null +++ b/modules/dnn/src/factory.hpp @@ -0,0 +1,30 @@ +// This file is part of OpenCV project. +// It is subject to the license terms in the LICENSE file found in the top-level directory +// of this distribution and at http://opencv.org/license.html. 
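DnnNetAccessor is the single sanctioned way for backend code to reach, and replace, the otherwise private Net::Impl pointer. A simplified sketch of the friend-accessor pattern (types reduced to essentials; not the real signatures):

// Simplified friend-accessor pattern; the real code lives in dnn.hpp
// (friend declaration) and dnn_common.hpp (accessor definition).
#include <memory>

class Net
{
public:
    struct Impl { virtual ~Impl() = default; };
    friend class DnnNetAccessor;   // the only sanctioned entry point
protected:
    std::shared_ptr<Impl> impl;
};

class DnnNetAccessor
{
public:
    static std::shared_ptr<Net::Impl>& getImplPtrRef(Net& net) { return net.impl; }
};

// backend code can now swap the implementation of a live network in place
struct OpenVINOImpl : Net::Impl {};

void switchImpl(Net& net)
{
    DnnNetAccessor::getImplPtrRef(net) = std::make_shared<OpenVINOImpl>();
}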
diff --git a/modules/dnn/src/dnn_params.cpp b/modules/dnn/src/dnn_params.cpp
index 48e89c6fac..86a43db757 100644
--- a/modules/dnn/src/dnn_params.cpp
+++ b/modules/dnn/src/dnn_params.cpp
@@ -36,11 +36,7 @@ bool getParam_DNN_OPENCL_ALLOW_ALL_DEVICES()
int getParam_DNN_BACKEND_DEFAULT()
{
    static int PARAM_DNN_BACKEND_DEFAULT = (int)utils::getConfigurationParameterSizeT("OPENCV_DNN_BACKEND_DEFAULT",
-#ifdef HAVE_INF_ENGINE
-        (size_t)DNN_BACKEND_INFERENCE_ENGINE
-#else
        (size_t)DNN_BACKEND_OPENCV
-#endif
    );
    return PARAM_DNN_BACKEND_DEFAULT;
}

diff --git a/modules/dnn/src/factory.hpp b/modules/dnn/src/factory.hpp
new file mode 100644
index 0000000000..0d8750a8c0
--- /dev/null
+++ b/modules/dnn/src/factory.hpp
@@ -0,0 +1,30 @@
+// This file is part of OpenCV project.
+// It is subject to the license terms in the LICENSE file found in the top-level directory
+// of this distribution and at http://opencv.org/license.html.
+
+#ifndef OPENCV_DNN_FACTORY_HPP
+#define OPENCV_DNN_FACTORY_HPP
+
+#include "backend.hpp"
+
+namespace cv { namespace dnn_backend {
+
+class IDNNBackendFactory
+{
+public:
+    virtual ~IDNNBackendFactory() {}
+    virtual std::shared_ptr createNetworkBackend() const = 0;
+};
+
+//
+// PluginDNNBackendFactory is implemented in plugin_wrapper
+//
+
+std::shared_ptr createPluginDNNBackendFactory(const std::string& baseName);
+
+/// @brief Returns createPluginDNNBackendFactory()->createNetworkBackend()
+cv::dnn_backend::NetworkBackend& createPluginDNNNetworkBackend(const std::string& baseName);
+
+}}  // namespace
+
+#endif  // OPENCV_DNN_FACTORY_HPP
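Callers never instantiate a plugin backend directly: the factory loads the shared library on first use and hands back a cached NetworkBackend instance. A hedged sketch of the calling convention used throughout this patch:

// Sketch of how DNN code consumes a plugin backend via the factory;
// "openvino" is the base name resolved to the opencv_dnn_openvino library.
// Assumes the internal headers "backend.hpp" / "factory.hpp" are in scope.
#include <string>
#include <opencv2/dnn.hpp>

cv::dnn::Net readViaPlugin(const std::string& xml, const std::string& bin)
{
    // throws cv::Exception if the plugin cannot be loaded or is incompatible
    cv::dnn_backend::NetworkBackend& backend =
        cv::dnn_backend::createPluginDNNNetworkBackend("openvino");
    // empty loaderID ("") selects the plugin's default loader
    return backend.readNetwork(std::string(), xml, bin);
}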
diff --git a/modules/dnn/src/ie_ngraph.cpp b/modules/dnn/src/ie_ngraph.cpp
index d2bb2f189c..49b0345816 100644
--- a/modules/dnn/src/ie_ngraph.cpp
+++ b/modules/dnn/src/ie_ngraph.cpp
@@ -988,14 +988,6 @@ InferenceEngine::DataPtr ngraphDataOutputNode(
    return w.dataPtr;
}

-void forwardNgraph(const std::vector >& outBlobsWrappers,
-                   Ptr& node, bool isAsync)
-{
-    CV_Assert(!node.empty());
-    Ptr ieNode = node.dynamicCast();
-    CV_Assert(!ieNode.empty());
-    ieNode->net->forward(outBlobsWrappers, isAsync);
-}

void InfEngineNgraphNet::reset()
{
@@ -1192,12 +1184,6 @@ void InfEngineNgraphNet::forward(const std::vector >& outBlo
    }
}

-#else
-void forwardNgraph(const std::vector >& outBlobsWrappers,
-                   Ptr& node, bool isAsync)
-{
-    CV_Assert(false && "nGraph is not enabled in this OpenCV build");
-}
#endif

}}

diff --git a/modules/dnn/src/ie_ngraph.hpp b/modules/dnn/src/ie_ngraph.hpp
index 0d287a22a5..9ccc182fc8 100644
--- a/modules/dnn/src/ie_ngraph.hpp
+++ b/modules/dnn/src/ie_ngraph.hpp
@@ -158,9 +158,6 @@ private:
#endif  // HAVE_DNN_NGRAPH

-void forwardNgraph(const std::vector >& outBlobsWrappers,
-                   Ptr& node, bool isAsync);
-
}}  // namespace cv::dnn

diff --git a/modules/dnn/src/init.cpp b/modules/dnn/src/init.cpp
index 902b6dae88..f77523916b 100644
--- a/modules/dnn/src/init.cpp
+++ b/modules/dnn/src/init.cpp
@@ -42,7 +42,9 @@
#include "precomp.hpp"
#include
+#if !defined(BUILD_PLUGIN)
#include
+#endif

namespace cv { namespace dnn {
@@ -58,6 +60,7 @@ Mutex& getInitializationMutex()
// force initialization (single-threaded environment)
Mutex* __initialization_mutex_initializer = &getInitializationMutex();

+#if !defined(BUILD_PLUGIN)
namespace {
using namespace google::protobuf;
class ProtobufShutdown {
@@ -71,12 +74,15 @@ public:
    }
};
}  // namespace
+#endif

void initializeLayerFactory()
{
    CV_TRACE_FUNCTION();
+#if !defined(BUILD_PLUGIN)
    static ProtobufShutdown protobufShutdown; CV_UNUSED(protobufShutdown);
+#endif

    CV_DNN_REGISTER_LAYER_CLASS(Slice, SliceLayer);
    CV_DNN_REGISTER_LAYER_CLASS(Split, SplitLayer);

diff --git a/modules/dnn/src/layer_factory.cpp b/modules/dnn/src/layer_factory.cpp
index 5c80cd09ad..e5b835143e 100644
--- a/modules/dnn/src/layer_factory.cpp
+++ b/modules/dnn/src/layer_factory.cpp
@@ -4,8 +4,6 @@
#include "precomp.hpp"

-#include
-
#include  // getLayerFactoryImpl

diff --git a/modules/dnn/src/layer_internals.hpp b/modules/dnn/src/layer_internals.hpp
index 9ded3543e1..f19b99f260 100644
--- a/modules/dnn/src/layer_internals.hpp
+++ b/modules/dnn/src/layer_internals.hpp
@@ -96,21 +96,29 @@ struct LayerData
    int flag;

-    Ptr getLayerInstance()
+
+    void resetAllocation()
    {
-        CV_TRACE_FUNCTION();
-        CV_TRACE_ARG_VALUE(type, "type", type.c_str());
+        if (id == 0)
+            return;  // skip "input" layer (assertion in Net::Impl::allocateLayers)

-        if (layerInstance)
-            return layerInstance;
+        layerInstance.release();
+        outputBlobs.clear();
+        inputBlobs.clear();
+        internals.clear();

-        layerInstance = LayerFactory::createLayerInstance(type, params);
-        if (!layerInstance)
-        {
-            CV_Error(Error::StsError, "Can't create layer \"" + name + "\" of type \"" + type + "\"");
-        }
+        outputBlobsWrappers.clear();
+        inputBlobsWrappers.clear();
+        internalBlobsWrappers.clear();

-        return layerInstance;
+        backendNodes.clear();
+
+        skip = false;
+        flag = 0;
+
+#ifdef HAVE_CUDA
+        cudaD2HBackgroundTransfers.clear();
+#endif
    }
};

diff --git a/modules/dnn/src/legacy_backend.cpp b/modules/dnn/src/legacy_backend.cpp
index fa9407aacd..431c597fab 100644
--- a/modules/dnn/src/legacy_backend.cpp
+++ b/modules/dnn/src/legacy_backend.cpp
@@ -75,11 +75,7 @@ Ptr wrapMat(int backendId, int targetId, cv::Mat& m)
    }
    else if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
    {
-#ifdef HAVE_DNN_NGRAPH
-        return Ptr(new NgraphBackendWrapper(targetId, m));
-#else
-        CV_Error(Error::StsNotImplemented, "This OpenCV version is built without support of OpenVINO / Inference Engine + nGraph");
-#endif
+        CV_Assert(0 && "Internal error: DNN_BACKEND_INFERENCE_ENGINE_NGRAPH must be implemented through inheritance");
    }
    else if (backendId == DNN_BACKEND_WEBNN)
    {

diff --git a/modules/dnn/src/net.cpp b/modules/dnn/src/net.cpp
index 33f22744b8..b3cf811a94 100644
--- a/modules/dnn/src/net.cpp
+++ b/modules/dnn/src/net.cpp
@@ -120,7 +120,7 @@ Net Net::quantize(InputArrayOfArrays calibData, int inputsDtype, int outputsDtyp
    CV_TRACE_FUNCTION();
    CV_Assert(impl);
    CV_Assert(!empty());
-    return impl->quantize(calibData, inputsDtype, outputsDtype, perChannel);
+    return impl->quantize(*this, calibData, inputsDtype, outputsDtype, perChannel);
}

// FIXIT drop from inference API
@@ -146,7 +146,7 @@ void Net::setPreferableBackend(int backendId)
    CV_TRACE_FUNCTION();
    CV_TRACE_ARG(backendId);
    CV_Assert(impl);
-    return impl->setPreferableBackend(backendId);
+    return impl->setPreferableBackend(*this, backendId);
}

void Net::setPreferableTarget(int targetId)

diff --git a/modules/dnn/src/net_impl.cpp b/modules/dnn/src/net_impl.cpp
index 24fb31f03e..3839cba329 100644
--- a/modules/dnn/src/net_impl.cpp
+++ b/modules/dnn/src/net_impl.cpp
@@ -30,6 +30,12 @@ std::string detail::NetImplBase::getDumpFileNameBase() const
}

+Net::Impl::~Impl()
+{
+    // nothing
+}
+
+
Net::Impl::Impl()
{
    // allocate fake net input layer
@@ -46,9 +52,8 @@ Net::Impl::Impl()
    netWasQuantized = false;
    fusion = true;
    isAsync = false;
-    preferableBackend = DNN_BACKEND_DEFAULT;
+    preferableBackend = (Backend)getParam_DNN_BACKEND_DEFAULT();
    preferableTarget = DNN_TARGET_CPU;
-    skipInfEngineInit = false;
    hasDynamicShapes = false;
}
@@ -86,22 +91,10 @@ void Net::Impl::clear()
}

-void Net::Impl::setUpNet(const std::vector& blobsToKeep_)
+void Net::Impl::validateBackendAndTarget()
{
    CV_TRACE_FUNCTION();

-    if (dumpLevel && networkDumpCounter == 0)
-    {
-        dumpNetworkToFile();
-    }
-
-    if (preferableBackend == DNN_BACKEND_DEFAULT)
-        preferableBackend = (Backend)getParam_DNN_BACKEND_DEFAULT();
-#ifdef HAVE_INF_ENGINE
-    if (preferableBackend == DNN_BACKEND_INFERENCE_ENGINE)
-        preferableBackend = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH;  // = getInferenceEngineBackendTypeParam();
-#endif
-
    CV_Assert(preferableBackend != DNN_BACKEND_OPENCV ||
              preferableTarget == DNN_TARGET_CPU ||
              preferableTarget == DNN_TARGET_OPENCL ||
@@ -109,19 +102,6 @@ void Net::Impl::setUpNet(const std::vector& blobsToKeep_)
    CV_Assert(preferableBackend != DNN_BACKEND_HALIDE ||
              preferableTarget == DNN_TARGET_CPU ||
              preferableTarget == DNN_TARGET_OPENCL);
-#ifdef HAVE_INF_ENGINE
-    if (preferableBackend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
-    {
-        CV_Assert(
-            (preferableTarget == DNN_TARGET_CPU && (!isArmComputePlugin() || preferableBackend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)) ||
-            preferableTarget == DNN_TARGET_OPENCL ||
-            preferableTarget == DNN_TARGET_OPENCL_FP16 ||
-            preferableTarget == DNN_TARGET_MYRIAD ||
-            preferableTarget == DNN_TARGET_HDDL ||
-            preferableTarget == DNN_TARGET_FPGA
-        );
-    }
-#endif
#ifdef HAVE_WEBNN
    if (preferableBackend == DNN_BACKEND_WEBNN)
    {
@@ -136,6 +116,20 @@ void Net::Impl::setUpNet(const std::vector& blobsToKeep_)
    CV_Assert(preferableBackend != DNN_BACKEND_TIMVX ||
              preferableTarget == DNN_TARGET_NPU);

+    CV_Assert(preferableBackend != DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && "Inheritance internal error");
+}
+
+void Net::Impl::setUpNet(const std::vector& blobsToKeep_)
+{
+    CV_TRACE_FUNCTION();
+
+    if (dumpLevel && networkDumpCounter == 0)
+    {
+        dumpNetworkToFile();
+    }
+
+    validateBackendAndTarget();
+
    if (!netWasAllocated || this->blobsToKeep != blobsToKeep_)
    {
        if (preferableBackend == DNN_BACKEND_OPENCV && IS_DNN_OPENCL_TARGET(preferableTarget))
@@ -228,14 +222,14 @@ void Net::Impl::setUpNet(const std::vector& blobsToKeep_)
Ptr Net::Impl::getLayer(int layerId) const
{
    LayerData& ld = getLayerData(layerId);
-    return ld.getLayerInstance();
+    return getLayerInstance(ld);
}

Ptr Net::Impl::getLayer(const LayerId& layerId) const
{
    LayerData& ld = getLayerData(layerId);
-    return ld.getLayerInstance();
+    return getLayerInstance(ld);
}
@@ -327,7 +321,7 @@ int Net::Impl::resolvePinOutputName(LayerData& ld, const String& outName) const
{
    if (outName.empty())
        return 0;
-    return ld.getLayerInstance()->outputNameToIndex(outName);
+    return getLayerInstance(ld)->outputNameToIndex(outName);
}
@@ -528,7 +522,7 @@ void Net::Impl::allocateLayer(int lid, const LayersShapesMap& layersShapes)
    for (int i = 0; i < ld.internalBlobsWrappers.size(); ++i)
        ld.internalBlobsWrappers[i] = wrap(ld.internals[i]);

-    Ptr layerPtr = ld.getLayerInstance();
+    Ptr layerPtr = getLayerInstance(ld);
    {
        std::vector inps(ld.inputBlobs.size());
        for (int i = 0; i < ld.inputBlobs.size(); ++i)
@@ -813,12 +807,10 @@ void Net::Impl::forwardLayer(LayerData& ld)
    {
        forwardHalide(ld.outputBlobsWrappers, node);
    }
-#ifdef HAVE_INF_ENGINE
    else if (preferableBackend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
    {
-        forwardNgraph(ld.outputBlobsWrappers, node, isAsync);
+        CV_Assert(preferableBackend != DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && "Inheritance internal error");
    }
-#endif
    else if (preferableBackend == DNN_BACKEND_WEBNN)
    {
        forwardWebnn(ld.outputBlobsWrappers, node, isAsync);
@@ -844,7 +836,7 @@ void Net::Impl::forwardLayer(LayerData& ld)
#endif
    else
    {
-        CV_Error(Error::StsNotImplemented, "Unknown backend identifier");
+        CV_Error(Error::StsNotImplemented, cv::format("Unknown backend identifier: %d", preferableBackend));
    }
}
@@ -1156,7 +1148,7 @@ void Net::Impl::getLayerShapesRecursively(int id, LayersShapesMap& inOutShapes)
    ShapesVec& os = layerShapes.out;
    ShapesVec& ints = layerShapes.internal;
    int requiredOutputs = layerData.requiredOutputs.size();
-    Ptr l = layerData.getLayerInstance();
+    const Ptr& l = getLayerInstance(layerData);
    CV_Assert(l);
    bool layerSupportInPlace = false;
    try
@@ -1310,7 +1302,7 @@ void Net::Impl::updateLayersShapes()
            const MatShape& shape = layersShapes[inputLayerId].out[inputPin.oid];
            layerShapes.in.push_back(shape);
        }
-        layerData.getLayerInstance()->updateMemoryShapes(layerShapes.in);
+        getLayerInstance(layerData)->updateMemoryShapes(layerShapes.in);
    }
    CV_LOG_DEBUG(NULL, "Layer " << layerId << ": " << toString(layerShapes.in, "input shapes"));
    CV_LOG_IF_DEBUG(NULL, !layerShapes.out.empty(), "Layer " << layerId << ": " << toString(layerShapes.out, "output shapes"));
@@ -1369,30 +1361,7 @@ Mat Net::Impl::getBlob(String outputName) const
AsyncArray Net::Impl::getBlobAsync(const LayerPin& pin)
{
    CV_TRACE_FUNCTION();
-#ifdef HAVE_INF_ENGINE
-    if (!pin.valid())
-        CV_Error(Error::StsObjectNotFound, "Requested blob not found");
-
-    LayerData& ld = layers[pin.lid];
-    if ((size_t)pin.oid >= ld.outputBlobs.size())
-    {
-        CV_Error(Error::StsOutOfRange, format("Layer \"%s\" produce only %d outputs, "
-                                              "the #%d was requested",
-                                              ld.name.c_str(), (int)ld.outputBlobs.size(), (int)pin.oid));
-    }
-    if (preferableTarget != DNN_TARGET_CPU)
-    {
-        CV_Assert(!ld.outputBlobsWrappers.empty() && !ld.outputBlobsWrappers[pin.oid].empty());
-        // Transfer data to CPU if it's require.
-        ld.outputBlobsWrappers[pin.oid]->copyToHost();
-    }
-    CV_Assert(preferableBackend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH);
-
-    Ptr wrapper = ld.outputBlobsWrappers[pin.oid].dynamicCast();
-    return std::move(wrapper->futureMat);
-#else
    CV_Error(Error::StsNotImplemented, "DNN: OpenVINO/nGraph backend is required");
-#endif  // HAVE_INF_ENGINE
}
@@ -1482,7 +1451,7 @@ void Net::Impl::setInput(InputArray blob, const String& name, double scalefactor
Mat Net::Impl::getParam(int layer, int numParam) const
{
    LayerData& ld = getLayerData(layer);
-    std::vector& layerBlobs = ld.getLayerInstance()->blobs;
+    std::vector& layerBlobs = getLayerInstance(ld)->blobs;
    CV_Assert(numParam < (int)layerBlobs.size());
    return layerBlobs[numParam];
}
@@ -1491,7 +1460,8 @@ void Net::Impl::setParam(int layer, int numParam, const Mat& blob)
{
    LayerData& ld = getLayerData(layer);

-    std::vector& layerBlobs = ld.getLayerInstance()->blobs;
+    // FIXIT we should not modify "execution" instance
+    std::vector& layerBlobs = getLayerInstance(ld)->blobs;
    CV_Assert(numParam < (int)layerBlobs.size());
    // we don't make strong checks, use this function carefully
    layerBlobs[numParam] = blob;
@@ -1958,7 +1928,7 @@ int64 Net::Impl::getFLOPS(const std::vector& netInputShapes) /*const*/
    for (int i = 0; i < ids.size(); i++)
    {
-        flops += layers[ids[i]].getLayerInstance()->getFLOPS(inShapes[i], outShapes[i]);
+        flops += getLayerInstance(layers[ids[i]])->getFLOPS(inShapes[i], outShapes[i]);
    }

    return flops;
@@ -1975,7 +1945,7 @@ int64 Net::Impl::getFLOPS(
    LayerShapes shapes;
    getLayerShapes(netInputShapes, layerId, shapes);

-    return const_cast(layer->second).getLayerInstance()->getFLOPS(shapes.in, shapes.out);
+    return getLayerInstance(const_cast(layer->second))->getFLOPS(shapes.in, shapes.out);
}
diff --git a/modules/dnn/src/net_impl.hpp b/modules/dnn/src/net_impl.hpp
index 5f0563d3c3..290ce50c13 100644
--- a/modules/dnn/src/net_impl.hpp
+++ b/modules/dnn/src/net_impl.hpp
@@ -38,7 +38,12 @@ struct Net::Impl : public detail::NetImplBase
    typedef std::map LayersShapesMap;
    typedef std::map MapIdToLayerData;

+    virtual ~Impl();
    Impl();
+    Impl(const Impl&) = delete;
+
+    // Inheritance support
+    Ptr basePtr_;

    Ptr netInputLayer;
    std::vector blobsToKeep;
@@ -49,7 +54,6 @@ struct Net::Impl : public detail::NetImplBase
    int preferableBackend;
    int preferableTarget;
    String halideConfigFile;
-    bool skipInfEngineInit;
    bool hasDynamicShapes;
    // Map host data to backend specific wrapper.
    std::map> backendWrappers;
@@ -59,23 +63,52 @@ struct Net::Impl : public detail::NetImplBase
    bool netWasAllocated;
    bool netWasQuantized;
    bool fusion;
-    bool isAsync;
+    bool isAsync;  // FIXIT: drop
    std::vector layersTimings;

-    bool empty() const;
-    void setPreferableBackend(int backendId);
-    void setPreferableTarget(int targetId);
+    virtual bool empty() const;
+    virtual void setPreferableBackend(Net& net, int backendId);
+    virtual void setPreferableTarget(int targetId);

    // FIXIT use inheritance
-    Ptr wrap(Mat& host);
+    virtual Ptr wrap(Mat& host);

-    void clear();
+    virtual void clear();
+
+
+    virtual void validateBackendAndTarget();
    void setUpNet(const std::vector& blobsToKeep_ = std::vector());

+    virtual Ptr createLayerInstance(const LayerData& ld) const
+    {
+        return LayerFactory::createLayerInstance(ld.type, const_cast(ld.params));
+    }
+    Ptr getLayerInstance(LayerData& ld) const
+    {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(type, "type", ld.type.c_str());
+
+        if (ld.layerInstance)
+            return ld.layerInstance;
+
+        ld.layerInstance = createLayerInstance(ld);
+        if (!ld.layerInstance && basePtr_)
+        {
+            ld.layerInstance = basePtr_->createLayerInstance(ld);
+            CV_LOG_IF_DEBUG(NULL, ld.layerInstance, "Created layer \"" + ld.name + "\" of type \"" + ld.type + "\" from upstream layers registry");
+        }
+        if (!ld.layerInstance)
+        {
+            CV_Error(Error::StsError, "Can't create layer \"" + ld.name + "\" of type \"" + ld.type + "\"");
+        }
+
+        return ld.layerInstance;
+    }
+
    Ptr getLayer(int layerId) const;
    Ptr getLayer(const LayerId& layerId) const;
@@ -118,7 +151,7 @@ struct Net::Impl : public detail::NetImplBase
    void setInputsNames(const std::vector& inputBlobNames);
    void setInputShape(const String& inputName, const MatShape& shape);
-    void setInput(InputArray blob, const String& name, double scalefactor, const Scalar& mean);
+    virtual void setInput(InputArray blob, const String& name, double scalefactor, const Scalar& mean);
    Mat getParam(int layer, int numParam) const;
    void setParam(int layer, int numParam, const Mat& blob);
    std::vector> getLayerInputs(int layerId) const;
@@ -130,8 +163,7 @@ struct Net::Impl : public detail::NetImplBase
    int getLayersCount(const String& layerType) const;

-    // FIXIT use inheritance
-    void initBackend(const std::vector& blobsToKeep_);
+    virtual void initBackend(const std::vector& blobsToKeep_);

    void setHalideScheduler(const String& scheduler);
#ifdef HAVE_HALIDE
@@ -139,11 +171,6 @@
    void initHalideBackend();
#endif

-#ifdef HAVE_DNN_NGRAPH
-    void addNgraphOutputs(LayerData& ld);
-    void initNgraphBackend(const std::vector& blobsToKeep_);
-#endif
-
#ifdef HAVE_WEBNN
    void addWebnnOutputs(LayerData& ld);
    void initWebnnBackend(const std::vector& blobsToKeep_);
#endif
@@ -183,11 +210,11 @@ struct Net::Impl : public detail::NetImplBase
    // TODO add getter
    void enableFusion(bool fusion_);

-    void fuseLayers(const std::vector& blobsToKeep_);
+    virtual void fuseLayers(const std::vector& blobsToKeep_);

    void allocateLayers(const std::vector& blobsToKeep_);

-    void forwardLayer(LayerData& ld);
+    virtual void forwardLayer(LayerData& ld);

    void forwardToLayer(LayerData& ld, bool clearFlags = true);
@@ -243,22 +270,17 @@ struct Net::Impl : public detail::NetImplBase
    Mat getBlob(String outputName) const;

#ifdef CV_CXX11
-    AsyncArray getBlobAsync(const LayerPin& pin);
+    virtual AsyncArray getBlobAsync(const LayerPin& pin);

    AsyncArray getBlobAsync(String outputName);
#endif  // CV_CXX11

-#ifdef HAVE_INF_ENGINE
-    static
-    Net createNetworkFromModelOptimizer(InferenceEngine::CNNNetwork& ieNet);
-#endif
-
    string dump(bool forceAllocation = false) const;
    void dumpNetworkToFile() const;

    // FIXIT drop from inference API
-    Net quantize(InputArrayOfArrays calibData, int inputsDtype, int outputsDtype, bool perChannel) /*const*/;
+    Net quantize(Net& net, InputArrayOfArrays calibData, int inputsDtype, int outputsDtype, bool perChannel) /*const*/;
    void getInputDetails(std::vector& scales, std::vector& zeropoints) /*const*/;
    void getOutputDetails(std::vector& scales, std::vector& zeropoints) /*const*/;

diff --git a/modules/dnn/src/net_impl_backend.cpp b/modules/dnn/src/net_impl_backend.cpp
index e26126d86c..1d313c70c4 100644
--- a/modules/dnn/src/net_impl_backend.cpp
+++ b/modules/dnn/src/net_impl_backend.cpp
@@ -7,6 +7,9 @@
#include "net_impl.hpp"
#include "legacy_backend.hpp"

+#include "backend.hpp"
+#include "factory.hpp"
+
namespace cv { namespace dnn {
CV__DNN_INLINE_NS_BEGIN
@@ -109,11 +112,7 @@ void Net::Impl::initBackend(const std::vector& blobsToKeep_)
    }
    else if (preferableBackend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
    {
-#ifdef HAVE_DNN_NGRAPH
-        initNgraphBackend(blobsToKeep_);
-#else
-        CV_Error(Error::StsNotImplemented, "This OpenCV version is built without support of OpenVINO");
-#endif
+        CV_Assert(0 && "Inheritance must be used with OpenVINO backend");
    }
    else if (preferableBackend == DNN_BACKEND_WEBNN)
    {
@@ -154,26 +153,38 @@ void Net::Impl::initBackend(const std::vector& blobsToKeep_)
}

-void Net::Impl::setPreferableBackend(int backendId)
+void Net::Impl::setPreferableBackend(Net& net, int backendId)
{
    if (backendId == DNN_BACKEND_DEFAULT)
        backendId = (Backend)getParam_DNN_BACKEND_DEFAULT();

+    if (backendId == DNN_BACKEND_INFERENCE_ENGINE)
+        backendId = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH;  // = getInferenceEngineBackendTypeParam();
+
    if (netWasQuantized && backendId != DNN_BACKEND_OPENCV && backendId != DNN_BACKEND_TIMVX)
    {
        CV_LOG_WARNING(NULL, "DNN: Only default and TIMVX backends support quantized networks");
        backendId = DNN_BACKEND_OPENCV;
    }

-#ifdef HAVE_INF_ENGINE
-    if (backendId == DNN_BACKEND_INFERENCE_ENGINE)
-        backendId = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH;
-#endif

    if (preferableBackend != backendId)
    {
-        preferableBackend = backendId;
        clear();
+        if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
+        {
+#if defined(HAVE_INF_ENGINE)
+            switchToOpenVINOBackend(net);
+#elif defined(ENABLE_PLUGINS)
+            auto& networkBackend = dnn_backend::createPluginDNNNetworkBackend("openvino");
+            networkBackend.switchBackend(net);
+#else
+            CV_Error(Error::StsNotImplemented, "OpenVINO backend is not available in the current OpenCV build");
+#endif
+        }
+        else
+        {
+            preferableBackend = backendId;
+        }
    }
}

diff --git a/modules/dnn/src/net_impl_fuse.cpp b/modules/dnn/src/net_impl_fuse.cpp
index deb356f7b9..79365d0411 100644
--- a/modules/dnn/src/net_impl_fuse.cpp
+++ b/modules/dnn/src/net_impl_fuse.cpp
@@ -634,7 +634,7 @@ void Net::Impl::fuseLayers(const std::vector& blobsToKeep_)
                pin = inp_i_data->inputBlobsId[0];
                inp_i_data = &layers[pin.lid];
            }
-            conv_layer = conv_layer && (inp_i_data->getLayerInstance()->type == "Convolution");
+            conv_layer = conv_layer && (getLayerInstance(*inp_i_data)->type == "Convolution");
        }
        if (!conv_layer)
            continue;
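The net effect of the virtual methods added above: setPreferableBackend() can re-target an existing Net by swapping its Impl for a backend-specific subclass that keeps the original graph alive through basePtr_. A condensed sketch of the pattern (names simplified; not the real class layout):

// Condensed model of the Impl-inheritance used for backend switching.
#include <memory>
#include <stdexcept>
#include <utility>

struct NetImpl
{
    virtual ~NetImpl() = default;
    virtual void initBackend() { /* generic OpenCV path */ }
};

struct NetImplOpenVINO : NetImpl
{
    std::shared_ptr<NetImpl> basePtr_;   // original graph owner, kept alive
    explicit NetImplOpenVINO(std::shared_ptr<NetImpl> base)
        : basePtr_(std::move(base))
    {
        // the real init() copies layers/names from *basePtr_ and resets
        // per-layer allocations (LayerData::resetAllocation)
    }
    void initBackend() override { /* build OpenVINO-specific nodes */ }
};

void switchToOpenVINO(std::shared_ptr<NetImpl>& implRef)
{
    if (!implRef)
        throw std::runtime_error("empty network");
    implRef = std::make_shared<NetImplOpenVINO>(implRef);  // re-target in place
}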
"factory.hpp" + namespace cv { namespace dnn { CV__DNN_INLINE_NS_BEGIN #ifdef HAVE_INF_ENGINE +// TODO: use "string" target specifier +class NetImplOpenVINO CV_FINAL : public Net::Impl +{ +public: + typedef Net::Impl Base; + + // this default constructor is used with OpenVINO native loader + // TODO: dedicated Impl? + NetImplOpenVINO() + : Net::Impl() + { + preferableBackend = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH; + } + + // constructor to derive execution implementation from the loaded network + explicit NetImplOpenVINO(const Ptr& basePtr) + : Net::Impl() + { + basePtr_ = basePtr; + init(); + } + + void init() + { + CV_TRACE_FUNCTION(); + CV_Assert(basePtr_); + Net::Impl& base = *basePtr_; + CV_Assert(!base.netWasAllocated); + CV_Assert(!base.netWasQuantized); + netInputLayer = base.netInputLayer; + blobsToKeep = base.blobsToKeep; + layers = base.layers; + for (MapIdToLayerData::iterator it = layers.begin(); it != layers.end(); it++) + { + LayerData& ld = it->second; + ld.resetAllocation(); + } + layerNameToId = base.layerNameToId; + outputNameToId = base.outputNameToId; + //blobManager = base.blobManager; + preferableBackend = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH; //base.preferableBackend; + preferableTarget = base.preferableTarget; + hasDynamicShapes = base.hasDynamicShapes; + CV_Assert(base.backendWrappers.empty()); //backendWrappers = base.backendWrappers; + lastLayerId = base.lastLayerId; + netWasAllocated = base.netWasAllocated; + netWasQuantized = base.netWasQuantized; + fusion = base.fusion; + } + + + //bool isAsync; // FIXIT: drop + + + bool empty() const override + { + return Base::empty(); + } + void setPreferableBackend(Net& net, int backendId) override + { + if (backendId == DNN_BACKEND_INFERENCE_ENGINE || backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH) + return; // no-op + if (!basePtr_) + CV_Error(Error::StsError, "DNN: Can't switch backend of network created by OpenVINO native loader"); + Ptr& impl_ptr_ref = accessor::DnnNetAccessor::getImplPtrRef(net); + impl_ptr_ref = basePtr_; + basePtr_->setPreferableBackend(net, backendId); + } + + void setPreferableTarget(int targetId) override + { + if (preferableTarget != targetId) + { + preferableTarget = targetId; + clear(); + } + } + + Ptr wrap(Mat& host) override + { + return Ptr(new NgraphBackendWrapper(preferableTarget, host)); + } + + + void clear() override + { + Base::clear(); + } + + void validateBackendAndTarget() override + { + CV_TRACE_FUNCTION(); + + CV_Assert(preferableBackend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH); + CV_Check((int)preferableTarget, + preferableTarget == DNN_TARGET_CPU || + preferableTarget == DNN_TARGET_OPENCL || + preferableTarget == DNN_TARGET_OPENCL_FP16 || + preferableTarget == DNN_TARGET_MYRIAD || + preferableTarget == DNN_TARGET_HDDL || + preferableTarget == DNN_TARGET_FPGA, + "Unknown OpenVINO target" + ); + } + + Ptr createLayerInstance(const LayerData& ld) const override + { + // try to create layer instance from backend-specific pool (e.g., plugin) + Ptr instance = LayerFactory::createLayerInstance(ld.type, const_cast(ld.params)); + if (!instance) + instance = Base::createLayerInstance(ld); + return instance; + } + + void addNgraphOutputs(LayerData& ld); + + void initBackend(const std::vector& blobsToKeep_) override; + + void fuseLayers(const std::vector& blobsToKeep_) override; + + void forwardLayer(LayerData& ld) override; + + AsyncArray getBlobAsync(const LayerPin& pin) override; + + //string dump(bool forceAllocation = false) const override; + + static + Net 
createNetworkFromModelOptimizer(InferenceEngine::CNNNetwork& ieNet); + +}; // NetImplOpenVINO + + +void NetImplOpenVINO::forwardLayer(LayerData& ld) +{ + CV_TRACE_FUNCTION(); + + Ptr layer = ld.layerInstance; + + if (!ld.skip) + { + auto it = ld.backendNodes.find(preferableBackend); + if (ld.id == 0 || // input layer + it == ld.backendNodes.end() // non-supported layer or its mode + ) + { + return Base::forwardLayer(ld); + } + + CV_Assert(it != ld.backendNodes.end()); + const Ptr& node = it->second; + CV_Assert(!node.empty()); + Ptr ieNode = node.dynamicCast(); + CV_Assert(!ieNode.empty()); + CV_Assert(ieNode->net); + + TickMeter tm; + tm.start(); + + ieNode->net->forward(ld.outputBlobsWrappers, isAsync); + + tm.stop(); + int64 t = tm.getTimeTicks(); + layersTimings[ld.id] = (t > 0) ? t : 1; // zero for skipped layers only + } + else + { + layersTimings[ld.id] = 0; + } + + ld.flag = 1; +} + +AsyncArray NetImplOpenVINO::getBlobAsync(const LayerPin& pin) +{ + CV_TRACE_FUNCTION(); + if (!pin.valid()) + CV_Error(Error::StsObjectNotFound, "Requested blob not found"); + + LayerData& ld = layers[pin.lid]; + if ((size_t)pin.oid >= ld.outputBlobs.size()) + { + CV_Error(Error::StsOutOfRange, format("Layer \"%s\" produce only %d outputs, " + "the #%d was requested", + ld.name.c_str(), (int)ld.outputBlobs.size(), (int)pin.oid)); + } + if (preferableTarget != DNN_TARGET_CPU) + { + CV_Assert(!ld.outputBlobsWrappers.empty() && !ld.outputBlobsWrappers[pin.oid].empty()); + // Transfer data to CPU if it's require. + ld.outputBlobsWrappers[pin.oid]->copyToHost(); + } + CV_Assert(preferableBackend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH); + + Ptr wrapper = ld.outputBlobsWrappers[pin.oid].dynamicCast(); + return std::move(wrapper->futureMat); +} + /** mark input pins as outputs from other subnetworks * FIXIT must be done by DNN engine not ngraph. */ -void Net::Impl::addNgraphOutputs(LayerData& ld) +void NetImplOpenVINO::addNgraphOutputs(LayerData& ld) { CV_TRACE_FUNCTION(); @@ -59,7 +259,7 @@ void Net::Impl::addNgraphOutputs(LayerData& ld) } } -void Net::Impl::initNgraphBackend(const std::vector& blobsToKeep_) +void NetImplOpenVINO::initBackend(const std::vector& blobsToKeep_) { CV_TRACE_FUNCTION(); CV_CheckEQ(preferableBackend, DNN_BACKEND_INFERENCE_ENGINE_NGRAPH, ""); @@ -92,7 +292,7 @@ void Net::Impl::initNgraphBackend(const std::vector& blobsToKeep_) } } - if (skipInfEngineInit) + if (!basePtr_) // model is loaded by OpenVINO { Ptr node = layers[lastLayerId].backendNodes[preferableBackend]; CV_Assert(!node.empty()); @@ -399,10 +599,104 @@ void Net::Impl::initNgraphBackend(const std::vector& blobsToKeep_) } } -//} // Net::Impl + +#if 0 +#define printf_(args) printf args +#else +#define printf_(args) +#endif + +void NetImplOpenVINO::fuseLayers(const std::vector& blobsToKeep_) +{ + CV_TRACE_FUNCTION(); + + if(!fusion) + return; + + CV_Check((int)preferableBackend, preferableBackend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH, ""); + +#if 0 // FIXIT mode without fusion is broken due to unsupported layers and handling of "custom" nodes + return; +#endif + + // scan through all the layers. 
+    // we try to embed this activation into the convolution and disable separate execution of the activation
+
+    // FIXIT replace by layersToKeep to avoid hacks like "LayerPin(lid, 0)"
+    std::set pinsToKeep(blobsToKeep_.begin(),
+                        blobsToKeep_.end());
+    for (MapIdToLayerData::const_iterator it = layers.begin(); it != layers.end(); it++)
+    {
+        int lid = it->first;
+        LayerData& ld = layers[lid];
+        if (ld.skip)
+        {
+            printf_(("skipped %s: %s\n", ld.layerInstance->name.c_str(), ld.layerInstance->type.c_str()));
+            continue;
+        }
+        printf_(("analyzing %s: %s\n", ld.layerInstance->name.c_str(), ld.layerInstance->type.c_str()));
+
+        // the optimization #1. try to fuse batch norm, scaling and/or activation layers
+        // with the current layer if they follow it. Normally, they are fused with the convolution layer,
+        // but some of them (like activation) may be fused with fully-connected, elemwise (+) and
+        // some other layers.
+        Ptr& currLayer = ld.layerInstance;
+        if (ld.consumers.size() == 1 && pinsToKeep.count(LayerPin(lid, 0)) == 0)
+        {
+            LayerData* nextData = &layers[ld.consumers[0].lid];
+            LayerPin lpNext(ld.consumers[0].lid, 0);
+            while (nextData)
+            {
+                if (preferableBackend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && pinsToKeep.count(lpNext) != 0)
+                {
+                    CV_LOG_DEBUG(NULL, "DNN/IE: skip fusing with 'output' node: " << nextData->name << "@" << nextData->type);
+                    break;
+                }
+
+                /* we use `tryFuse` member of convolution layer to fuse eltwise later
+                 * it's not intended to be fused here; hence, we stop when we encounter eltwise
+                 */
+                Ptr nextLayer = nextData->layerInstance;
+                if (currLayer->tryFuse(nextLayer))
+                {
+                    printf_(("\tfused with %s\n", nextLayer->name.c_str()));
+                    nextData->skip = true;
+                    ld.outputBlobs = layers[lpNext.lid].outputBlobs;
+                    ld.outputBlobsWrappers = layers[lpNext.lid].outputBlobsWrappers;
+                    if (nextData->consumers.size() == 1)
+                    {
+                        int nextLayerId = nextData->consumers[0].lid;
+                        nextData = &layers[nextLayerId];
+                        lpNext = LayerPin(nextLayerId, 0);
+                    }
+                    else
+                    {
+                        nextData = 0;
+                        break;
+                    }
+                }
+                else
+                    break;
+            }
+        }
+    }
+}
+
+
+
+void switchToOpenVINOBackend(Net& net)
+{
+    CV_TRACE_FUNCTION();
+    Ptr& impl_ptr_ref = accessor::DnnNetAccessor::getImplPtrRef(net);
+    CV_Assert(impl_ptr_ref);
+    CV_LOG_INFO(NULL, "DNN: switching to OpenVINO backend... (networkID=" << impl_ptr_ref->networkId << ")");
+    Ptr openvino_impl_ptr = makePtr(impl_ptr_ref);
+    impl_ptr_ref = openvino_impl_ptr;
+}
+

/*static*/
-Net Net::Impl::createNetworkFromModelOptimizer(InferenceEngine::CNNNetwork& ieNet)
+Net NetImplOpenVINO::createNetworkFromModelOptimizer(InferenceEngine::CNNNetwork& ieNet)
{
    CV_TRACE_FUNCTION();
@@ -418,6 +712,10 @@ Net Net::Impl::createNetworkFromModelOptimizer(InferenceEngine::CNNNetwork& ieNe
    }

    Net cvNet;
+    Ptr openvino_impl_ptr = makePtr();
+    NetImplOpenVINO& openvino_impl = *openvino_impl_ptr;
+    accessor::DnnNetAccessor::getImplPtrRef(cvNet) = openvino_impl_ptr;
+
    cvNet.setInputsNames(inputsNames);

    // set empty input to determine input shapes
@@ -432,7 +730,7 @@ Net Net::Impl::createNetworkFromModelOptimizer(InferenceEngine::CNNNetwork& ieNe
    {
        auto fake_node = std::make_shared(ngraph::element::f32, ngraph::Shape {});
        Ptr backendNodeNGraph(new InfEngineNgraphNode(fake_node));
-        backendNodeNGraph->net = Ptr(new InfEngineNgraphNet(*(cvNet.impl), ieNet));
+        backendNodeNGraph->net = Ptr(new InfEngineNgraphNet(openvino_impl, ieNet));
        backendNode = backendNodeNGraph;
    }
@@ -450,7 +748,7 @@ Net Net::Impl::createNetworkFromModelOptimizer(InferenceEngine::CNNNetwork& ieNe
        LayerParams lp;
        int lid = cvNet.addLayer(it.first, "", lp);

-        LayerData& ld = cvNet.impl->layers[lid];
+        LayerData& ld = openvino_impl.layers[lid];
        {
            Ptr cvLayer(new NgraphBackendLayer(ieNet));
@@ -498,26 +796,72 @@ Net Net::Impl::createNetworkFromModelOptimizer(InferenceEngine::CNNNetwork& ieNe
    cvNet.setPreferableBackend(DNN_BACKEND_INFERENCE_ENGINE_NGRAPH);

-    cvNet.impl->skipInfEngineInit = true;
    return cvNet;
}
+
+
+static
+Net openvino_readNetwork(const String& modelPath, const String& binPath)
+{
+    FPDenormalsIgnoreHintScope fp_denormals_ignore_scope;
+
+    InferenceEngine::Core& ie = getCore("");
+    InferenceEngine::CNNNetwork ieNet;
+    try
+    {
+        ieNet = ie.ReadNetwork(modelPath, binPath);
+    }
+    catch (const std::exception& e)
+    {
+        CV_Error(Error::StsError, std::string("DNN: OpenVINO failed to read model '") + modelPath + "': " + e.what());
+    }
+
+    return NetImplOpenVINO::createNetworkFromModelOptimizer(ieNet);
+}
+
+
+static
+Net openvino_readNetwork(
+    const uchar* bufferModelConfigPtr, size_t bufferModelConfigSize,
+    const uchar* bufferWeightsPtr, size_t bufferWeightsSize
+)
+{
+    FPDenormalsIgnoreHintScope fp_denormals_ignore_scope;
+
+    InferenceEngine::Core& ie = getCore("");
+
+    std::string model; model.assign((char*)bufferModelConfigPtr, bufferModelConfigSize);
+
+    InferenceEngine::CNNNetwork ieNet;
+    try
+    {
+        InferenceEngine::TensorDesc tensorDesc(InferenceEngine::Precision::U8, { bufferWeightsSize }, InferenceEngine::Layout::C);
+        InferenceEngine::Blob::CPtr weights_blob = InferenceEngine::make_shared_blob(tensorDesc, (uint8_t*)bufferWeightsPtr, bufferWeightsSize);
+
+        ieNet = ie.ReadNetwork(model, weights_blob);
+    }
+    catch (const std::exception& e)
+    {
+        CV_Error(Error::StsError, std::string("DNN: OpenVINO failed to read model: ") + e.what());
+    }
+
+    return NetImplOpenVINO::createNetworkFromModelOptimizer(ieNet);
+}
+
#endif  // HAVE_INF_ENGINE
"Build OpenCV with Inference Engine to enable loading models from Model Optimizer."); -#else - - FPDenormalsIgnoreHintScope fp_denormals_ignore_scope; - - InferenceEngine::Core& ie = getCore(""); - InferenceEngine::CNNNetwork ieNet = ie.ReadNetwork(xml, bin); - - return Impl::createNetworkFromModelOptimizer(ieNet); -#endif // HAVE_INF_ENGINE +#endif } Net Net::readFromModelOptimizer(const std::vector& bufferModelConfig, const std::vector& bufferWeights) @@ -535,34 +879,112 @@ Net Net::readFromModelOptimizer( ) { CV_TRACE_FUNCTION(); -#ifndef HAVE_INF_ENGINE +#if defined(HAVE_INF_ENGINE) + return openvino_readNetwork(bufferModelConfigPtr, bufferModelConfigSize, bufferWeightsPtr, bufferWeightsSize); +#elif defined(ENABLE_PLUGINS) + auto& networkBackend = dnn_backend::createPluginDNNNetworkBackend("openvino"); + return networkBackend.readNetwork(std::string(), bufferModelConfigPtr, bufferModelConfigSize, bufferWeightsPtr, bufferWeightsSize); +#else CV_UNUSED(bufferModelConfigPtr); CV_UNUSED(bufferWeightsPtr); CV_UNUSED(bufferModelConfigSize); CV_UNUSED(bufferModelConfigSize); CV_Error(Error::StsError, "Build OpenCV with Inference Engine to enable loading models from Model Optimizer."); -#else - - FPDenormalsIgnoreHintScope fp_denormals_ignore_scope; - - InferenceEngine::Core& ie = getCore(""); - - std::string model; model.assign((char*)bufferModelConfigPtr, bufferModelConfigSize); - - InferenceEngine::CNNNetwork ieNet; - try - { - InferenceEngine::TensorDesc tensorDesc(InferenceEngine::Precision::U8, { bufferWeightsSize }, InferenceEngine::Layout::C); - InferenceEngine::Blob::CPtr weights_blob = InferenceEngine::make_shared_blob(tensorDesc, (uint8_t*)bufferWeightsPtr, bufferWeightsSize); - - ieNet = ie.ReadNetwork(model, weights_blob); - } - catch (const std::exception& e) - { - CV_Error(Error::StsError, std::string("DNN: IE failed to load model: ") + e.what()); - } - - return Impl::createNetworkFromModelOptimizer(ieNet); -#endif // HAVE_INF_ENGINE +#endif } + CV__DNN_INLINE_NS_END }} // namespace cv::dnn + + + +#ifdef BUILD_PLUGIN + +#define ABI_VERSION 0 +#define API_VERSION 0 +#include "plugin_api.hpp" + + +namespace cv { namespace dnn_backend { + +using namespace cv::dnn; + +class NetworkBackendOpenVINO : public NetworkBackend +{ +public: + void switchBackend(Net& net) CV_OVERRIDE + { + cv::dnn::switchToOpenVINOBackend(net); + } + Net readNetwork(const std::string& loaderID, const std::string& model, const std::string& config) CV_OVERRIDE + { + if (!loaderID.empty()) // only auto ("") is supported + { + CV_Error(Error::StsError, "DNN/OpenVINO: unsupported network loader ID: " + loaderID); + } + return openvino_readNetwork(model, config); + } + Net readNetwork( + const std::string& loaderID, + const uchar* bufferModelConfigPtr, size_t bufferModelConfigSize, + const uchar* bufferWeightsPtr, size_t bufferWeightsSize + ) CV_OVERRIDE + { + if (!loaderID.empty()) // only auto ("") is supported + { + CV_Error(Error::StsError, "DNN/OpenVINO: unsupported network loader ID: " + loaderID); + } + return openvino_readNetwork(bufferModelConfigPtr, bufferModelConfigSize, bufferWeightsPtr, bufferWeightsSize); + } + bool checkTarget(Target target) CV_OVERRIDE + { + return openvino::checkTarget(target); + } +}; + +static +std::shared_ptr& getInstanceNetworkBackendOpenVINO() +{ + static std::shared_ptr g_instance = std::make_shared(); + return g_instance; +} + + +}} // namespace + + +static +CvResult cv_getInstanceNetworkBackend(CV_OUT CvPluginDNNNetworkBackend* handle) CV_NOEXCEPT +{ + try + { + if 
(!handle) + return CV_ERROR_FAIL; + *handle = cv::dnn_backend::getInstanceNetworkBackendOpenVINO().get(); + return CV_ERROR_OK; + } + catch (...) + { + return CV_ERROR_FAIL; + } +} + +static const OpenCV_DNN_Plugin_API plugin_api = +{ + { + sizeof(OpenCV_DNN_Plugin_API), ABI_VERSION, API_VERSION, + CV_VERSION_MAJOR, CV_VERSION_MINOR, CV_VERSION_REVISION, CV_VERSION_STATUS, + "OpenVINO OpenCV DNN plugin (" CVAUX_STR(INF_ENGINE_RELEASE) ")" + }, + { + /* 1*/cv_getInstanceNetworkBackend + } +}; + +const OpenCV_DNN_Plugin_API* CV_API_CALL opencv_dnn_plugin_init_v0(int requested_abi_version, int requested_api_version, void* /*reserved=NULL*/) CV_NOEXCEPT +{ + if (requested_abi_version == ABI_VERSION && requested_api_version <= API_VERSION) + return &plugin_api; + return NULL; +} + +#endif // BUILD_PLUGIN diff --git a/modules/dnn/src/net_quantization.cpp b/modules/dnn/src/net_quantization.cpp index 8316687412..0add2d2d79 100644 --- a/modules/dnn/src/net_quantization.cpp +++ b/modules/dnn/src/net_quantization.cpp @@ -33,7 +33,7 @@ void getQuantizationParams(const Mat& src, std::vector& scales, std::vect } // FIXIT drop from inference API -Net Net::Impl::quantize(InputArrayOfArrays calibData, int inputsDtype, int outputsDtype, bool perChannel) +Net Net::Impl::quantize(Net& net, InputArrayOfArrays calibData, int inputsDtype, int outputsDtype, bool perChannel) { // Net can be quantized only once. if (netWasQuantized) @@ -47,7 +47,8 @@ Net Net::Impl::quantize(InputArrayOfArrays calibData, int inputsDtype, int outpu int prefTarget = preferableTarget; // Disable fusions and use CPU backend to quantize net - setPreferableBackend(DNN_BACKEND_OPENCV); + // FIXIT: we should not modify original network! + setPreferableBackend(net, DNN_BACKEND_OPENCV); setPreferableTarget(DNN_TARGET_CPU); enableFusion(false); @@ -163,7 +164,7 @@ Net Net::Impl::quantize(InputArrayOfArrays calibData, int inputsDtype, int outpu Net::Impl& dstNet = *(dstNet_.impl); dstNet.netWasQuantized = true; dstNet.setInputsNames(netInputLayer->outNames); - dstNet.setPreferableBackend(prefBackend); + dstNet.setPreferableBackend(dstNet_, prefBackend); dstNet.setPreferableTarget(prefTarget); dstNet.enableFusion(originalFusion); @@ -253,7 +254,7 @@ Net Net::Impl::quantize(InputArrayOfArrays calibData, int inputsDtype, int outpu } } // Restore FP32 Net's backend, target and fusion - setPreferableBackend(prefBackend); + setPreferableBackend(net, prefBackend); setPreferableTarget(prefTarget); enableFusion(originalFusion); return dstNet_; diff --git a/modules/dnn/src/op_inf_engine.cpp b/modules/dnn/src/op_inf_engine.cpp index 8a27dc2221..e237be07cf 100644 --- a/modules/dnn/src/op_inf_engine.cpp +++ b/modules/dnn/src/op_inf_engine.cpp @@ -11,7 +11,11 @@ #ifdef HAVE_INF_ENGINE #include -#endif // HAVE_INF_ENGINE +#elif defined(ENABLE_PLUGINS) +// using plugin API +#include "backend.hpp" +#include "factory.hpp" +#endif #include #include @@ -155,7 +159,6 @@ static bool detectMyriadX_(const std::string& device) } #endif // !defined(OPENCV_DNN_IE_VPU_TYPE_DEFAULT) - #endif // HAVE_INF_ENGINE @@ -281,24 +284,100 @@ bool checkTarget(Target target) #else // HAVE_INF_ENGINE + +namespace openvino { + +bool checkTarget(Target target) +{ +#if defined(ENABLE_PLUGINS) + try + { + auto& networkBackend = dnn_backend::createPluginDNNNetworkBackend("openvino"); + return networkBackend.checkTarget(target); + } + catch (const std::exception& e) + { + CV_LOG_INFO(NULL, "DNN/OpenVINO: checkTarget failed: " << e.what()) + } +#endif + return false; +} + +} // namespace 
+
+
 cv::String getInferenceEngineBackendType()
 {
+#if defined(ENABLE_PLUGINS)
+    try
+    {
+        auto& networkBackend = dnn_backend::createPluginDNNNetworkBackend("openvino");
+        CV_UNUSED(networkBackend);
+        return CV_DNN_BACKEND_INFERENCE_ENGINE_NGRAPH;
+    }
+    catch (const std::exception& e)
+    {
+        CV_LOG_INFO(NULL, "DNN/OpenVINO: plugin is not available: " << e.what())
+    }
+#endif
     CV_Error(Error::StsNotImplemented, "This OpenCV build doesn't include InferenceEngine support");
 }
 
 cv::String setInferenceEngineBackendType(const cv::String& newBackendType)
 {
+#if defined(ENABLE_PLUGINS)
+    try
+    {
+        auto& networkBackend = dnn_backend::createPluginDNNNetworkBackend("openvino");
+        CV_UNUSED(networkBackend);
+        CV_Assert(newBackendType == CV_DNN_BACKEND_INFERENCE_ENGINE_NGRAPH);
+    }
+    catch (const std::exception& e)
+    {
+        CV_LOG_INFO(NULL, "DNN/OpenVINO: plugin is not available: " << e.what())
+    }
+#endif
     CV_UNUSED(newBackendType);
     CV_Error(Error::StsNotImplemented, "This OpenCV build doesn't include InferenceEngine support");
 }
 
 cv::String getInferenceEngineVPUType()
 {
+#if defined(ENABLE_PLUGINS)
+    try
+    {
+        auto& networkBackend = dnn_backend::createPluginDNNNetworkBackend("openvino");
+        if (networkBackend.checkTarget(DNN_TARGET_MYRIAD))
+            return CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X;  // 2021.4 supports NCS2 only
+        CV_Error(Error::StsError, "DNN/OpenVINO: DNN_TARGET_MYRIAD is not available");
+    }
+    catch (const std::exception& e)
+    {
+        CV_LOG_INFO(NULL, "DNN/OpenVINO: plugin is not available: " << e.what())
+    }
+#endif
     CV_Error(Error::StsNotImplemented, "This OpenCV build doesn't include InferenceEngine support");
 }
 
 cv::String getInferenceEngineCPUType()
 {
+#if defined(ENABLE_PLUGINS)
+    try
+    {
+        auto& networkBackend = dnn_backend::createPluginDNNNetworkBackend("openvino");
+        CV_UNUSED(networkBackend);
+#if defined(__arm__) || defined(__aarch64__) || defined(_M_ARM64)
+        return CV_DNN_INFERENCE_ENGINE_CPU_TYPE_ARM_COMPUTE;
+#else
+        return CV_DNN_INFERENCE_ENGINE_CPU_TYPE_X86;
+#endif
+    }
+    catch (const std::exception& e)
+    {
+        CV_LOG_INFO(NULL, "DNN/OpenVINO: plugin is not available: " << e.what())
+    }
+#endif
     CV_Error(Error::StsNotImplemented, "This OpenCV build doesn't include InferenceEngine support");
 }
 
+
 #endif  // HAVE_INF_ENGINE
diff --git a/modules/dnn/src/op_inf_engine.hpp b/modules/dnn/src/op_inf_engine.hpp
index 856441e71d..6ac4d955cc 100644
--- a/modules/dnn/src/op_inf_engine.hpp
+++ b/modules/dnn/src/op_inf_engine.hpp
@@ -60,6 +60,15 @@
 namespace cv { namespace dnn {
 
+CV__DNN_INLINE_NS_BEGIN
+namespace openvino {
+
+// TODO: use std::string as parameter
+bool checkTarget(Target target);
+
+}  // namespace openvino
+CV__DNN_INLINE_NS_END
+
 #ifdef HAVE_INF_ENGINE
 
 Backend& getInferenceEngineBackendTypeParam();
@@ -73,12 +82,7 @@ void infEngineBlobsToMats(const std::vector<InferenceEngine::Blob::Ptr>& blobs,
 
 CV__DNN_INLINE_NS_BEGIN
 
-namespace openvino {
-
-// TODO: use std::string as parameter
-bool checkTarget(Target target);
-
-}  // namespace openvino
+void switchToOpenVINOBackend(Net& net);
 
 bool isMyriadX();
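These four helpers are the public entry points declared in opencv2/dnn/utils/inference_engine.hpp; with the plugin branch they can now answer even in builds without a built-in Inference Engine. An application-side sketch:

    #include <opencv2/dnn.hpp>
    #include <opencv2/dnn/utils/inference_engine.hpp>
    #include <iostream>

    int main()
    {
        // These helpers throw when no OpenVINO support (built-in or plugin) is present.
        try
        {
            std::cout << "IE backend: " << cv::dnn::getInferenceEngineBackendType() << std::endl;
            std::cout << "IE CPU type: " << cv::dnn::getInferenceEngineCPUType() << std::endl;
        }
        catch (const cv::Exception& e)
        {
            std::cout << "OpenVINO is not available: " << e.what() << std::endl;
        }
        return 0;
    }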
diff --git a/modules/dnn/src/plugin_api.hpp b/modules/dnn/src/plugin_api.hpp
new file mode 100644
index 0000000000..83f4189df2
--- /dev/null
+++ b/modules/dnn/src/plugin_api.hpp
@@ -0,0 +1,72 @@
+// This file is part of OpenCV project.
+// It is subject to the license terms in the LICENSE file found in the top-level directory
+// of this distribution and at http://opencv.org/license.html.
+
+#ifndef DNN_PLUGIN_API_HPP
+#define DNN_PLUGIN_API_HPP
+
+#include <opencv2/core/cvdef.h>
+#include <opencv2/core/llapi/llapi.h>
+
+#include "backend.hpp"
+
+#if !defined(BUILD_PLUGIN)
+
+/// increased for backward-compatible changes, e.g. add new function
+/// Caller API <= Plugin API -> plugin is fully compatible
+/// Caller API > Plugin API -> plugin is not fully compatible, caller should use extra checks to use plugins with older API
+#define API_VERSION 0  // preview
+
+/// increased for incompatible changes, e.g. remove function argument
+/// Caller ABI == Plugin ABI -> plugin is compatible
+/// Caller ABI > Plugin ABI -> plugin is not compatible, caller should use shim code to use old ABI plugins (caller may know how lower ABI works, so it is possible)
+/// Caller ABI < Plugin ABI -> plugin can't be used (plugin should provide interface with lower ABI to handle that)
+#define ABI_VERSION 0  // preview
+
+#else  // !defined(BUILD_PLUGIN)
+
+#if !defined(ABI_VERSION) || !defined(API_VERSION)
+#error "Plugin must define ABI_VERSION and API_VERSION before including plugin_api.hpp"
+#endif
+
+#endif  // !defined(BUILD_PLUGIN)
+
+typedef cv::dnn_backend::NetworkBackend* CvPluginDNNNetworkBackend;
+
+struct OpenCV_DNN_Plugin_API_v0_0_api_entries
+{
+    /** @brief Get backend API instance
+
+     @param[out] handle pointer to inference backend API handle
+
+     @note API-CALL 1, API-Version == 0
+     */
+    CvResult (CV_API_CALL *getInstance)(CV_OUT CvPluginDNNNetworkBackend* handle) CV_NOEXCEPT;
+};  // OpenCV_DNN_Plugin_API_v0_0_api_entries
+
+typedef struct OpenCV_DNN_Plugin_API_v0
+{
+    OpenCV_API_Header api_header;
+    struct OpenCV_DNN_Plugin_API_v0_0_api_entries v0;
+} OpenCV_DNN_Plugin_API_v0;
+
+#if ABI_VERSION == 0 && API_VERSION == 0
+typedef OpenCV_DNN_Plugin_API_v0 OpenCV_DNN_Plugin_API;
+#else
+#error "Not supported configuration: check ABI_VERSION/API_VERSION"
+#endif
+
+#ifdef BUILD_PLUGIN
+extern "C" {
+
+CV_PLUGIN_EXPORTS
+const OpenCV_DNN_Plugin_API* CV_API_CALL opencv_dnn_plugin_init_v0
+        (int requested_abi_version, int requested_api_version, void* reserved /*NULL*/) CV_NOEXCEPT;
+
+}  // extern "C"
+#else  // BUILD_PLUGIN
+typedef const OpenCV_DNN_Plugin_API* (CV_API_CALL *FN_opencv_dnn_plugin_init_t)
+        (int requested_abi_version, int requested_api_version, void* reserved /*NULL*/);
+#endif  // BUILD_PLUGIN
+
+#endif  // DNN_PLUGIN_API_HPP
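Taken together, this header defines everything a backend needs to export. A minimal out-of-tree plugin could look like the following sketch. Caveats: `MyBackend` and its description string are hypothetical; `backend.hpp` is not shown in this patch, so the exact pure-virtual set of NetworkBackend is inferred from the OpenVINO overrides above.

    // my_plugin.cpp -- hypothetical out-of-tree DNN backend plugin (sketch).
    #define ABI_VERSION 0
    #define API_VERSION 0
    #include "plugin_api.hpp"  // the build defines BUILD_PLUGIN

    class MyBackend : public cv::dnn_backend::NetworkBackend
    {
    public:
        // Signatures inferred from NetworkBackendOpenVINO in this patch.
        void switchBackend(cv::dnn::Net&) CV_OVERRIDE
        { CV_Error(cv::Error::StsNotImplemented, "switchBackend"); }
        cv::dnn::Net readNetwork(const std::string&, const std::string&, const std::string&) CV_OVERRIDE
        { CV_Error(cv::Error::StsNotImplemented, "readNetwork(file)"); }
        cv::dnn::Net readNetwork(const std::string&, const uchar*, size_t, const uchar*, size_t) CV_OVERRIDE
        { CV_Error(cv::Error::StsNotImplemented, "readNetwork(buffer)"); }
        bool checkTarget(cv::dnn::Target) CV_OVERRIDE { return false; }
    };
    static MyBackend g_backend;

    static CvResult CV_API_CALL my_getInstance(CV_OUT CvPluginDNNNetworkBackend* handle) CV_NOEXCEPT
    {
        if (!handle)
            return CV_ERROR_FAIL;
        *handle = &g_backend;
        return CV_ERROR_OK;
    }

    static const OpenCV_DNN_Plugin_API plugin_api =
    {
        {
            sizeof(OpenCV_DNN_Plugin_API), ABI_VERSION, API_VERSION,
            CV_VERSION_MAJOR, CV_VERSION_MINOR, CV_VERSION_REVISION, CV_VERSION_STATUS,
            "My DNN plugin"
        },
        { my_getInstance }
    };

    const OpenCV_DNN_Plugin_API* CV_API_CALL opencv_dnn_plugin_init_v0(
            int requested_abi_version, int requested_api_version, void* /*reserved*/) CV_NOEXCEPT
    {
        // Accept the exact ABI and any API level we implement (see version rules above).
        if (requested_abi_version == ABI_VERSION && requested_api_version <= API_VERSION)
            return &plugin_api;
        return NULL;
    }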
diff --git a/modules/dnn/src/plugin_wrapper.impl.hpp b/modules/dnn/src/plugin_wrapper.impl.hpp
new file mode 100644
index 0000000000..94b2e4d219
--- /dev/null
+++ b/modules/dnn/src/plugin_wrapper.impl.hpp
@@ -0,0 +1,319 @@
+// This file is part of OpenCV project.
+// It is subject to the license terms in the LICENSE file found in the top-level directory
+// of this distribution and at http://opencv.org/license.html.
+
+//
+// Not a standalone header, part of backend.cpp
+//
+
+//==================================================================================================
+// Dynamic backend implementation
+
+#include "opencv2/core/utils/plugin_loader.private.hpp"
+
+namespace cv { namespace impl {
+
+using namespace cv::dnn_backend;
+
+#if OPENCV_HAVE_FILESYSTEM_SUPPORT && defined(ENABLE_PLUGINS)
+
+using namespace cv::plugin::impl;  // plugin_loader.hpp
+
+class PluginDNNBackend CV_FINAL: public std::enable_shared_from_this<PluginDNNBackend>
+{
+protected:
+    void initPluginAPI()
+    {
+        const char* init_name = "opencv_dnn_plugin_init_v0";
+        FN_opencv_dnn_plugin_init_t fn_init = reinterpret_cast<FN_opencv_dnn_plugin_init_t>(lib_->getSymbol(init_name));
+        if (fn_init)
+        {
+            CV_LOG_DEBUG(NULL, "Found entry: '" << init_name << "'");
+            for (int supported_api_version = API_VERSION; supported_api_version >= 0; supported_api_version--)
+            {
+                plugin_api_ = fn_init(ABI_VERSION, supported_api_version, NULL);
+                if (plugin_api_)
+                    break;
+            }
+            if (!plugin_api_)
+            {
+                CV_LOG_INFO(NULL, "DNN: plugin is incompatible (can't be initialized): " << lib_->getName());
+                return;
+            }
+            // NB: force strict minor version check (ABI is not preserved for now)
+            if (!checkCompatibility(plugin_api_->api_header, ABI_VERSION, API_VERSION, true))
+            {
+                plugin_api_ = NULL;
+                return;
+            }
+            CV_LOG_INFO(NULL, "DNN: plugin is ready to use '" << plugin_api_->api_header.api_description << "'");
+        }
+        else
+        {
+            CV_LOG_INFO(NULL, "DNN: plugin is incompatible, missing init function: '" << init_name << "', file: " << lib_->getName());
+        }
+    }
+
+
+    bool checkCompatibility(const OpenCV_API_Header& api_header, unsigned int abi_version, unsigned int api_version, bool checkMinorOpenCVVersion)
+    {
+        if (api_header.opencv_version_major != CV_VERSION_MAJOR)
+        {
+            CV_LOG_ERROR(NULL, "DNN: wrong OpenCV major version used by plugin '" << api_header.api_description << "': " <<
+                cv::format("%d.%d, OpenCV version is '" CV_VERSION "'", api_header.opencv_version_major, api_header.opencv_version_minor))
+            return false;
+        }
+        if (!checkMinorOpenCVVersion)
+        {
+            // no checks for OpenCV minor version
+        }
+        else if (api_header.opencv_version_minor != CV_VERSION_MINOR)
+        {
+            CV_LOG_ERROR(NULL, "DNN: wrong OpenCV minor version used by plugin '" << api_header.api_description << "': " <<
+                cv::format("%d.%d, OpenCV version is '" CV_VERSION "'", api_header.opencv_version_major, api_header.opencv_version_minor))
+            return false;
+        }
+        CV_LOG_DEBUG(NULL, "DNN: initialized '" << api_header.api_description << "': built with "
+            << cv::format("OpenCV %d.%d (ABI/API = %d/%d)",
+                          api_header.opencv_version_major, api_header.opencv_version_minor,
+                          api_header.min_api_version, api_header.api_version)
+            << ", current OpenCV version is '" CV_VERSION "' (ABI/API = " << abi_version << "/" << api_version << ")"
+        );
+        if (api_header.min_api_version != abi_version)  // future: range can be here
+        {
+            // actually this should never happen due to checks in plugin's init() function
+            CV_LOG_ERROR(NULL, "DNN: plugin is not supported due to incompatible ABI = " << api_header.min_api_version);
+            return false;
+        }
+        if (api_header.api_version != api_version)
+        {
+            CV_LOG_INFO(NULL, "DNN: NOTE: plugin is supported, but there is an API version mismatch: "
+                << cv::format("plugin API level (%d) != OpenCV API level (%d)", api_header.api_version, api_version));
+            if (api_header.api_version < api_version)
+            {
+                CV_LOG_INFO(NULL, "DNN: NOTE: some functionality may be unavailable due to lack of support by the plugin implementation");
+            }
+        }
+        return true;
+    }
+
+public:
+    std::shared_ptr<DynamicLib> lib_;
+    const OpenCV_DNN_Plugin_API* plugin_api_;
+
+    PluginDNNBackend(const std::shared_ptr<DynamicLib>& lib)
+        : lib_(lib)
+        , plugin_api_(NULL)
+    {
+        initPluginAPI();
+    }
+
+    std::shared_ptr<cv::dnn_backend::NetworkBackend> createNetworkBackend() const
+    {
+        CV_Assert(plugin_api_);
+
+        CvPluginDNNNetworkBackend instancePtr = NULL;
+
+        if (plugin_api_->v0.getInstance)
+        {
+            if (CV_ERROR_OK == plugin_api_->v0.getInstance(&instancePtr))
+            {
+                CV_Assert(instancePtr);
+                // TODO C++20 "aliasing constructor"
+                return std::shared_ptr<cv::dnn_backend::NetworkBackend>(instancePtr, [](cv::dnn_backend::NetworkBackend*){});  // empty deleter
+            }
+        }
+        return std::shared_ptr<cv::dnn_backend::NetworkBackend>();
+    }
+
+};  // class PluginDNNBackend
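The loop in initPluginAPI is the host half of the version handshake: it offers its own ABI and walks the API level downward until the plugin accepts. Outside OpenCV's DynamicLib wrapper, the same probe could be written with plain POSIX loading (illustrative only, POSIX-specific; the library name is a placeholder, and the opaque const void* stands in for OpenCV_DNN_Plugin_API*):

    #include <dlfcn.h>
    #include <cstdio>

    // Hypothetical standalone probe of a DNN plugin's entry point (POSIX only).
    int main()
    {
        void* h = dlopen("libopencv_dnn_openvino.so", RTLD_LAZY);  // name is illustrative
        if (!h)
            return 1;
        typedef const void* (*fn_init_t)(int, int, void*);
        fn_init_t fn_init = reinterpret_cast<fn_init_t>(dlsym(h, "opencv_dnn_plugin_init_v0"));
        // Walk the API level down until the plugin accepts the request,
        // exactly as initPluginAPI() does (both levels are 0 in this preview).
        const void* api = NULL;
        for (int api_version = 0; api_version >= 0 && fn_init; api_version--)
            if ((api = fn_init(/*ABI*/ 0, api_version, NULL)) != NULL)
                break;
        std::printf("plugin %s\n", api ? "initialized" : "rejected");
        dlclose(h);
        return 0;
    }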
+
+
+class PluginDNNBackendFactory CV_FINAL: public IDNNBackendFactory
+{
+public:
+    std::string baseName_;
+    std::shared_ptr<PluginDNNBackend> backend;
+    bool initialized;
+public:
+    PluginDNNBackendFactory(const std::string& baseName)
+        : baseName_(baseName)
+        , initialized(false)
+    {
+        // nothing, plugins are loaded on demand
+    }
+
+    std::shared_ptr<cv::dnn_backend::NetworkBackend> createNetworkBackend() const CV_OVERRIDE
+    {
+        if (!initialized)
+        {
+            const_cast<PluginDNNBackendFactory*>(this)->initBackend();
+        }
+        if (backend)
+            return backend->createNetworkBackend();
+        return std::shared_ptr<cv::dnn_backend::NetworkBackend>();
+    }
+
+protected:
+    void initBackend()
+    {
+        AutoLock lock(getInitializationMutex());
+        try
+        {
+            if (!initialized)
+                loadPlugin();
+        }
+        catch (...)
+        {
+            CV_LOG_INFO(NULL, "DNN: exception during plugin loading: " << baseName_ << ". SKIP");
+        }
+        initialized = true;
+    }
+    void loadPlugin();
+};
+
+static
+std::vector<FileSystemPath_t> getPluginCandidates(const std::string& baseName)
+{
+    using namespace cv::utils;
+    using namespace cv::utils::fs;
+    const std::string baseName_l = toLowerCase(baseName);
+    const std::string baseName_u = toUpperCase(baseName);
+    const FileSystemPath_t baseName_l_fs = toFileSystemPath(baseName_l);
+    std::vector<FileSystemPath_t> paths;
+    // TODO OPENCV_PLUGIN_PATH
+    const std::vector<std::string> paths_ = getConfigurationParameterPaths("OPENCV_DNN_PLUGIN_PATH", std::vector<std::string>());
+    if (paths_.size() != 0)
+    {
+        for (size_t i = 0; i < paths_.size(); i++)
+        {
+            paths.push_back(toFileSystemPath(paths_[i]));
+        }
+    }
+    else
+    {
+        FileSystemPath_t binaryLocation;
+        if (getBinLocation(binaryLocation))
+        {
+            binaryLocation = getParent(binaryLocation);
+#ifndef CV_DNN_PLUGIN_SUBDIRECTORY
+            paths.push_back(binaryLocation);
+#else
+            paths.push_back(binaryLocation + toFileSystemPath("/") + toFileSystemPath(CV_DNN_PLUGIN_SUBDIRECTORY_STR));
+#endif
+        }
+    }
+    const std::string default_expr = libraryPrefix() + "opencv_dnn_" + baseName_l + "*" + librarySuffix();
+    const std::string plugin_expr = getConfigurationParameterString((std::string("OPENCV_DNN_PLUGIN_") + baseName_u).c_str(), default_expr.c_str());
+    std::vector<FileSystemPath_t> results;
+#ifdef _WIN32
+    FileSystemPath_t moduleName = toFileSystemPath(libraryPrefix() + "opencv_dnn_" + baseName_l + librarySuffix());
+    if (plugin_expr != default_expr)
+    {
+        moduleName = toFileSystemPath(plugin_expr);
+        results.push_back(moduleName);
+    }
+    for (const FileSystemPath_t& path : paths)
+    {
+        results.push_back(path + L"\\" + moduleName);
+    }
+    results.push_back(moduleName);
+#else
+    CV_LOG_DEBUG(NULL, "DNN: " << baseName << " plugin's glob is '" << plugin_expr << "', " << paths.size() << " location(s)");
+    for (const std::string& path : paths)
+    {
+        if (path.empty())
+            continue;
+        std::vector<std::string> candidates;
+        cv::glob(utils::fs::join(path, plugin_expr), candidates);
+        // Prefer candidates with higher versions
+        // TODO: implement an accurate version-based comparator
+        std::sort(candidates.begin(), candidates.end(), std::greater<std::string>());
+        CV_LOG_DEBUG(NULL, "    - " << path << ": " << candidates.size());
+        copy(candidates.begin(), candidates.end(), back_inserter(results));
+    }
+#endif
+    CV_LOG_DEBUG(NULL, "Found " << results.size() << " plugin(s) for " << baseName);
+    return results;
+}
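The lexicographic std::greater sort above is a stopgap, as the TODO notes: it ranks "..._4_2.so" above "..._4_10.so". A numeric-aware comparator along these lines could replace it (a sketch, not part of the patch; versionAwareGreater is a hypothetical name):

    #include <algorithm>
    #include <cctype>
    #include <string>

    // Sketch: compare names segment-wise, treating digit runs as numbers,
    // so "opencv_dnn_openvino_2022_1.so" outranks "opencv_dnn_openvino_2021_4.so".
    static bool versionAwareGreater(const std::string& a, const std::string& b)
    {
        size_t i = 0, j = 0;
        while (i < a.size() && j < b.size())
        {
            if (isdigit((unsigned char)a[i]) && isdigit((unsigned char)b[j]))
            {
                size_t i2 = i, j2 = j;
                while (i2 < a.size() && isdigit((unsigned char)a[i2])) i2++;
                while (j2 < b.size() && isdigit((unsigned char)b[j2])) j2++;
                long long va = std::stoll(a.substr(i, i2 - i));
                long long vb = std::stoll(b.substr(j, j2 - j));
                if (va != vb)
                    return va > vb;
                i = i2; j = j2;
            }
            else
            {
                if (a[i] != b[j])
                    return a[i] > b[j];
                i++; j++;
            }
        }
        return a.size() > b.size();
    }

    // Usage: std::sort(candidates.begin(), candidates.end(), versionAwareGreater);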
+
+void PluginDNNBackendFactory::loadPlugin()
+{
+    for (const FileSystemPath_t& plugin : getPluginCandidates(baseName_))
+    {
+        auto lib = std::make_shared<DynamicLib>(plugin);
+        if (!lib->isLoaded())
+        {
+            continue;
+        }
+        try
+        {
+            auto pluginBackend = std::make_shared<PluginDNNBackend>(lib);
+            if (!pluginBackend)
+            {
+                continue;
+            }
+            if (pluginBackend->plugin_api_ == NULL)
+            {
+                CV_LOG_ERROR(NULL, "DNN: no compatible plugin API for backend: " << baseName_ << " in " << toPrintablePath(plugin));
+                continue;
+            }
+            // NB: we are going to use the backend, so prevent automatic library unloading
+            lib->disableAutomaticLibraryUnloading();
+            backend = pluginBackend;
+            return;
+        }
+        catch (...)
+        {
+            CV_LOG_WARNING(NULL, "DNN: exception during plugin initialization: " << toPrintablePath(plugin) << ". SKIP");
+        }
+    }
+}
+
+#endif  // OPENCV_HAVE_FILESYSTEM_SUPPORT && defined(ENABLE_PLUGINS)
+
+}  // namespace
+
+
+
+namespace dnn_backend {
+
+
+std::shared_ptr<IDNNBackendFactory> createPluginDNNBackendFactory(const std::string& baseName)
+{
+#if OPENCV_HAVE_FILESYSTEM_SUPPORT && defined(ENABLE_PLUGINS)
+    const std::string baseName_u = toUpperCase(baseName);
+    AutoLock lock(getInitializationMutex());
+    static std::map<std::string, std::shared_ptr<IDNNBackendFactory>> g_plugins_cache;
+    auto it = g_plugins_cache.find(baseName_u);
+    if (it == g_plugins_cache.end())
+    {
+        auto factory = std::make_shared<PluginDNNBackendFactory>(baseName);
+        g_plugins_cache.insert(std::pair<std::string, std::shared_ptr<IDNNBackendFactory>>(baseName_u, factory));
+        return factory;
+    }
+    return it->second;
+#else
+    CV_UNUSED(baseName);
+    return std::shared_ptr<IDNNBackendFactory>();
+#endif
+}
+
+
+cv::dnn_backend::NetworkBackend& createPluginDNNNetworkBackend(const std::string& baseName)
+{
+    auto factory = dnn_backend::createPluginDNNBackendFactory(baseName);
+    if (!factory)
+    {
+        CV_Error(Error::StsNotImplemented, cv::format("Plugin factory is not available: '%s'", baseName.c_str()));
+    }
+    auto backend = factory->createNetworkBackend();
+    if (!backend)
+    {
+        CV_Error(Error::StsNotImplemented, cv::format("Backend (plugin) is not available: '%s'", baseName.c_str()));
+    }
+    return *backend;
+}
+
+
+}}  // namespace
diff --git a/modules/dnn/src/precomp.hpp b/modules/dnn/src/precomp.hpp
index abcd3745f9..0100eb2c7f 100644
--- a/modules/dnn/src/precomp.hpp
+++ b/modules/dnn/src/precomp.hpp
@@ -39,8 +39,14 @@
 //
 //M*/
 
-#include 
+#if !defined(BUILD_PLUGIN)
 #include "cvconfig.h"
+#else
+#include 
+#undef __OPENCV_BUILD  // allow public API only
+#endif
+
+#include 
 
 #ifndef CV_OCL4DNN
 #define CV_OCL4DNN 0
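Module code goes through this factory layer rather than touching PluginDNNBackend directly. A hedged sketch of that internal pattern, mirroring the registry.cpp change below ("factory.hpp" is the module-internal header introduced by this patch, and isOpenVINOPluginAvailable is a hypothetical helper):

    #include "factory.hpp"  // module-internal, not a public include

    static bool isOpenVINOPluginAvailable()
    {
        auto factory = cv::dnn_backend::createPluginDNNBackendFactory("openvino");
        if (!factory)
            return false;  // plugins or filesystem support disabled at build time
        return (bool)factory->createNetworkBackend();  // loaded and version-compatible
    }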
diff --git a/modules/dnn/src/registry.cpp b/modules/dnn/src/registry.cpp
index 697fca6015..56b96f4c4c 100644
--- a/modules/dnn/src/registry.cpp
+++ b/modules/dnn/src/registry.cpp
@@ -14,6 +14,8 @@
 
 #include "halide_scheduler.hpp"
 
+#include "backend.hpp"
+#include "factory.hpp"
 
 namespace cv { namespace dnn {
 
@@ -43,43 +45,46 @@ private:
 #endif
 #endif  // HAVE_HALIDE
 
+    bool haveBackendOpenVINO = false;
 #ifdef HAVE_INF_ENGINE
-    if (openvino::checkTarget(DNN_TARGET_CPU))
+    haveBackendOpenVINO = true;
+#elif defined(ENABLE_PLUGINS)
+    {
+        auto factory = dnn_backend::createPluginDNNBackendFactory("openvino");
+        if (factory)
+        {
+            auto backend = factory->createNetworkBackend();
+            if (backend)
+                haveBackendOpenVINO = true;
+        }
+    }
+#endif
+
+    if (haveBackendOpenVINO && openvino::checkTarget(DNN_TARGET_CPU))
     {
-#ifdef HAVE_DNN_NGRAPH
        backends.push_back(std::make_pair(DNN_BACKEND_INFERENCE_ENGINE_NGRAPH, DNN_TARGET_CPU));
-#endif
    }
-    if (openvino::checkTarget(DNN_TARGET_MYRIAD))
+    if (haveBackendOpenVINO && openvino::checkTarget(DNN_TARGET_MYRIAD))
    {
-#ifdef HAVE_DNN_NGRAPH
        backends.push_back(std::make_pair(DNN_BACKEND_INFERENCE_ENGINE_NGRAPH, DNN_TARGET_MYRIAD));
-#endif
    }
-    if (openvino::checkTarget(DNN_TARGET_HDDL))
+    if (haveBackendOpenVINO && openvino::checkTarget(DNN_TARGET_HDDL))
    {
-#ifdef HAVE_DNN_NGRAPH
        backends.push_back(std::make_pair(DNN_BACKEND_INFERENCE_ENGINE_NGRAPH, DNN_TARGET_HDDL));
-#endif
    }
 #ifdef HAVE_OPENCL
    if (cv::ocl::useOpenCL() && ocl::Device::getDefault().isIntel())
    {
-        if (openvino::checkTarget(DNN_TARGET_OPENCL))
+        if (haveBackendOpenVINO && openvino::checkTarget(DNN_TARGET_OPENCL))
        {
-#ifdef HAVE_DNN_NGRAPH
            backends.push_back(std::make_pair(DNN_BACKEND_INFERENCE_ENGINE_NGRAPH, DNN_TARGET_OPENCL));
-#endif
        }
-        if (openvino::checkTarget(DNN_TARGET_OPENCL_FP16))
+        if (haveBackendOpenVINO && openvino::checkTarget(DNN_TARGET_OPENCL_FP16))
        {
-#ifdef HAVE_DNN_NGRAPH
            backends.push_back(std::make_pair(DNN_BACKEND_INFERENCE_ENGINE_NGRAPH, DNN_TARGET_OPENCL_FP16));
-#endif
        }
    }
-#endif
-#endif  // HAVE_INF_ENGINE
+#endif  // HAVE_OPENCL
 
 #ifdef HAVE_WEBNN
    if (haveWebnn())
@@ -132,10 +137,9 @@ std::vector<Target> getAvailableTargets(Backend be)
 {
    if (be == DNN_BACKEND_DEFAULT)
        be = (Backend)getParam_DNN_BACKEND_DEFAULT();
-#ifdef HAVE_INF_ENGINE
+
    if (be == DNN_BACKEND_INFERENCE_ENGINE)
        be = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH;
-#endif
 
    std::vector<Target> result;
    const BackendRegistry::BackendsList all_backends = getAvailableBackends();
diff --git a/modules/dnn/test/test_common.hpp b/modules/dnn/test/test_common.hpp
index 3d616e17da..e36374bd98 100644
--- a/modules/dnn/test/test_common.hpp
+++ b/modules/dnn/test/test_common.hpp
@@ -130,9 +130,7 @@ void normAssertTextDetections(
 
 void readFileContent(const std::string& filename, CV_OUT std::vector<char>& content);
 
-#ifdef HAVE_INF_ENGINE
 bool validateVPUType();
-#endif
 
 testing::internal::ParamGenerator< tuple<Backend, Target> > dnnBackendsAndTargets(
        bool withInferenceEngine = true,
diff --git a/modules/dnn/test/test_common.impl.hpp b/modules/dnn/test/test_common.impl.hpp
index 35f658cc90..5fdf6c3d1e 100644
--- a/modules/dnn/test/test_common.impl.hpp
+++ b/modules/dnn/test/test_common.impl.hpp
@@ -254,9 +254,7 @@ testing::internal::ParamGenerator< tuple<Backend, Target> > dnnBackendsAndTarget
        bool withWebnn /*= false*/
 )
 {
-#ifdef HAVE_INF_ENGINE
    bool withVPU = validateVPUType();
-#endif
 
    std::vector< tuple<Backend, Target> > targets;
    std::vector< Target > available;
@@ -266,7 +264,6 @@ testing::internal::ParamGenerator< tuple<Backend, Target> > dnnBackendsAndTarget
        for (std::vector< Target >::const_iterator i = available.begin(); i != available.end(); ++i)
            targets.push_back(make_tuple(DNN_BACKEND_HALIDE, *i));
    }
-#ifdef HAVE_INF_ENGINE
    if (withInferenceEngine)
    {
        available = getAvailableTargets(DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019);
@@ -288,9 +285,6 @@ testing::internal::ParamGenerator< tuple<Backend, Target> > dnnBackendsAndTarget
        }
    }
-#else
-    CV_UNUSED(withInferenceEngine);
-#endif
    if (withVkCom)
    {
        available = getAvailableTargets(DNN_BACKEND_VKCOM);
@@ -356,7 +350,6 @@ testing::internal::ParamGenerator< tuple<Backend, Target> > dnnBackendsAndTarget
 #endif
 }
 
-#ifdef HAVE_INF_ENGINE
 static
 std::string getTestInferenceEngineVPUType()
 {
    static std::string param_vpu_type = utils::getConfigurationParameterString("OPENCV_TEST_DNN_IE_VPU_TYPE", "");
@@ -419,7 +412,6 @@ bool validateVPUType()
    static bool result = validateVPUType_();
    return result;
 }
-#endif  // HAVE_INF_ENGINE
 
void initDNNTests()
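The registry changes above are what applications observe through the public availability query: with this patch, the NGRAPH backend is reported whenever OpenVINO support is actually loadable at runtime, whether built in or supplied by the plugin. A closing application-side sketch:

    #include <opencv2/dnn.hpp>
    #include <iostream>

    int main()
    {
        using namespace cv::dnn;
        std::vector<Target> targets = getAvailableTargets(DNN_BACKEND_INFERENCE_ENGINE_NGRAPH);
        std::cout << "OpenVINO targets available: " << targets.size() << std::endl;
        for (Target t : targets)
            std::cout << "  target id: " << (int)t << std::endl;
        return 0;
    }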