diff --git a/CMakeLists.txt b/CMakeLists.txt index 663d7e01ff..ad6ef10448 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -221,10 +221,13 @@ OCV_OPTION(BUILD_ITT "Build Intel ITT from source" (NOT MI # =================================================== OCV_OPTION(WITH_1394 "Include IEEE1394 support" ON VISIBLE_IF NOT ANDROID AND NOT IOS AND NOT WINRT - VERIFY HAVE_DC1394) + VERIFY HAVE_DC1394_2) OCV_OPTION(WITH_AVFOUNDATION "Use AVFoundation for Video I/O (iOS/Mac)" ON VISIBLE_IF APPLE VERIFY HAVE_AVFOUNDATION) +OCV_OPTION(WITH_CAP_IOS "Enable iOS video capture" ON + VISIBLE_IF IOS + VERIFY HAVE_CAP_IOS) OCV_OPTION(WITH_CAROTENE "Use NVidia carotene acceleration library for ARM platform" ON VISIBLE_IF (ARM OR AARCH64) AND NOT IOS AND NOT (CMAKE_VERSION VERSION_LESS "2.8.11")) OCV_OPTION(WITH_CPUFEATURES "Use cpufeatures Android library" ON @@ -253,10 +256,7 @@ OCV_OPTION(WITH_FFMPEG "Include FFMPEG support" ON VERIFY HAVE_FFMPEG) OCV_OPTION(WITH_GSTREAMER "Include Gstreamer support" ON VISIBLE_IF NOT ANDROID AND NOT IOS AND NOT WINRT - VERIFY HAVE_GSTREAMER AND GSTREAMER_BASE_VERSION VERSION_GREATER "0.99") -OCV_OPTION(WITH_GSTREAMER_0_10 "Enable Gstreamer 0.10 support (instead of 1.x)" OFF - VISIBLE_IF TRUE - VERIFY HAVE_GSTREAMER AND GSTREAMER_BASE_VERSION VERSION_LESS "1.0") + VERIFY HAVE_GSTREAMER AND GSTREAMER_VERSION VERSION_GREATER "0.99") OCV_OPTION(WITH_GTK "Include GTK support" ON VISIBLE_IF UNIX AND NOT APPLE AND NOT ANDROID VERIFY HAVE_GTK) @@ -308,9 +308,6 @@ OCV_OPTION(WITH_GDCM "Include DICOM support" OFF OCV_OPTION(WITH_PVAPI "Include Prosilica GigE support" OFF VISIBLE_IF NOT ANDROID AND NOT IOS AND NOT WINRT VERIFY HAVE_PVAPI) -OCV_OPTION(WITH_GIGEAPI "Include Smartek GigE support" OFF - VISIBLE_IF NOT ANDROID AND NOT IOS AND NOT WINRT - VERIFY HAVE_GIGE_API) OCV_OPTION(WITH_ARAVIS "Include Aravis GigE support" OFF VISIBLE_IF NOT ANDROID AND NOT IOS AND NOT WINRT AND NOT WIN32 VERIFY HAVE_ARAVIS_API) @@ -371,9 +368,6 @@ OCV_OPTION(WITH_OPENCLAMDBLAS "Include AMD OpenCL BLAS library support" ON OCV_OPTION(WITH_DIRECTX "Include DirectX support" ON VISIBLE_IF WIN32 AND NOT WINRT VERIFY HAVE_DIRECTX) -OCV_OPTION(WITH_INTELPERC "Include Intel Perceptual Computing support" OFF - VISIBLE_IF WIN32 AND NOT WINRT - VERIFY HAVE_INTELPERC) OCV_OPTION(WITH_LIBREALSENSE "Include Intel librealsense support" OFF VISIBLE_IF NOT WITH_INTELPERC VERIFY HAVE_LIBREALSENSE) @@ -1351,7 +1345,7 @@ if(WITH_GDAL OR HAVE_GDAL) endif() if(WITH_GDCM OR HAVE_GDCM) - status(" GDCM:" HAVE_GDCM THEN "YES (ver ${GDCM_VERSION})" ELSE "NO") + status(" GDCM:" HAVE_GDCM THEN "YES (${GDCM_VERSION})" ELSE "NO") endif() if(WITH_IMGCODEC_HDR OR DEFINED HAVE_IMGCODEC_HDR) @@ -1375,11 +1369,7 @@ status("") status(" Video I/O:") if(WITH_1394 OR HAVE_DC1394_2) - if (HAVE_DC1394_2) - status(" DC1394:" "YES (ver ${ALIASOF_libdc1394-2_VERSION})") - else() - status(" DC1394:" "NO") - endif() + status(" DC1394:" HAVE_DC1394_2 THEN "YES (${DC1394_2_VERSION})" ELSE NO) endif() if(WITH_FFMPEG OR HAVE_FFMPEG) @@ -1390,79 +1380,64 @@ if(WITH_FFMPEG OR HAVE_FFMPEG) else() status(" FFMPEG:" HAVE_FFMPEG THEN YES ELSE NO) endif() - status(" avcodec:" FFMPEG_libavcodec_FOUND THEN "YES (ver ${FFMPEG_libavcodec_VERSION})" ELSE NO) - status(" avformat:" FFMPEG_libavformat_FOUND THEN "YES (ver ${FFMPEG_libavformat_VERSION})" ELSE NO) - status(" avutil:" FFMPEG_libavutil_FOUND THEN "YES (ver ${FFMPEG_libavutil_VERSION})" ELSE NO) - status(" swscale:" FFMPEG_libswscale_FOUND THEN "YES (ver ${FFMPEG_libswscale_VERSION})" ELSE NO) - 
status(" avresample:" FFMPEG_libavresample_FOUND THEN "YES (ver ${FFMPEG_libavresample_VERSION})" ELSE NO) + status(" avcodec:" FFMPEG_libavcodec_VERSION THEN "YES (${FFMPEG_libavcodec_VERSION})" ELSE NO) + status(" avformat:" FFMPEG_libavformat_VERSION THEN "YES (${FFMPEG_libavformat_VERSION})" ELSE NO) + status(" avutil:" FFMPEG_libavutil_VERSION THEN "YES (${FFMPEG_libavutil_VERSION})" ELSE NO) + status(" swscale:" FFMPEG_libswscale_VERSION THEN "YES (${FFMPEG_libswscale_VERSION})" ELSE NO) + status(" avresample:" FFMPEG_libavresample_VERSION THEN "YES (${FFMPEG_libavresample_VERSION})" ELSE NO) endif() if(WITH_GSTREAMER OR HAVE_GSTREAMER) - status(" GStreamer:" HAVE_GSTREAMER THEN "" ELSE NO) - if(HAVE_GSTREAMER) - status(" base:" "YES (ver ${GSTREAMER_BASE_VERSION})") - status(" video:" "YES (ver ${GSTREAMER_VIDEO_VERSION})") - status(" app:" "YES (ver ${GSTREAMER_APP_VERSION})") - status(" riff:" "YES (ver ${GSTREAMER_RIFF_VERSION})") - status(" pbutils:" "YES (ver ${GSTREAMER_PBUTILS_VERSION})") - endif(HAVE_GSTREAMER) -endif() - -if(WITH_OPENNI OR HAVE_OPENNI) - status(" OpenNI:" HAVE_OPENNI THEN "YES (ver ${OPENNI_VERSION_STRING}, build ${OPENNI_VERSION_BUILD})" ELSE NO) - status(" OpenNI PrimeSensor Modules:" HAVE_OPENNI_PRIME_SENSOR_MODULE THEN "YES (${OPENNI_PRIME_SENSOR_MODULE})" ELSE NO) + status(" GStreamer:" HAVE_GSTREAMER THEN "YES (${GSTREAMER_VERSION})" ELSE NO) endif() if(WITH_OPENNI2 OR HAVE_OPENNI2) - status(" OpenNI2:" HAVE_OPENNI2 THEN "YES (ver ${OPENNI2_VERSION_STRING}, build ${OPENNI2_VERSION_BUILD})" ELSE NO) + status(" OpenNI2:" HAVE_OPENNI2 THEN "YES (${OPENNI2_VERSION})" ELSE NO) endif() if(WITH_PVAPI OR HAVE_PVAPI) - status(" PvAPI:" HAVE_PVAPI THEN YES ELSE NO) -endif() - -if(WITH_GIGEAPI OR HAVE_GIGE_API) - status(" GigEVisionSDK:" HAVE_GIGE_API THEN YES ELSE NO) + status(" PvAPI:" HAVE_PVAPI THEN YES ELSE NO) endif() if(WITH_ARAVIS OR HAVE_ARAVIS_API) - status(" Aravis SDK:" HAVE_ARAVIS_API THEN "YES (${ARAVIS_LIBRARIES})" ELSE NO) + status(" Aravis SDK:" HAVE_ARAVIS_API THEN "YES (${ARAVIS_VERSION})" ELSE NO) endif() -if(APPLE) - status(" AVFoundation:" HAVE_AVFOUNDATION THEN YES ELSE NO) - if(WITH_QUICKTIME OR HAVE_QUICKTIME) - status(" QuickTime:" HAVE_QUICKTIME THEN YES ELSE NO) - endif() +if(WITH_AVFOUNDATION OR HAVE_AVFOUNDATION) + status(" AVFoundation:" HAVE_AVFOUNDATION THEN YES ELSE NO) endif() -if(WITH_V4L OR HAVE_CAMV4L2 OR HAVE_VIDEOIO) +if(HAVE_CAP_IOS) + status(" iOS capture:" YES) +endif() + +if(WITH_V4L OR HAVE_V4L) ocv_build_features_string(v4l_status IF HAVE_CAMV4L2 THEN "linux/videodev2.h" IF HAVE_VIDEOIO THEN "sys/videoio.h" ELSE "NO") - status(" v4l/v4l2:" "${v4l_status}") + status(" v4l/v4l2:" HAVE_V4L THEN "YES (${v4l_status})" ELSE NO) endif() if(WITH_DSHOW OR HAVE_DSHOW) - status(" DirectShow:" HAVE_DSHOW THEN YES ELSE NO) + status(" DirectShow:" HAVE_DSHOW THEN YES ELSE NO) endif() if(WITH_MSMF OR HAVE_MSMF) - status(" Media Foundation:" HAVE_MSMF THEN YES ELSE NO) - status(" DXVA:" HAVE_MSMF_DXVA THEN YES ELSE NO) + status(" Media Foundation:" HAVE_MSMF THEN YES ELSE NO) + status(" DXVA:" HAVE_MSMF_DXVA THEN YES ELSE NO) endif() if(WITH_XIMEA OR HAVE_XIMEA) - status(" XIMEA:" HAVE_XIMEA THEN YES ELSE NO) + status(" XIMEA:" HAVE_XIMEA THEN YES ELSE NO) endif() if(WITH_XINE OR HAVE_XINE) - status(" Xine:" HAVE_XINE THEN "YES (ver ${ALIASOF_libxine_VERSION})" ELSE NO) + status(" Xine:" HAVE_XINE THEN "YES (${XINE_VERSION})" ELSE NO) endif() -if(WITH_INTELPERC OR HAVE_INTELPERC) - status(" Intel PerC:" HAVE_INTELPERC THEN "YES" 
ELSE NO) +if(WITH_LIBREALSENSE OR HAVE_LIBREALSENSE) + status(" Intel RealSense:" HAVE_LIBREALSENSE THEN "YES (${LIBREALSENSE_VERSION})" ELSE NO) endif() if(WITH_MFX OR HAVE_MFX) diff --git a/cmake/FindGstreamerWindows.cmake b/cmake/FindGstreamerWindows.cmake deleted file mode 100644 index 055e8a6094..0000000000 --- a/cmake/FindGstreamerWindows.cmake +++ /dev/null @@ -1,139 +0,0 @@ -FILE(TO_CMAKE_PATH "$ENV{GSTREAMER_DIR}" TRY1_DIR) -FILE(TO_CMAKE_PATH "${GSTREAMER_DIR}" TRY2_DIR) -FILE(GLOB GSTREAMER_DIR ${TRY1_DIR} ${TRY2_DIR}) - -FIND_PATH(GSTREAMER_gst_INCLUDE_DIR gst/gst.h - PATHS ${GSTREAMER_DIR}/include/gstreamer-1.0 ${GSTREAMER_DIR}/include /usr/local/include/gstreamer-1.0 /usr/include/gstreamer-1.0 - ENV INCLUDE DOC "Directory containing gst/gst.h include file") - -FIND_PATH(GSTREAMER_glib_INCLUDE_DIR glib.h - PATHS ${GSTREAMER_DIR}/include/glib-2.0/ - ENV INCLUDE DOC "Directory containing glib.h include file") - -FIND_PATH(GSTREAMER_glibconfig_INCLUDE_DIR glibconfig.h - PATHS ${GSTREAMER_DIR}/lib/glib-2.0/include - ENV INCLUDE DOC "Directory containing glibconfig.h include file") - -FIND_PATH(GSTREAMER_gstconfig_INCLUDE_DIR gst/gstconfig.h - PATHS ${GSTREAMER_DIR}/lib/gstreamer-1.0/include ${GSTREAMER_DIR}/include ${GSTREAMER_DIR}/include/gstreamer-1.0 ${GSTREAMER_DIR}/lib/include /usr/local/include/gstreamer-1.0 /usr/include/gstreamer-1.0 /usr/local/lib/include/gstreamer-1.0 /usr/lib/include/gstreamer-1.0 - ENV INCLUDE DOC "Directory containing gst/gstconfig.h include file") - -FIND_LIBRARY(GSTREAMER_gstaudio_LIBRARY NAMES gstaudio libgstaudio-1.0 gstaudio-1.0 - PATHS ${GSTREMAER_DIR}/lib ${GSTREAMER_DIR}/bin ${GSTREAMER_DIR}/win32/bin ${GSTREAMER_DIR}/bin/bin C:/gstreamer/bin ${GSTREAMER_DIR}/lib ${GSTREAMER_DIR}/win32/lib /usr/local/lib /usr/lib - ENV LIB - DOC "gstaudio library to link with" - NO_SYSTEM_ENVIRONMENT_PATH) - -FIND_LIBRARY(GSTREAMER_gstapp_LIBRARY NAMES gstapp libgstapp-1.0 gstapp-1.0 - PATHS ${GSTREAMER_DIR}/bin ${GSTREAMER_DIR}/lib ${GSTREAMER_DIR}/win32/bin ${GSTREAMER_DIR}/bin/bin C:/gstreamer/bin ${GSTREAMER_DIR}/lib ${GSTREAMER_DIR}/win32/lib /usr/local/lib /usr/lib - ENV LIB - DOC "gstapp library to link with" - NO_SYSTEM_ENVIRONMENT_PATH) - -FIND_LIBRARY(GSTREAMER_gstbase_LIBRARY NAMES gstbase libgstbase-1.0 gstbase-1.0 - PATHS ${GSTREAMER_DIR}/bin ${GSTREAMER_DIR}/lib ${GSTREAMER_DIR}/win32/bin ${GSTREAMER_DIR}/bin/bin C:/gstreamer/bin ${GSTREAMER_DIR}/lib ${GSTREAMER_DIR}/win32/lib /usr/local/lib /usr/lib - ENV LIB - DOC "gstbase library to link with" - NO_SYSTEM_ENVIRONMENT_PATH) - -FIND_LIBRARY(GLIB_gstcdda_LIBRARY NAMES gstcdda libgstcdda-1.0 gstcdda-1.0 - PATHS ${GSTREAMER_DIR}/bin ${GSTREAMER_DIR}/lib ${GSTREAMER_DIR}/win32/bin ${GSTREAMER_DIR}/bin/bin C:/gstreamer/bin ${GSTREAMER_DIR}/lib ${GSTREAMER_DIR}/win32/lib /usr/local/lib /usr/lib - ENV LIB - DOC "gstcdda library to link with" - NO_SYSTEM_ENVIRONMENT_PATH) - -FIND_LIBRARY(GSTREAMER_gstcontroller_LIBRARY NAMES gstcontroller libgstcontroller-1.0 gstcontroller-1.0 - PATHS ${GSTREAMER_DIR}/bin ${GSTREAMER_DIR}/lib ${GSTREAMER_DIR}/win32/bin ${GSTREAMER_DIR}/bin/bin C:/gstreamer/bin ${GSTREAMER_DIR}/lib ${GSTREAMER_DIR}/win32/lib /usr/local/lib /usr/lib - ENV LIB - DOC "gstcontroller library to link with" - NO_SYSTEM_ENVIRONMENT_PATH) - - -FIND_LIBRARY(GSTREAMER_gstnet_LIBRARY NAMES gstnet libgstnet-1.0 gstnet-1.0 - PATHS ${GSTREAMER_DIR}/bin ${GSTREAMER_DIR}/lib ${GSTREAMER_DIR}/win32/bin ${GSTREAMER_DIR}/bin/bin C:/gstreamer/bin ${GSTREAMER_DIR}/lib ${GSTREAMER_DIR}/win32/lib /usr/local/lib 
/usr/lib - ENV LIB - DOC "gstnet library to link with" - NO_SYSTEM_ENVIRONMENT_PATH) - -FIND_LIBRARY(GSTREAMER_gstpbutils_LIBRARY NAMES gstpbutils libgstpbutils-1.0 gstpbutils-1.0 - PATHS ${GSTREAMER_DIR}/bin ${GSTREAMER_DIR}/lib ${GSTREAMER_DIR}/win32/bin ${GSTREAMER_DIR}/bin/bin C:/gstreamer/bin ${GSTREAMER_DIR}/lib ${GSTREAMER_DIR}/win32/lib /usr/local/lib /usr/lib - ENV LIB - DOC "gstpbutils library to link with" - NO_SYSTEM_ENVIRONMENT_PATH) - -FIND_LIBRARY(GSTREAMER_gstreamer_LIBRARY NAMES gstreamer libgstreamer-1.0 gstreamer-1.0 - PATHS ${GSTREAMER_DIR}/bin ${GSTREAMER_DIR}/lib ${GSTREAMER_DIR}/win32/bin ${GSTREAMER_DIR}/bin/bin C:/gstreamer/bin ${GSTREAMER_DIR}/lib ${GSTREAMER_DIR}/win32/lib /usr/local/lib /usr/lib - ENV LIB - DOC "gstreamer library to link with" - NO_SYSTEM_ENVIRONMENT_PATH) - -FIND_LIBRARY(GSTREAMER_gstriff_LIBRARY NAMES gstriff libgstriff-1.0 gstriff-1.0 - PATHS ${GSTREAMER_DIR}/bin ${GSTREAMER_DIR}/lib ${GSTREAMER_DIR}/win32/bin ${GSTREAMER_DIR}/bin/bin C:/gstreamer/bin ${GSTREAMER_DIR}/lib ${GSTREAMER_DIR}/win32/lib /usr/local/lib /usr/lib - ENV LIB - DOC "gstriff library to link with" - NO_SYSTEM_ENVIRONMENT_PATH) - -FIND_LIBRARY(GSTREAMER_gstrtp_LIBRARY NAMES gstrtp libgstrtp-1.0 gstrtp-1.0 - PATHS ${GSTREAMER_DIR}/bin ${GSTREAMER_DIR}/lib ${GSTREAMER_DIR}/win32/bin ${GSTREAMER_DIR}/bin/bin C:/gstreamer/bin ${GSTREAMER_DIR}/lib ${GSTREAMER_DIR}/win32/lib /usr/local/lib /usr/lib - ENV LIB - DOC "gstrtp library to link with" - NO_SYSTEM_ENVIRONMENT_PATH) - -FIND_LIBRARY(GSTREAMER_gstrtsp_LIBRARY NAMES gstrtsp libgstrtsp-1.0 gstrtsp-1.0 - PATHS ${GSTREAMER_DIR}/bin ${GSTREAMER_DIR}/lib ${GSTREAMER_DIR}/win32/bin ${GSTREAMER_DIR}/bin/bin C:/gstreamer/bin ${GSTREAMER_DIR}/lib ${GSTREAMER_DIR}/win32/lib /usr/local/lib /usr/lib - ENV LIB - DOC "gstrtsp library to link with" - NO_SYSTEM_ENVIRONMENT_PATH) - -FIND_LIBRARY(GSTREAMER_gstsdp_LIBRARY NAMES gstsdp libgstsdp-1.0 gstsdp-1.0 - PATHS ${GSTREAMER_DIR}/bin ${GSTREAMER_DIR}/lib ${GSTREAMER_DIR}/win32/bin ${GSTREAMER_DIR}/bin/bin C:/gstreamer/bin ${GSTREAMER_DIR}/lib ${GSTREAMER_DIR}/win32/lib /usr/local/lib /usr/lib - ENV LIB - DOC "gstsdp library to link with" - NO_SYSTEM_ENVIRONMENT_PATH) - -FIND_LIBRARY(GSTREAMER_gsttag_LIBRARY NAMES gsttag libgsttag-1.0 gsttag-1.0 - PATHS ${GSTREAMER_DIR}/bin ${GSTREAMER_DIR}/lib ${GSTREAMER_DIR}/win32/bin ${GSTREAMER_DIR}/bin/bin C:/gstreamer/bin ${GSTREAMER_DIR}/lib ${GSTREAMER_DIR}/win32/lib /usr/local/lib /usr/lib - ENV LIB - DOC "gsttag library to link with" - NO_SYSTEM_ENVIRONMENT_PATH) - -FIND_LIBRARY(GSTREAMER_gstvideo_LIBRARY NAMES gstvideo libgstvideo-1.0 gstvideo-1.0 - PATHS ${GSTREAMER_DIR}/bin ${GSTREAMER_DIR}/lib ${GSTREAMER_DIR}/win32/bin ${GSTREAMER_DIR}/bin/bin C:/gstreamer/bin ${GSTREAMER_DIR}/lib ${GSTREAMER_DIR}/win32/lib /usr/local/lib /usr/lib - ENV LIB - DOC "gstvideo library to link with" - NO_SYSTEM_ENVIRONMENT_PATH) - -FIND_LIBRARY(GLIB_LIBRARY NAMES libglib-2.0 glib-2.0 - PATHS ${GSTREAMER_DIR}/lib - ENV LIB - DOC "Glib library" - NO_SYSTEM_ENVIRONMENT_PATH) - -FIND_LIBRARY(GOBJECT_LIBRARY NAMES libobject-2.0 gobject-2.0 - PATHS ${GSTREAMER_DIR}/lib - ENV LIB - DOC "Glib library" - NO_SYSTEM_ENVIRONMENT_PATH) - -IF (GSTREAMER_gst_INCLUDE_DIR AND GSTREAMER_gstconfig_INCLUDE_DIR AND - GSTREAMER_gstaudio_LIBRARY AND GSTREAMER_gstbase_LIBRARY AND GSTREAMER_gstcontroller_LIBRARY AND GSTREAMER_gstnet_LIBRARY - AND GSTREAMER_gstpbutils_LIBRARY AND GSTREAMER_gstreamer_LIBRARY AND - GSTREAMER_gstriff_LIBRARY AND GSTREAMER_gstrtp_LIBRARY AND 
GSTREAMER_gstrtsp_LIBRARY AND GSTREAMER_gstsdp_LIBRARY AND - GSTREAMER_gsttag_LIBRARY AND GSTREAMER_gstvideo_LIBRARY AND GLIB_LIBRARY AND GSTREAMER_gstapp_LIBRARY AND GOBJECT_LIBRARY) - SET(GSTREAMER_INCLUDE_DIR ${GSTREAMER_gst_INCLUDE_DIR} ${GSTREAMER_gstconfig_INCLUDE_DIR} ${GSTREAMER_glib_INCLUDE_DIR} ${GSTREAMER_glibconfig_INCLUDE_DIR}) - - list(REMOVE_DUPLICATES GSTREAMER_INCLUDE_DIR) - SET(GSTREAMER_LIBRARIES ${GSTREAMER_gstaudio_LIBRARY} ${GSTREAMER_gstbase_LIBRARY} - ${GSTREAMER_gstcontroller_LIBRARY} ${GSTREAMER_gstdataprotocol_LIBRARY} ${GSTREAMER_gstinterfaces_LIBRARY} - ${GSTREAMER_gstnet_LIBRARY} ${GSTREAMER_gstpbutils_LIBRARY} - ${GSTREAMER_gstreamer_LIBRARY} ${GSTREAMER_gstriff_LIBRARY} ${GSTREAMER_gstrtp_LIBRARY} - ${GSTREAMER_gstrtsp_LIBRARY} ${GSTREAMER_gstsdp_LIBRARY} ${GSTREAMER_gsttag_LIBRARY} ${GSTREAMER_gstvideo_LIBRARY} ${GLIB_LIBRARY} - ${GSTREAMER_gstapp_LIBRARY} ${GOBJECT_LIBRARY}) - - list(REMOVE_DUPLICATES GSTREAMER_LIBRARIES) - SET(GSTREAMER_FOUND TRUE) -ENDIF (GSTREAMER_gst_INCLUDE_DIR AND GSTREAMER_gstconfig_INCLUDE_DIR AND - GSTREAMER_gstaudio_LIBRARY AND GSTREAMER_gstbase_LIBRARY AND GSTREAMER_gstcontroller_LIBRARY - AND GSTREAMER_gstnet_LIBRARY AND GSTREAMER_gstpbutils_LIBRARY AND GSTREAMER_gstreamer_LIBRARY AND - GSTREAMER_gstriff_LIBRARY AND GSTREAMER_gstrtp_LIBRARY AND GSTREAMER_gstrtsp_LIBRARY AND GSTREAMER_gstsdp_LIBRARY AND - GSTREAMER_gsttag_LIBRARY AND GSTREAMER_gstvideo_LIBRARY AND GLIB_LIBRARY AND GSTREAMER_gstapp_LIBRARY AND GOBJECT_LIBRARY) \ No newline at end of file diff --git a/cmake/OpenCVDetectMediaSDK.cmake b/cmake/OpenCVDetectMediaSDK.cmake deleted file mode 100644 index 97f898cdf3..0000000000 --- a/cmake/OpenCVDetectMediaSDK.cmake +++ /dev/null @@ -1,59 +0,0 @@ -set(HAVE_MFX 0) - -if (UNIX) - set(root "$ENV{MFX_HOME}") -elseif(WIN32) - set(root "$ENV{INTELMEDIASDKROOT}") -endif() - -# TODO: ICC? MINGW? ARM? IOS? -if(WIN32) - if(X86_64) - set(arch "x64") - else() - set(arch "win32") - endif() -elseif(UNIX) - set(arch "lin_x64") -else() - # ??? 
-endif() - -find_path(MFX_INCLUDE mfxdefs.h PATHS "${root}/include" NO_DEFAULT_PATH) -message(STATUS "MFX_INCLUDE: ${MFX_INCLUDE} (${root}/include)") -find_library(MFX_LIBRARY NAMES mfx PATHS "${root}/lib/${arch}" NO_DEFAULT_PATH) -if(MSVC) - if(MSVC14) - find_library(MFX_LIBRARY NAMES libmfx_vs2015.lib PATHS "${root}/lib/${arch}" NO_DEFAULT_PATH) - else() - find_library(MFX_LIBRARY NAMES libmfx.lib PATHS "${root}/lib/${arch}" NO_DEFAULT_PATH) - endif() -endif() - -if(NOT MFX_INCLUDE OR NOT MFX_LIBRARY) - return() -endif() - -set(deps) - -if (UNIX) - find_library(MFX_VA_LIBRARY va) - find_library(MFX_VA_DRM_LIBRARY va-drm) - if (NOT MFX_VA_LIBRARY OR NOT MFX_VA_DRM_LIBRARY) - return() - endif() - add_library(mfx-va UNKNOWN IMPORTED) - set_target_properties(mfx-va PROPERTIES IMPORTED_LOCATION "${MFX_VA_LIBRARY}") - add_library(mfx-va-drm UNKNOWN IMPORTED) - set_target_properties(mfx-va-drm PROPERTIES IMPORTED_LOCATION "${MFX_VA_DRM_LIBRARY}") - list(APPEND deps mfx-va mfx-va-drm "-Wl,--exclude-libs=libmfx") -endif() - -add_library(mfx UNKNOWN IMPORTED) -set_target_properties(mfx PROPERTIES - IMPORTED_LOCATION "${MFX_LIBRARY}" - INTERFACE_INCLUDE_DIRECTORIES "${MFX_INCLUDE}" - INTERFACE_LINK_LIBRARIES "${deps}" -) - -set(HAVE_MFX 1) diff --git a/cmake/OpenCVFindIntelPerCSDK.cmake b/cmake/OpenCVFindIntelPerCSDK.cmake deleted file mode 100644 index 7243105601..0000000000 --- a/cmake/OpenCVFindIntelPerCSDK.cmake +++ /dev/null @@ -1,20 +0,0 @@ -# Main variables: -# INTELPERC_LIBRARIES and INTELPERC_INCLUDE to link Intel Perceptial Computing SDK modules -# HAVE_INTELPERC for conditional compilation OpenCV with/without Intel Perceptial Computing SDK - -if(X86_64) - find_path(INTELPERC_INCLUDE_DIR "pxcsession.h" PATHS "$ENV{PCSDK_DIR}include" DOC "Path to Intel Perceptual Computing SDK interface headers") - find_file(INTELPERC_LIBRARIES "libpxc.lib" PATHS "$ENV{PCSDK_DIR}lib/x64" DOC "Path to Intel Perceptual Computing SDK interface libraries") -else() - find_path(INTELPERC_INCLUDE_DIR "pxcsession.h" PATHS "$ENV{PCSDK_DIR}include" DOC "Path to Intel Perceptual Computing SDK interface headers") - find_file(INTELPERC_LIBRARIES "libpxc.lib" PATHS "$ENV{PCSDK_DIR}lib/Win32" DOC "Path to Intel Perceptual Computing SDK interface libraries") -endif() - -if(INTELPERC_INCLUDE_DIR AND INTELPERC_LIBRARIES) - set(HAVE_INTELPERC TRUE) -else() - set(HAVE_INTELPERC FALSE) - message(WARNING "Intel Perceptual Computing SDK library directory (set by INTELPERC_LIB_DIR variable) is not found or does not have Intel Perceptual Computing SDK libraries.") -endif() #if(INTELPERC_INCLUDE_DIR AND INTELPERC_LIBRARIES) - -mark_as_advanced(FORCE INTELPERC_LIBRARIES INTELPERC_INCLUDE_DIR) \ No newline at end of file diff --git a/cmake/OpenCVFindLibRealsense.cmake b/cmake/OpenCVFindLibRealsense.cmake deleted file mode 100644 index 32cff063f3..0000000000 --- a/cmake/OpenCVFindLibRealsense.cmake +++ /dev/null @@ -1,15 +0,0 @@ -# Main variables: -# LIBREALSENSE_LIBRARIES and LIBREALSENSE_INCLUDE to link Intel librealsense modules -# HAVE_LIBREALSENSE for conditional compilation OpenCV with/without librealsense - -find_path(LIBREALSENSE_INCLUDE_DIR "librealsense2/rs.hpp" PATHS "$ENV{LIBREALSENSE_INCLUDE}" DOC "Path to librealsense interface headers") -find_library(LIBREALSENSE_LIBRARIES "realsense2" PATHS "$ENV{LIBREALSENSE_LIB}" DOC "Path to librealsense interface libraries") - -if(LIBREALSENSE_INCLUDE_DIR AND LIBREALSENSE_LIBRARIES) - set(HAVE_LIBREALSENSE TRUE) -else() - set(HAVE_LIBREALSENSE FALSE) - message( WARNING, " 
librealsense include directory (set by LIBREALSENSE_INCLUDE_DIR variable) is not found or does not have librealsense include files." ) -endif() #if(LIBREALSENSE_INCLUDE_DIR AND LIBREALSENSE_LIBRARIES) - -mark_as_advanced(FORCE LIBREALSENSE_LIBRARIES LIBREALSENSE_INCLUDE_DIR) \ No newline at end of file diff --git a/cmake/OpenCVFindLibsVideo.cmake b/cmake/OpenCVFindLibsVideo.cmake index 35dda449b4..5436c4cf48 100644 --- a/cmake/OpenCVFindLibsVideo.cmake +++ b/cmake/OpenCVFindLibsVideo.cmake @@ -1,297 +1,8 @@ -# ---------------------------------------------------------------------------- -# Detect 3rd-party video IO libraries -# ---------------------------------------------------------------------------- - -# --- GStreamer --- -ocv_clear_vars(HAVE_GSTREAMER) -# try to find gstreamer 1.x first if 0.10 was not requested -if(WITH_GSTREAMER AND NOT WITH_GSTREAMER_0_10) - if(WIN32) - SET(CMAKE_MODULE_PATH "${CMAKE_MODULE_PATH};${CMAKE_CURRENT_LIST_DIR}") - FIND_PACKAGE(GstreamerWindows) - IF(GSTREAMER_gstbase_LIBRARY AND GSTREAMER_gstvideo_LIBRARY AND GSTREAMER_gstapp_LIBRARY AND GSTREAMER_gstpbutils_LIBRARY AND GSTREAMER_gstriff_LIBRARY) - set(HAVE_GSTREAMER TRUE) - set(GSTREAMER_BASE_VERSION 1.0) - set(GSTREAMER_VIDEO_VERSION 1.0) - set(GSTREAMER_APP_VERSION 1.0) - set(GSTREAMER_RIFF_VERSION 1.0) - set(GSTREAMER_PBUTILS_VERSION 1.0) - ENDIF(GSTREAMER_gstbase_LIBRARY AND GSTREAMER_gstvideo_LIBRARY AND GSTREAMER_gstapp_LIBRARY AND GSTREAMER_gstpbutils_LIBRARY AND GSTREAMER_gstriff_LIBRARY) - - else(WIN32) - CHECK_MODULE(gstreamer-base-1.0 HAVE_GSTREAMER_BASE VIDEOIO) - CHECK_MODULE(gstreamer-video-1.0 HAVE_GSTREAMER_VIDEO VIDEOIO) - CHECK_MODULE(gstreamer-app-1.0 HAVE_GSTREAMER_APP VIDEOIO) - CHECK_MODULE(gstreamer-riff-1.0 HAVE_GSTREAMER_RIFF VIDEOIO) - CHECK_MODULE(gstreamer-pbutils-1.0 HAVE_GSTREAMER_PBUTILS VIDEOIO) - - if(HAVE_GSTREAMER_BASE AND HAVE_GSTREAMER_VIDEO AND HAVE_GSTREAMER_APP AND HAVE_GSTREAMER_RIFF AND HAVE_GSTREAMER_PBUTILS) - set(HAVE_GSTREAMER TRUE) - set(GSTREAMER_BASE_VERSION ${ALIASOF_gstreamer-base-1.0_VERSION}) - set(GSTREAMER_VIDEO_VERSION ${ALIASOF_gstreamer-video-1.0_VERSION}) - set(GSTREAMER_APP_VERSION ${ALIASOF_gstreamer-app-1.0_VERSION}) - set(GSTREAMER_RIFF_VERSION ${ALIASOF_gstreamer-riff-1.0_VERSION}) - set(GSTREAMER_PBUTILS_VERSION ${ALIASOF_gstreamer-pbutils-1.0_VERSION}) - endif() - endif(WIN32) -endif(WITH_GSTREAMER AND NOT WITH_GSTREAMER_0_10) - -# if gstreamer 1.x was not found, or we specified we wanted 0.10, try to find it -if(WITH_GSTREAMER AND NOT HAVE_GSTREAMER OR WITH_GSTREAMER_0_10) - CHECK_MODULE(gstreamer-base-0.10 HAVE_GSTREAMER_BASE VIDEOIO) - CHECK_MODULE(gstreamer-video-0.10 HAVE_GSTREAMER_VIDEO VIDEOIO) - CHECK_MODULE(gstreamer-app-0.10 HAVE_GSTREAMER_APP VIDEOIO) - CHECK_MODULE(gstreamer-riff-0.10 HAVE_GSTREAMER_RIFF VIDEOIO) - CHECK_MODULE(gstreamer-pbutils-0.10 HAVE_GSTREAMER_PBUTILS VIDEOIO) - - if(HAVE_GSTREAMER_BASE AND HAVE_GSTREAMER_VIDEO AND HAVE_GSTREAMER_APP AND HAVE_GSTREAMER_RIFF AND HAVE_GSTREAMER_PBUTILS) - set(HAVE_GSTREAMER TRUE) - set(GSTREAMER_BASE_VERSION ${ALIASOF_gstreamer-base-0.10_VERSION}) - set(GSTREAMER_VIDEO_VERSION ${ALIASOF_gstreamer-video-0.10_VERSION}) - set(GSTREAMER_APP_VERSION ${ALIASOF_gstreamer-app-0.10_VERSION}) - set(GSTREAMER_RIFF_VERSION ${ALIASOF_gstreamer-riff-0.10_VERSION}) - set(GSTREAMER_PBUTILS_VERSION ${ALIASOF_gstreamer-pbutils-0.10_VERSION}) - endif() -endif(WITH_GSTREAMER AND NOT HAVE_GSTREAMER OR WITH_GSTREAMER_0_10) - -# --- PvApi --- -ocv_clear_vars(HAVE_PVAPI) -if(WITH_PVAPI) - 
find_path(PVAPI_INCLUDE_PATH "PvApi.h" - PATHS /usr/local /opt /usr ENV ProgramFiles ENV ProgramW6432 - PATH_SUFFIXES include "Allied Vision Technologies/GigESDK/inc-pc" "AVT GigE SDK/inc-pc" "GigESDK/inc-pc" - DOC "The path to PvAPI header") - - if(PVAPI_INCLUDE_PATH) - if(X86 AND NOT WIN32) - set(PVAPI_SDK_SUBDIR x86) - elseif(X86_64) - set(PVAPI_SDK_SUBDIR x64) - elseif(ARM) - set(PVAPI_SDK_SUBDIR arm) - endif() - - get_filename_component(_PVAPI_LIBRARY_HINT "${PVAPI_INCLUDE_PATH}/../lib-pc" ABSOLUTE) - - find_library(PVAPI_LIBRARY NAMES "PvAPI" PATHS "${_PVAPI_LIBRARY_HINT}") - - if(PVAPI_LIBRARY) - if(WIN32) - if(MINGW) - set(PVAPI_DEFINITIONS "-DPVDECL=__stdcall") - endif(MINGW) - endif() - set(HAVE_PVAPI TRUE) - endif() - endif(PVAPI_INCLUDE_PATH) -endif(WITH_PVAPI) - -# --- GigEVisionSDK --- -ocv_clear_vars(HAVE_GIGE_API) -if(WITH_GIGEAPI) - find_path(GIGEAPI_INCLUDE_PATH "GigEVisionSDK.h" - PATHS /usr/local /var /opt /usr ENV ProgramFiles ENV ProgramW6432 - PATH_SUFFIXES include "Smartek Vision Technologies/GigEVisionSDK/gige_cpp" "GigEVisionSDK/gige_cpp" "GigEVisionSDK/gige_c" - DOC "The path to Smartek GigEVisionSDK header") - FIND_LIBRARY(GIGEAPI_LIBRARIES NAMES GigEVisionSDK) - if(GIGEAPI_LIBRARIES AND GIGEAPI_INCLUDE_PATH) - set(HAVE_GIGE_API TRUE) - endif() -endif(WITH_GIGEAPI) - -# --- Aravis SDK --- -ocv_clear_vars(HAVE_ARAVIS_API) -if(WITH_ARAVIS) - check_module(glib-2.0 HAVE_ARAVIS_GLIB VIDEOIO) - if(HAVE_ARAVIS_GLIB) - find_path(ARAVIS_INCLUDE_PATH "arv.h" - PATHS /usr/local /var /opt /usr ENV ProgramFiles ENV ProgramW6432 - PATH_SUFFIXES include "aravis-0.6" "aravis-0.4" - DOC "The path to Aravis SDK headers") - find_library(ARAVIS_LIBRARIES NAMES "aravis-0.6" "aravis-0.4" ) - if(ARAVIS_LIBRARIES AND ARAVIS_INCLUDE_PATH) - set(HAVE_ARAVIS_API TRUE) - endif() - else() - message("Can not build Aravis support without glib2") - endif() -endif(WITH_ARAVIS) - -# --- Dc1394 --- -ocv_clear_vars(HAVE_DC1394 HAVE_DC1394_2) -if(WITH_1394) - if(WIN32 AND MINGW) - find_path(CMU1394_INCLUDE_PATH "/1394common.h" - PATH_SUFFIXES include - DOC "The path to cmu1394 headers") - find_path(DC1394_2_INCLUDE_PATH "/dc1394/dc1394.h" - PATH_SUFFIXES include - DOC "The path to DC1394 2.x headers") - if(CMU1394_INCLUDE_PATH AND DC1394_2_INCLUDE_PATH) - set(CMU1394_LIB_DIR "${CMU1394_INCLUDE_PATH}/../lib" CACHE PATH "Full path of CMU1394 library directory") - set(DC1394_2_LIB_DIR "${DC1394_2_INCLUDE_PATH}/../lib" CACHE PATH "Full path of DC1394 2.x library directory") - if(EXISTS "${CMU1394_LIB_DIR}/lib1394camera.a" AND EXISTS "${DC1394_2_LIB_DIR}/libdc1394.a") - set(HAVE_DC1394_2 TRUE) - endif() - endif() - if(HAVE_DC1394_2) - ocv_parse_pkg("libdc1394-2" "${DC1394_2_LIB_DIR}/pkgconfig" "") - ocv_include_directories(${DC1394_2_INCLUDE_PATH}) - set(VIDEOIO_LIBRARIES ${VIDEOIO_LIBRARIES} - "${DC1394_2_LIB_DIR}/libdc1394.a" - "${CMU1394_LIB_DIR}/lib1394camera.a") - endif(HAVE_DC1394_2) - else(WIN32 AND MINGW) - CHECK_MODULE(libdc1394-2 HAVE_DC1394_2 VIDEOIO) - if(NOT HAVE_DC1394_2) - CHECK_MODULE(libdc1394 HAVE_DC1394 VIDEOIO) - endif() - endif(WIN32 AND MINGW) -endif(WITH_1394) - -# --- xine --- -ocv_clear_vars(HAVE_XINE) -if(WITH_XINE) - CHECK_MODULE(libxine HAVE_XINE VIDEOIO) -endif(WITH_XINE) - -# --- V4L --- -ocv_clear_vars(HAVE_CAMV4L2 HAVE_VIDEOIO) -if(WITH_V4L) - CHECK_INCLUDE_FILE(linux/videodev2.h HAVE_CAMV4L2) - CHECK_INCLUDE_FILE(sys/videoio.h HAVE_VIDEOIO) -endif(WITH_V4L) - -# --- OpenNI --- -ocv_clear_vars(HAVE_OPENNI HAVE_OPENNI_PRIME_SENSOR_MODULE) -if(WITH_OPENNI) - 
include("${OpenCV_SOURCE_DIR}/cmake/OpenCVFindOpenNI.cmake") -endif(WITH_OPENNI) - -ocv_clear_vars(HAVE_OPENNI2) -if(WITH_OPENNI2) - include("${OpenCV_SOURCE_DIR}/cmake/OpenCVFindOpenNI2.cmake") -endif(WITH_OPENNI2) - -# --- XIMEA --- -ocv_clear_vars(HAVE_XIMEA) -if(WITH_XIMEA) - include("${OpenCV_SOURCE_DIR}/cmake/OpenCVFindXimea.cmake") - if(XIMEA_FOUND) - set(HAVE_XIMEA TRUE) - endif() -endif(WITH_XIMEA) - -# --- FFMPEG --- -ocv_clear_vars(HAVE_FFMPEG) -if(WITH_FFMPEG) # try FFmpeg autodetection - if(OPENCV_FFMPEG_USE_FIND_PACKAGE) - if(OPENCV_FFMPEG_USE_FIND_PACKAGE STREQUAL "1" OR OPENCV_FFMPEG_USE_FIND_PACKAGE STREQUAL "ON") - set(OPENCV_FFMPEG_USE_FIND_PACKAGE "FFMPEG") - endif() - find_package(${OPENCV_FFMPEG_USE_FIND_PACKAGE}) # Required components: AVCODEC AVFORMAT AVUTIL SWSCALE - if(FFMPEG_FOUND OR FFmpeg_FOUND) - set(HAVE_FFMPEG TRUE) - else() - message(STATUS "Can't find FFmpeg via find_package(${OPENCV_FFMPEG_USE_FIND_PACKAGE})") - endif() - elseif(WIN32 AND NOT ARM AND NOT OPENCV_FFMPEG_SKIP_DOWNLOAD) - include("${OpenCV_SOURCE_DIR}/3rdparty/ffmpeg/ffmpeg.cmake") - download_win_ffmpeg(FFMPEG_CMAKE_SCRIPT) - if(FFMPEG_CMAKE_SCRIPT) - set(HAVE_FFMPEG TRUE) - set(HAVE_FFMPEG_WRAPPER 1) - include("${FFMPEG_CMAKE_SCRIPT}") - endif() - elseif(PKG_CONFIG_FOUND) - ocv_check_modules(FFMPEG libavcodec libavformat libavutil libswscale) - ocv_check_modules(FFMPEG_libavresample libavresample) - if(FFMPEG_libavresample_FOUND) - ocv_append_build_options(FFMPEG FFMPEG_libavresample) - endif() - else() - message(STATUS "Can't find ffmpeg - 'pkg-config' utility is missing") - endif() -endif() -if(HAVE_FFMPEG - AND NOT HAVE_FFMPEG_WRAPPER -) - try_compile(__VALID_FFMPEG - "${OpenCV_BINARY_DIR}" - "${OpenCV_SOURCE_DIR}/cmake/checks/ffmpeg_test.cpp" - CMAKE_FLAGS "-DINCLUDE_DIRECTORIES:STRING=${FFMPEG_INCLUDE_DIRS}" - "-DLINK_DIRECTORIES:STRING=${FFMPEG_LIBRARY_DIRS}" - "-DLINK_LIBRARIES:STRING=${FFMPEG_LIBRARIES}" - OUTPUT_VARIABLE TRY_OUT - ) - if(NOT __VALID_FFMPEG) - #message(FATAL_ERROR "FFMPEG: test check build log:\n${TRY_OUT}") - message(STATUS "WARNING: Can't build ffmpeg test code") - set(HAVE_FFMPEG FALSE) - else() - ocv_append_build_options(VIDEOIO FFMPEG) - endif() -endif() - -# --- VideoInput/DirectShow --- -if(WITH_DSHOW) - if(MSVC_VERSION GREATER 1499) - set(HAVE_DSHOW 1) - elseif(NOT HAVE_DSHOW) - check_include_file(DShow.h HAVE_DSHOW) - endif() -endif(WITH_DSHOW) - -# --- VideoInput/Microsoft Media Foundation --- -ocv_clear_vars(HAVE_MSMF) -if(WITH_MSMF) - check_include_file(Mfapi.h HAVE_MSMF) - set(HAVE_MSMF_DXVA "") - if(WITH_MSMF_DXVA) - check_include_file(D3D11.h D3D11_found) - check_include_file(D3d11_4.h D3D11_4_found) - if(D3D11_found AND D3D11_4_found) - set(HAVE_MSMF_DXVA YES) - endif() - endif() -endif() - # --- Extra HighGUI and VideoIO libs on Windows --- if(WIN32) list(APPEND HIGHGUI_LIBRARIES comctl32 gdi32 ole32 setupapi ws2_32) - if(MINGW64) - list(APPEND VIDEOIO_LIBRARIES avifil32 avicap32 winmm msvfw32) - elseif(MINGW) - list(APPEND VIDEOIO_LIBRARIES winmm) - endif() endif(WIN32) -if(APPLE) - if(WITH_AVFOUNDATION) - set(HAVE_AVFOUNDATION YES) - endif() -endif(APPLE) - -# --- Intel librealsense --- -if(WITH_LIBREALSENSE) - include("${OpenCV_SOURCE_DIR}/cmake/OpenCVFindLibRealsense.cmake") -endif(WITH_LIBREALSENSE) - -# --- Intel Perceptual Computing SDK --- -if(WITH_INTELPERC) - include("${OpenCV_SOURCE_DIR}/cmake/OpenCVFindIntelPerCSDK.cmake") -endif(WITH_INTELPERC) - -if(WITH_MFX) - include("${OpenCV_SOURCE_DIR}/cmake/OpenCVDetectMediaSDK.cmake") -endif() - 
-# --- gPhoto2 --- -ocv_clear_vars(HAVE_GPHOTO2) -if(WITH_GPHOTO2) - CHECK_MODULE(libgphoto2 HAVE_GPHOTO2 VIDEOIO) -endif(WITH_GPHOTO2) - # --- VA & VA_INTEL --- if(WITH_VA_INTEL) include("${OpenCV_SOURCE_DIR}/cmake/OpenCVFindVA_INTEL.cmake") diff --git a/cmake/OpenCVFindOpenNI.cmake b/cmake/OpenCVFindOpenNI.cmake deleted file mode 100644 index 754186894f..0000000000 --- a/cmake/OpenCVFindOpenNI.cmake +++ /dev/null @@ -1,89 +0,0 @@ -# Main variables: -# OPENNI_LIBRARY and OPENNI_INCLUDES to link OpenCV modules with OpenNI -# HAVE_OPENNI for conditional compilation OpenCV with/without OpenNI - -if(NOT "${OPENNI_LIB_DIR}" STREQUAL "${OPENNI_LIB_DIR_INTERNAL}") - unset(OPENNI_LIBRARY CACHE) - unset(OPENNI_LIB_DIR CACHE) -endif() - -if(NOT "${OPENNI_INCLUDE_DIR}" STREQUAL "${OPENNI_INCLUDE_DIR_INTERNAL}") - unset(OPENNI_INCLUDES CACHE) - unset(OPENNI_INCLUDE_DIR CACHE) -endif() - -if(NOT "${OPENNI_PRIME_SENSOR_MODULE_BIN_DIR}" STREQUAL "${OPENNI_PRIME_SENSOR_MODULE_BIN_DIR_INTERNAL}") - unset(OPENNI_PRIME_SENSOR_MODULE CACHE) - unset(OPENNI_PRIME_SENSOR_MODULE_BIN_DIR CACHE) -endif() - -if(WIN32) - if(NOT (MSVC64 OR MINGW64)) - find_file(OPENNI_INCLUDES "XnCppWrapper.h" PATHS "$ENV{OPEN_NI_INSTALL_PATH}Include" DOC "OpenNI c++ interface header") - find_library(OPENNI_LIBRARY "OpenNI" PATHS $ENV{OPEN_NI_LIB} DOC "OpenNI library") - else() - find_file(OPENNI_INCLUDES "XnCppWrapper.h" PATHS "$ENV{OPEN_NI_INSTALL_PATH64}Include" DOC "OpenNI c++ interface header") - find_library(OPENNI_LIBRARY "OpenNI64" PATHS $ENV{OPEN_NI_LIB64} DOC "OpenNI library") - endif() -elseif(UNIX OR APPLE) - find_file(OPENNI_INCLUDES "XnCppWrapper.h" PATHS "/usr/include/ni" "/usr/include/openni" DOC "OpenNI c++ interface header") - find_library(OPENNI_LIBRARY "OpenNI" PATHS "/usr/lib" DOC "OpenNI library") -endif() - -if(OPENNI_LIBRARY AND OPENNI_INCLUDES) - set(HAVE_OPENNI TRUE) - # the check: are PrimeSensor Modules for OpenNI installed - if(WIN32) - if(NOT (MSVC64 OR MINGW64)) - find_file(OPENNI_PRIME_SENSOR_MODULE "XnCore.dll" PATHS "$ENV{OPEN_NI_INSTALL_PATH}../PrimeSense/Sensor/Bin" "$ENV{OPEN_NI_INSTALL_PATH}../PrimeSense/SensorKinect/Bin" DOC "Core library of PrimeSensor Modules for OpenNI") - else() - find_file(OPENNI_PRIME_SENSOR_MODULE "XnCore64.dll" PATHS "$ENV{OPEN_NI_INSTALL_PATH64}../PrimeSense/Sensor/Bin64" "$ENV{OPEN_NI_INSTALL_PATH64}../PrimeSense/SensorKinect/Bin64" DOC "Core library of PrimeSensor Modules for OpenNI") - endif() - elseif(UNIX OR APPLE) - find_library(OPENNI_PRIME_SENSOR_MODULE "XnCore" PATHS "/usr/lib" DOC "Core library of PrimeSensor Modules for OpenNI") - endif() - - if(OPENNI_PRIME_SENSOR_MODULE) - set(HAVE_OPENNI_PRIME_SENSOR_MODULE TRUE) - endif() -endif() #if(OPENNI_LIBRARY AND OPENNI_INCLUDES) - -get_filename_component(OPENNI_LIB_DIR "${OPENNI_LIBRARY}" PATH) -get_filename_component(OPENNI_INCLUDE_DIR ${OPENNI_INCLUDES} PATH) -get_filename_component(OPENNI_PRIME_SENSOR_MODULE_BIN_DIR "${OPENNI_PRIME_SENSOR_MODULE}" PATH) - -if(HAVE_OPENNI) - set(OPENNI_LIB_DIR "${OPENNI_LIB_DIR}" CACHE PATH "Path to OpenNI libraries" FORCE) - set(OPENNI_INCLUDE_DIR "${OPENNI_INCLUDE_DIR}" CACHE PATH "Path to OpenNI headers" FORCE) - set(OPENNI_PRIME_SENSOR_MODULE_BIN_DIR "${OPENNI_PRIME_SENSOR_MODULE_BIN_DIR}" CACHE PATH "Path to OpenNI PrimeSensor Module binaries" FORCE) -endif() - -if(OPENNI_LIBRARY) - set(OPENNI_LIB_DIR_INTERNAL "${OPENNI_LIB_DIR}" CACHE INTERNAL "This is the value of the last time OPENNI_LIB_DIR was set successfully." 
FORCE) -else() - message( WARNING, " OpenNI library directory (set by OPENNI_LIB_DIR variable) is not found or does not have OpenNI libraries." ) -endif() - -if(OPENNI_INCLUDES) - set(OPENNI_INCLUDE_DIR_INTERNAL "${OPENNI_INCLUDE_DIR}" CACHE INTERNAL "This is the value of the last time OPENNI_INCLUDE_DIR was set successfully." FORCE) -else() - message( WARNING, " OpenNI include directory (set by OPENNI_INCLUDE_DIR variable) is not found or does not have OpenNI include files." ) -endif() - -if(OPENNI_PRIME_SENSOR_MODULE) - set(OPENNI_PRIME_SENSOR_MODULE_BIN_DIR_INTERNAL "${OPENNI_PRIME_SENSOR_MODULE_BIN_DIR}" CACHE INTERNAL "This is the value of the last time OPENNI_PRIME_SENSOR_MODULE_BIN_DIR was set successfully." FORCE) -else() - message( WARNING, " PrimeSensor Module binaries directory (set by OPENNI_PRIME_SENSOR_MODULE_BIN_DIR variable) is not found or does not have PrimeSensor Module binaries." ) -endif() - -mark_as_advanced(FORCE OPENNI_PRIME_SENSOR_MODULE) -mark_as_advanced(FORCE OPENNI_LIBRARY) -mark_as_advanced(FORCE OPENNI_INCLUDES) - -if(HAVE_OPENNI) - ocv_parse_header("${OPENNI_INCLUDE_DIR}/XnVersion.h" OPENNI_VERSION_LINES XN_MAJOR_VERSION XN_MINOR_VERSION XN_MAINTENANCE_VERSION XN_BUILD_VERSION) - if(XN_MAJOR_VERSION) - set(OPENNI_VERSION_STRING ${XN_MAJOR_VERSION}.${XN_MINOR_VERSION}.${XN_MAINTENANCE_VERSION} CACHE INTERNAL "OpenNI version") - set(OPENNI_VERSION_BUILD ${XN_BUILD_VERSION} CACHE INTERNAL "OpenNI build version") - endif() -endif() diff --git a/cmake/OpenCVFindOpenNI2.cmake b/cmake/OpenCVFindOpenNI2.cmake deleted file mode 100644 index 8f1b54750d..0000000000 --- a/cmake/OpenCVFindOpenNI2.cmake +++ /dev/null @@ -1,61 +0,0 @@ -# Main variables: -# OPENNI2_LIBRARY and OPENNI2_INCLUDES to link OpenCV modules with OpenNI2 -# HAVE_OPENNI2 for conditional compilation OpenCV with/without OpenNI2 - -if(NOT "${OPENNI2_LIB_DIR}" STREQUAL "${OPENNI2_LIB_DIR_INTERNAL}") - unset(OPENNI2_LIBRARY CACHE) - unset(OPENNI2_LIB_DIR CACHE) -endif() - -if(NOT "${OPENNI2_INCLUDE_DIR}" STREQUAL "${OPENNI2_INCLUDE_DIR_INTERNAL}") - unset(OPENNI2_INCLUDES CACHE) - unset(OPENNI2_INCLUDE_DIR CACHE) -endif() - -if(WIN32) - if(NOT (MSVC64 OR MINGW64)) - find_file(OPENNI2_INCLUDES "OpenNI.h" PATHS $ENV{OPENNI2_INCLUDE} "$ENV{OPEN_NI_INSTALL_PATH}Include" DOC "OpenNI2 c++ interface header") - find_library(OPENNI2_LIBRARY "OpenNI2" PATHS $ENV{OPENNI2_LIB} DOC "OpenNI2 library") - else() - find_file(OPENNI2_INCLUDES "OpenNI.h" PATHS $ENV{OPENNI2_INCLUDE64} "$ENV{OPEN_NI_INSTALL_PATH64}Include" DOC "OpenNI2 c++ interface header") - find_library(OPENNI2_LIBRARY "OpenNI2" PATHS $ENV{OPENNI2_LIB64} DOC "OpenNI2 library") - endif() -elseif(UNIX OR APPLE) - find_file(OPENNI2_INCLUDES "OpenNI.h" PATHS "/usr/include/ni2" "/usr/include/openni2" $ENV{OPENNI2_INCLUDE} DOC "OpenNI2 c++ interface header") - find_library(OPENNI2_LIBRARY "OpenNI2" PATHS "/usr/lib" $ENV{OPENNI2_REDIST} DOC "OpenNI2 library") -endif() - -if(OPENNI2_LIBRARY AND OPENNI2_INCLUDES) - set(HAVE_OPENNI2 TRUE) -endif() #if(OPENNI2_LIBRARY AND OPENNI2_INCLUDES) - -get_filename_component(OPENNI2_LIB_DIR "${OPENNI2_LIBRARY}" PATH) -get_filename_component(OPENNI2_INCLUDE_DIR ${OPENNI2_INCLUDES} PATH) - -if(HAVE_OPENNI2) - set(OPENNI2_LIB_DIR "${OPENNI2_LIB_DIR}" CACHE PATH "Path to OpenNI2 libraries" FORCE) - set(OPENNI2_INCLUDE_DIR "${OPENNI2_INCLUDE_DIR}" CACHE PATH "Path to OpenNI2 headers" FORCE) -endif() - -if(OPENNI2_LIBRARY) - set(OPENNI2_LIB_DIR_INTERNAL "${OPENNI2_LIB_DIR}" CACHE INTERNAL "This is the value of the last time 
OPENNI_LIB_DIR was set successfully." FORCE) -else() - message( WARNING, " OpenNI2 library directory (set by OPENNI2_LIB_DIR variable) is not found or does not have OpenNI2 libraries." ) -endif() - -if(OPENNI2_INCLUDES) - set(OPENNI2_INCLUDE_DIR_INTERNAL "${OPENNI2_INCLUDE_DIR}" CACHE INTERNAL "This is the value of the last time OPENNI2_INCLUDE_DIR was set successfully." FORCE) -else() - message( WARNING, " OpenNI2 include directory (set by OPENNI2_INCLUDE_DIR variable) is not found or does not have OpenNI2 include files." ) -endif() - -mark_as_advanced(FORCE OPENNI2_LIBRARY) -mark_as_advanced(FORCE OPENNI2_INCLUDES) - -if(HAVE_OPENNI2) - ocv_parse_header("${OPENNI2_INCLUDE_DIR}/OniVersion.h" ONI_VERSION_LINE ONI_VERSION_MAJOR ONI_VERSION_MINOR ONI_VERSION_MAINTENANCE ONI_VERSION_BUILD) - if(ONI_VERSION_MAJOR) - set(OPENNI2_VERSION_STRING ${ONI_VERSION_MAJOR}.${ONI_VERSION_MINOR}.${ONI_VERSION_MAINTENANCE} CACHE INTERNAL "OpenNI2 version") - set(OPENNI2_VERSION_BUILD ${ONI_VERSION_BUILD} CACHE INTERNAL "OpenNI2 build version") - endif() -endif() diff --git a/cmake/OpenCVFindXimea.cmake b/cmake/OpenCVFindXimea.cmake index 2d93292c19..e69de29bb2 100644 --- a/cmake/OpenCVFindXimea.cmake +++ b/cmake/OpenCVFindXimea.cmake @@ -1,52 +0,0 @@ -# - Find XIMEA -# This module finds if XIMEA Software package is installed -# and determines where the binaries and header files are. -# This code sets the following variables: -# -# XIMEA_FOUND - True if XIMEA API found -# XIMEA_PATH: - Path to the XIMEA API folder -# XIMEA_LIBRARY_DIR - XIMEA libraries folder -# -# Created: 5 Aug 2011 by Marian Zajko (marian.zajko@ximea.com) -# Updated: 25 June 2012 by Igor Kuzmin (parafin@ximea.com) -# Updated: 22 October 2012 by Marian Zajko (marian.zajko@ximea.com) -# - -set(XIMEA_FOUND) -set(XIMEA_PATH) -set(XIMEA_LIBRARY_DIR) - -if(WIN32) - # Try to find the XIMEA API path in registry. 
- GET_FILENAME_COMPONENT(XIMEA_PATH "[HKEY_CURRENT_USER\\Software\\XIMEA\\CamSupport\\API;Path]" ABSOLUTE) - - if(EXISTS ${XIMEA_PATH}) - set(XIMEA_FOUND 1) - # set LIB folders - if(X86_64) - set(XIMEA_LIBRARY_DIR "${XIMEA_PATH}/x64") - else() - set(XIMEA_LIBRARY_DIR "${XIMEA_PATH}/x86") - endif() - else() - set(XIMEA_FOUND 0) - endif() -elseif(APPLE) - if(EXISTS /Library/Frameworks/m3api.framework) - set(XIMEA_FOUND 1) - else() - set(XIMEA_FOUND 0) - endif() -else() - if(EXISTS /opt/XIMEA) - set(XIMEA_FOUND 1) - # set folders - set(XIMEA_PATH /opt/XIMEA/include) - else() - set(XIMEA_FOUND 0) - endif() -endif() - -mark_as_advanced(FORCE XIMEA_FOUND) -mark_as_advanced(FORCE XIMEA_PATH) -mark_as_advanced(FORCE XIMEA_LIBRARY_DIR) diff --git a/cmake/OpenCVGenConfig.cmake b/cmake/OpenCVGenConfig.cmake index d78f649839..f452678adb 100644 --- a/cmake/OpenCVGenConfig.cmake +++ b/cmake/OpenCVGenConfig.cmake @@ -41,7 +41,7 @@ foreach(m ${OPENCV_MODULES_BUILD}) endif() endforeach() -export(TARGETS ${OpenCVModules_TARGETS} FILE "${CMAKE_BINARY_DIR}/OpenCVModules.cmake") +export(EXPORT OpenCVModules FILE "${CMAKE_BINARY_DIR}/OpenCVModules.cmake") if(TARGET ippicv AND NOT BUILD_SHARED_LIBS) set(USE_IPPICV TRUE) diff --git a/cmake/OpenCVUtils.cmake b/cmake/OpenCVUtils.cmake index 6c3b5bb946..26596758b4 100644 --- a/cmake/OpenCVUtils.cmake +++ b/cmake/OpenCVUtils.cmake @@ -1243,14 +1243,6 @@ macro(ocv_parse_header2 LIBNAME HDR_PATH VARNAME) endif() endmacro() -# read single version info from the pkg file -macro(ocv_parse_pkg LIBNAME PKG_PATH SCOPE) - if(EXISTS "${PKG_PATH}/${LIBNAME}.pc") - file(STRINGS "${PKG_PATH}/${LIBNAME}.pc" line_to_parse REGEX "^Version:[ \t]+[0-9.]*.*$" LIMIT_COUNT 1) - STRING(REGEX REPLACE ".*Version: ([^ ]+).*" "\\1" ALIASOF_${LIBNAME}_VERSION "${line_to_parse}" ) - endif() -endmacro() - ################################################################################################ # short command to setup source group function(ocv_source_group group) diff --git a/cmake/templates/cvconfig.h.in b/cmake/templates/cvconfig.h.in index 60db8361cd..271bf275ef 100644 --- a/cmake/templates/cvconfig.h.in +++ b/cmake/templates/cvconfig.h.in @@ -22,12 +22,6 @@ /* Compile for 'virtual' NVIDIA PTX architectures */ #define CUDA_ARCH_PTX "${OPENCV_CUDA_ARCH_PTX}" -/* AVFoundation video libraries */ -#cmakedefine HAVE_AVFOUNDATION - -/* V4L2 capturing support */ -#cmakedefine HAVE_CAMV4L2 - /* AMD's Basic Linear Algebra Subprograms Library*/ #cmakedefine HAVE_CLAMDBLAS @@ -49,12 +43,6 @@ /* NVIDIA CUDA Fast Fourier Transform (FFT) API*/ #cmakedefine HAVE_CUFFT -/* IEEE1394 capturing support */ -#cmakedefine HAVE_DC1394 - -/* IEEE1394 capturing support - libdc1394 v2.x */ -#cmakedefine HAVE_DC1394_2 - /* DirectX */ #cmakedefine HAVE_DIRECTX #cmakedefine HAVE_DIRECTX_NV12 @@ -62,21 +50,12 @@ #cmakedefine HAVE_D3D10 #cmakedefine HAVE_D3D9 -/* DirectShow Video Capture library */ -#cmakedefine HAVE_DSHOW - /* Eigen Matrix & Linear Algebra Library */ #cmakedefine HAVE_EIGEN -/* FFMpeg video library */ -#cmakedefine HAVE_FFMPEG - /* Geospatial Data Abstraction Library */ #cmakedefine HAVE_GDAL -/* GStreamer multimedia framework */ -#cmakedefine HAVE_GSTREAMER - /* GTK+ 2.0 Thread support */ #cmakedefine HAVE_GTHREAD @@ -92,9 +71,6 @@ /* Define to 1 if you have the header file. 
*/ #cmakedefine HAVE_INTTYPES_H 1 -/* Intel Perceptual Computing SDK library */ -#cmakedefine HAVE_INTELPERC - /* Intel Integrated Performance Primitives */ #cmakedefine HAVE_IPP #cmakedefine HAVE_IPP_ICV @@ -113,9 +89,6 @@ /* GDCM DICOM codec */ #cmakedefine HAVE_GDCM -/* Microsoft Media Foundation Capture library */ -#cmakedefine HAVE_MSMF - /* NVIDIA Video Decoding API*/ #cmakedefine HAVE_NVCUVID @@ -133,15 +106,6 @@ /* OpenGL support*/ #cmakedefine HAVE_OPENGL -/* OpenNI library */ -#cmakedefine HAVE_OPENNI - -/* OpenNI library */ -#cmakedefine HAVE_OPENNI2 - -/* librealsense library */ -#cmakedefine HAVE_LIBREALSENSE - /* PNG codec */ #cmakedefine HAVE_PNG @@ -166,34 +130,19 @@ /* TIFF codec */ #cmakedefine HAVE_TIFF -/* V4L2 capturing support in videoio.h */ -#cmakedefine HAVE_VIDEOIO - /* Win32 UI */ #cmakedefine HAVE_WIN32UI -/* XIMEA camera support */ -#cmakedefine HAVE_XIMEA - -/* Xine video library */ -#cmakedefine HAVE_XINE - /* Define if your processor stores words with the most significant byte first (like Motorola and SPARC, unlike Intel and VAX). */ #cmakedefine WORDS_BIGENDIAN -/* gPhoto2 library */ -#cmakedefine HAVE_GPHOTO2 - /* VA library (libva) */ #cmakedefine HAVE_VA /* Intel VA-API/OpenCL */ #cmakedefine HAVE_VA_INTEL -/* Intel Media SDK */ -#cmakedefine HAVE_MFX - /* Lapack */ #cmakedefine HAVE_LAPACK @@ -203,23 +152,6 @@ /* OpenVX */ #cmakedefine HAVE_OPENVX -#if defined(HAVE_XINE) || \ - defined(HAVE_GSTREAMER) || \ - defined(HAVE_AVFOUNDATION) || \ - /*defined(HAVE_OPENNI) || too specialized */ \ - defined(HAVE_FFMPEG) || \ - defined(HAVE_MSMF) -#define HAVE_VIDEO_INPUT -#endif - -#if /*defined(HAVE_XINE) || */\ - defined(HAVE_GSTREAMER) || \ - defined(HAVE_AVFOUNDATION) || \ - defined(HAVE_FFMPEG) || \ - defined(HAVE_MSMF) -#define HAVE_VIDEO_OUTPUT -#endif - /* OpenCV trace utilities */ #cmakedefine OPENCV_TRACE diff --git a/doc/tutorials/videoio/intelperc.markdown b/doc/tutorials/videoio/intelperc.markdown index 69e316d535..6077a64a4e 100644 --- a/doc/tutorials/videoio/intelperc.markdown +++ b/doc/tutorials/videoio/intelperc.markdown @@ -1,6 +1,8 @@ Using Creative Senz3D and other Intel Perceptual Computing SDK compatible depth sensors {#tutorial_intelperc} ======================================================================================= +**Note**: this tutorial is partially obsolete since PerC SDK has been replaced with RealSense SDK + Depth sensors compatible with Intel Perceptual Computing SDK are supported through VideoCapture class. Depth map, RGB image and some other formats of output can be retrieved by using familiar interface of VideoCapture. 
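The tutorial paragraph above describes the retrieval pattern only in words; a minimal sketch, assuming an OpenCV build where the PerC/RealSense videoio backend is available, could look like the snippet below. cv::CAP_INTELPERC and the CAP_INTELPERC_* retrieve channels are the existing constants from opencv2/videoio.hpp; everything else (window names, scaling factor, loop structure) is illustrative only.

    #include <opencv2/videoio.hpp>
    #include <opencv2/highgui.hpp>

    int main()
    {
        // Open the depth sensor through the PerC/RealSense capture API id.
        cv::VideoCapture capture(cv::CAP_INTELPERC);
        if (!capture.isOpened())
            return 1; // backend not built in, or no compatible sensor attached

        cv::Mat depth, bgr;
        for (;;)
        {
            if (!capture.grab())
                break;
            // Retrieve individual streams of the same grabbed frame set.
            capture.retrieve(depth, cv::CAP_INTELPERC_DEPTH_MAP); // 16-bit depth map
            capture.retrieve(bgr,   cv::CAP_INTELPERC_IMAGE);     // BGR image

            cv::imshow("depth", depth * 10); // scaled only to make the map visible
            cv::imshow("color", bgr);
            if (cv::waitKey(30) >= 0)
                break;
        }
        return 0;
    }

Since the librealsense-based backend introduced by this patch is reached through the same capture API identifier, code written in this style should carry over after the PerC-to-RealSense switch noted in the tutorial.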
diff --git a/modules/video/perf/opencl/perf_bgfg_knn.cpp b/modules/video/perf/opencl/perf_bgfg_knn.cpp index 30419af422..833db66d86 100644 --- a/modules/video/perf/opencl/perf_bgfg_knn.cpp +++ b/modules/video/perf/opencl/perf_bgfg_knn.cpp @@ -6,7 +6,6 @@ #include "opencv2/ts/ocl_perf.hpp" #ifdef HAVE_OPENCL -#ifdef HAVE_VIDEO_INPUT #include "../perf_bgfg_utils.hpp" namespace cvtest { @@ -32,7 +31,8 @@ OCL_PERF_TEST_P(KNN_Apply, KNN, Combine(Values("gpu/video/768x576.avi", "gpu/vid vector frame_buffer(nFrame); cv::VideoCapture cap(inputFile); - ASSERT_TRUE(cap.isOpened()); + if (!cap.isOpened()) + throw SkipTestException("Video file can not be opened"); prepareData(cap, cn, frame_buffer); UMat u_foreground; @@ -65,7 +65,8 @@ OCL_PERF_TEST_P(KNN_GetBackgroundImage, KNN, Values( vector frame_buffer(nFrame); cv::VideoCapture cap(inputFile); - ASSERT_TRUE(cap.isOpened()); + if (!cap.isOpened()) + throw SkipTestException("Video file can not be opened"); prepareData(cap, cn, frame_buffer, skipFrames); UMat u_foreground, u_background; @@ -92,4 +93,3 @@ OCL_PERF_TEST_P(KNN_GetBackgroundImage, KNN, Values( }}// namespace cvtest::ocl #endif -#endif diff --git a/modules/video/perf/opencl/perf_bgfg_mog2.cpp b/modules/video/perf/opencl/perf_bgfg_mog2.cpp index 9952be79f6..8e5f095257 100644 --- a/modules/video/perf/opencl/perf_bgfg_mog2.cpp +++ b/modules/video/perf/opencl/perf_bgfg_mog2.cpp @@ -6,7 +6,6 @@ #include "opencv2/ts/ocl_perf.hpp" #ifdef HAVE_OPENCL -#ifdef HAVE_VIDEO_INPUT #include "../perf_bgfg_utils.hpp" namespace opencv_test { @@ -32,7 +31,8 @@ OCL_PERF_TEST_P(MOG2_Apply, Mog2, Combine(Values("gpu/video/768x576.avi", "gpu/v vector frame_buffer(nFrame); cv::VideoCapture cap(inputFile); - ASSERT_TRUE(cap.isOpened()); + if (!cap.isOpened()) + throw SkipTestException("Video file can not be opened"); prepareData(cap, cn, frame_buffer); UMat u_foreground; @@ -65,7 +65,8 @@ OCL_PERF_TEST_P(MOG2_GetBackgroundImage, Mog2, Values( vector frame_buffer(nFrame); cv::VideoCapture cap(inputFile); - ASSERT_TRUE(cap.isOpened()); + if (!cap.isOpened()) + throw SkipTestException("Video file can not be opened"); prepareData(cap, cn, frame_buffer, skipFrames); UMat u_foreground, u_background; @@ -92,4 +93,3 @@ OCL_PERF_TEST_P(MOG2_GetBackgroundImage, Mog2, Values( }}// namespace opencv_test::ocl #endif -#endif diff --git a/modules/video/perf/perf_bgfg_knn.cpp b/modules/video/perf/perf_bgfg_knn.cpp index d9ead09fd9..23bb1fe0a5 100644 --- a/modules/video/perf/perf_bgfg_knn.cpp +++ b/modules/video/perf/perf_bgfg_knn.cpp @@ -4,7 +4,6 @@ #include "perf_precomp.hpp" -#ifdef HAVE_VIDEO_INPUT #include "perf_bgfg_utils.hpp" namespace opencv_test { namespace { @@ -27,7 +26,8 @@ PERF_TEST_P(KNN_Apply, KNN, Combine(Values("gpu/video/768x576.avi", "gpu/video/1 vector frame_buffer(nFrame); cv::VideoCapture cap(inputFile); - ASSERT_TRUE(cap.isOpened()); + if (!cap.isOpened()) + throw SkipTestException("Video file can not be opened"); prepareData(cap, cn, frame_buffer); Mat foreground; @@ -60,7 +60,8 @@ PERF_TEST_P(KNN_GetBackgroundImage, KNN, Values( vector frame_buffer(nFrame); cv::VideoCapture cap(inputFile); - ASSERT_TRUE(cap.isOpened()); + if (!cap.isOpened()) + throw SkipTestException("Video file can not be opened"); prepareData(cap, cn, frame_buffer, skipFrames); Mat foreground, background; @@ -85,5 +86,3 @@ PERF_TEST_P(KNN_GetBackgroundImage, KNN, Values( } }}// namespace - -#endif diff --git a/modules/video/perf/perf_bgfg_mog2.cpp b/modules/video/perf/perf_bgfg_mog2.cpp index 92e5d0283f..f911a9c74e 100644 --- 
a/modules/video/perf/perf_bgfg_mog2.cpp +++ b/modules/video/perf/perf_bgfg_mog2.cpp @@ -4,7 +4,6 @@ #include "perf_precomp.hpp" -#ifdef HAVE_VIDEO_INPUT #include "perf_bgfg_utils.hpp" namespace opencv_test { namespace { @@ -27,7 +26,8 @@ PERF_TEST_P(MOG2_Apply, Mog2, Combine(Values("gpu/video/768x576.avi", "gpu/video vector frame_buffer(nFrame); cv::VideoCapture cap(inputFile); - ASSERT_TRUE(cap.isOpened()); + if (!cap.isOpened()) + throw SkipTestException("Video file can not be opened"); prepareData(cap, cn, frame_buffer); Mat foreground; @@ -60,7 +60,8 @@ PERF_TEST_P(MOG2_GetBackgroundImage, Mog2, Values( vector frame_buffer(nFrame); cv::VideoCapture cap(inputFile); - ASSERT_TRUE(cap.isOpened()); + if (!cap.isOpened()) + throw SkipTestException("Video file can not be opened"); prepareData(cap, cn, frame_buffer, skipFrames); Mat foreground, background; @@ -85,5 +86,3 @@ PERF_TEST_P(MOG2_GetBackgroundImage, Mog2, Values( } }}// namespace - -#endif diff --git a/modules/video/test/ocl/test_bgfg_mog2.cpp b/modules/video/test/ocl/test_bgfg_mog2.cpp index 7fad29bbb8..b35a82049c 100644 --- a/modules/video/test/ocl/test_bgfg_mog2.cpp +++ b/modules/video/test/ocl/test_bgfg_mog2.cpp @@ -2,7 +2,6 @@ #include "opencv2/ts/ocl_test.hpp" #ifdef HAVE_OPENCL -#ifdef HAVE_VIDEO_INPUT namespace opencv_test { namespace ocl { @@ -33,7 +32,8 @@ OCL_TEST_P(Mog2_Update, Accuracy) { string inputFile = string(TS::ptr()->get_data_path()) + "video/768x576.avi"; VideoCapture cap(inputFile); - ASSERT_TRUE(cap.isOpened()); + if (!cap.isOpened()) + throw SkipTestException("Video file can not be opened"); Ptr mog2_cpu = createBackgroundSubtractorMOG2(); Ptr mog2_ocl = createBackgroundSubtractorMOG2(); @@ -90,7 +90,8 @@ OCL_TEST_P(Mog2_getBackgroundImage, Accuracy) { string inputFile = string(TS::ptr()->get_data_path()) + "video/768x576.avi"; VideoCapture cap(inputFile); - ASSERT_TRUE(cap.isOpened()); + if (!cap.isOpened()) + throw SkipTestException("Video file can not be opened"); Ptr mog2_cpu = createBackgroundSubtractorMOG2(); Ptr mog2_ocl = createBackgroundSubtractorMOG2(); @@ -142,4 +143,3 @@ OCL_INSTANTIATE_TEST_CASE_P(OCL_Video, Mog2_getBackgroundImage, Combine( }}// namespace opencv_test::ocl #endif -#endif diff --git a/modules/videoio/CMakeLists.txt b/modules/videoio/CMakeLists.txt index b1b82f43b1..ebec00f1da 100644 --- a/modules/videoio/CMakeLists.txt +++ b/modules/videoio/CMakeLists.txt @@ -1,44 +1,29 @@ -set(the_description "Media I/O") ocv_add_module(videoio opencv_imgproc opencv_imgcodecs WRAP java python) -# ---------------------------------------------------------------------------- -# CMake file for videoio. See root CMakeLists.txt -# Some parts taken from version of Hartmut Seichter, HIT Lab NZ. 
-# Jose Luis Blanco, 2008 -# ---------------------------------------------------------------------------- +set(videoio_hdrs ${CMAKE_CURRENT_LIST_DIR}/src/precomp.hpp) + +set(videoio_srcs + "${CMAKE_CURRENT_LIST_DIR}/src/videoio_registry.cpp" + "${CMAKE_CURRENT_LIST_DIR}/src/videoio_c.cpp" + "${CMAKE_CURRENT_LIST_DIR}/src/cap.cpp" + "${CMAKE_CURRENT_LIST_DIR}/src/cap_images.cpp" + "${CMAKE_CURRENT_LIST_DIR}/src/cap_mjpeg_encoder.cpp" + "${CMAKE_CURRENT_LIST_DIR}/src/cap_mjpeg_decoder.cpp" + "${CMAKE_CURRENT_LIST_DIR}/src/container_avi.cpp") + +file(GLOB videoio_ext_hdrs + "${CMAKE_CURRENT_LIST_DIR}/include/opencv2/*.hpp" + "${CMAKE_CURRENT_LIST_DIR}/include/opencv2/${name}/*.hpp" + "${CMAKE_CURRENT_LIST_DIR}/include/opencv2/${name}/*.h" + "${CMAKE_CURRENT_LIST_DIR}/include/opencv2/${name}/legacy/*.h") + +# Removing WinRT API headers by default +list(REMOVE_ITEM videoio_ext_hdrs "${CMAKE_CURRENT_LIST_DIR}/include/opencv2/${name}/cap_winrt.hpp") if(DEFINED WINRT AND NOT DEFINED ENABLE_WINRT_MODE_NATIVE) set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /ZW") endif() -if(APPLE) - ocv_include_directories(${ZLIB_INCLUDE_DIRS}) - list(APPEND VIDEOIO_LIBRARIES ${ZLIB_LIBRARIES}) -endif() - -set(videoio_hdrs - ${CMAKE_CURRENT_LIST_DIR}/src/precomp.hpp - ) -set(videoio_srcs - ${CMAKE_CURRENT_LIST_DIR}/src/videoio_registry.cpp - ${CMAKE_CURRENT_LIST_DIR}/src/videoio_c.cpp - ${CMAKE_CURRENT_LIST_DIR}/src/cap.cpp - ${CMAKE_CURRENT_LIST_DIR}/src/cap_images.cpp - ${CMAKE_CURRENT_LIST_DIR}/src/cap_mjpeg_encoder.cpp - ${CMAKE_CURRENT_LIST_DIR}/src/cap_mjpeg_decoder.cpp - ${CMAKE_CURRENT_LIST_DIR}/src/container_avi.cpp - ) - -file(GLOB videoio_ext_hdrs - "${CMAKE_CURRENT_LIST_DIR}/include/opencv2/*.hpp" - "${CMAKE_CURRENT_LIST_DIR}/include/opencv2/${name}/*.hpp" - "${CMAKE_CURRENT_LIST_DIR}/include/opencv2/${name}/*.h" - "${CMAKE_CURRENT_LIST_DIR}/include/opencv2/${name}/legacy/*.h" -) - -# Removing WinRT API headers by default -list(REMOVE_ITEM videoio_ext_hdrs "${CMAKE_CURRENT_LIST_DIR}/include/opencv2/${name}/cap_winrt.hpp") - # Dependencies used by the implementation referenced # below are not available on WinRT 8.0. # Enabling it for WiRT 8.1+ only. 
@@ -65,226 +50,134 @@ if(DEFINED WINRT AND NOT DEFINED WINRT_8_0 AND NOT DEFINED ENABLE_WINRT_MODE_NAT ${CMAKE_CURRENT_LIST_DIR}/src/cap_winrt/MediaStreamSink.hpp) endif() -if(HAVE_MFX) - list(APPEND videoio_srcs ${CMAKE_CURRENT_LIST_DIR}/src/cap_mfx_common.cpp) - list(APPEND videoio_hdrs ${CMAKE_CURRENT_LIST_DIR}/src/cap_mfx_common.hpp) - list(APPEND videoio_srcs ${CMAKE_CURRENT_LIST_DIR}/src/cap_mfx_reader.cpp) - list(APPEND videoio_hdrs ${CMAKE_CURRENT_LIST_DIR}/src/cap_mfx_reader.hpp) - list(APPEND videoio_srcs ${CMAKE_CURRENT_LIST_DIR}/src/cap_mfx_writer.cpp) - list(APPEND videoio_hdrs ${CMAKE_CURRENT_LIST_DIR}/src/cap_mfx_writer.hpp) - list(APPEND VIDEOIO_LIBRARIES mfx) +set(tgts) + +if(TARGET ocv.3rdparty.mediasdk) + list(APPEND videoio_srcs + ${CMAKE_CURRENT_LIST_DIR}/src/cap_mfx_common.cpp + ${CMAKE_CURRENT_LIST_DIR}/src/cap_mfx_reader.cpp + ${CMAKE_CURRENT_LIST_DIR}/src/cap_mfx_writer.cpp) + list(APPEND videoio_hdrs + ${CMAKE_CURRENT_LIST_DIR}/src/cap_mfx_common.hpp + ${CMAKE_CURRENT_LIST_DIR}/src/cap_mfx_reader.hpp + ${CMAKE_CURRENT_LIST_DIR}/src/cap_mfx_writer.hpp) + list(APPEND tgts ocv.3rdparty.mediasdk) endif() -if (WIN32 AND HAVE_DSHOW) +if(TARGET ocv.3rdparty.dshow) list(APPEND videoio_srcs ${CMAKE_CURRENT_LIST_DIR}/src/cap_dshow.cpp) list(APPEND videoio_hdrs ${CMAKE_CURRENT_LIST_DIR}/src/cap_dshow.hpp) - if (MINGW64) - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DSTRSAFE_NO_DEPRECATE") - endif() + list(APPEND tgts ocv.3rdparty.dshow) endif() -if(WIN32 AND HAVE_MSMF) +if(TARGET ocv.3rdparty.msmf) list(APPEND videoio_srcs ${CMAKE_CURRENT_LIST_DIR}/src/cap_msmf.hpp) list(APPEND videoio_srcs ${CMAKE_CURRENT_LIST_DIR}/src/cap_msmf.cpp) - if(HAVE_MSMF_DXVA) - add_definitions(-DHAVE_MSMF_DXVA) - endif() + list(APPEND tgts ocv.3rdparty.msmf) endif() -if(HAVE_XINE) +if(TARGET ocv.3rdparty.xine) list(APPEND videoio_srcs ${CMAKE_CURRENT_LIST_DIR}/src/cap_xine.cpp) -endif(HAVE_XINE) - -if(HAVE_DC1394_2) - list(APPEND videoio_srcs ${CMAKE_CURRENT_LIST_DIR}/src/cap_dc1394_v2.cpp) -endif(HAVE_DC1394_2) - -if(HAVE_GSTREAMER) - IF(WIN32) - INCLUDE_DIRECTORIES(${GSTREAMER_INCLUDE_DIR}) - list(APPEND VIDEOIO_LIBRARIES ${GSTREAMER_LIBRARIES}) - ENDIF(WIN32) - list(APPEND videoio_srcs ${CMAKE_CURRENT_LIST_DIR}/src/cap_gstreamer.cpp) -endif(HAVE_GSTREAMER) - -if(HAVE_CAMV4L2 OR HAVE_VIDEOIO) - list(APPEND videoio_srcs ${CMAKE_CURRENT_LIST_DIR}/src/cap_v4l.cpp) + list(APPEND tgts ocv.3rdparty.xine) endif() -if(HAVE_OPENNI) - list(APPEND videoio_srcs ${CMAKE_CURRENT_LIST_DIR}/src/cap_openni.cpp) - ocv_include_directories(${OPENNI_INCLUDE_DIR}) - list(APPEND VIDEOIO_LIBRARIES ${OPENNI_LIBRARY}) -endif(HAVE_OPENNI) +if(TARGET ocv.3rdparty.dc1394_2) + list(APPEND videoio_srcs ${CMAKE_CURRENT_LIST_DIR}/src/cap_dc1394_v2.cpp) + list(APPEND tgts ocv.3rdparty.dc1394_2) +endif() -if(HAVE_OPENNI2) +if(TARGET ocv.3rdparty.gstreamer) + list(APPEND videoio_srcs ${CMAKE_CURRENT_LIST_DIR}/src/cap_gstreamer.cpp) + list(APPEND tgts ocv.3rdparty.gstreamer) +endif() + +if(TARGET ocv.3rdparty.v4l) + list(APPEND videoio_srcs ${CMAKE_CURRENT_LIST_DIR}/src/cap_v4l.cpp) + list(APPEND tgts ocv.3rdparty.v4l) +endif() + +if(TARGET ocv.3rdparty.openni2) list(APPEND videoio_srcs ${CMAKE_CURRENT_LIST_DIR}/src/cap_openni2.cpp) - ocv_include_directories(${OPENNI2_INCLUDE_DIR}) - list(APPEND VIDEOIO_LIBRARIES ${OPENNI2_LIBRARY}) -endif(HAVE_OPENNI2) + list(APPEND tgts ocv.3rdparty.openni2) +endif() -if(HAVE_XIMEA) +if(TARGET ocv.3rdparty.ximea) list(APPEND videoio_srcs ${CMAKE_CURRENT_LIST_DIR}/src/cap_ximea.cpp) - if(XIMEA_PATH) 
- ocv_include_directories(${XIMEA_PATH}) - endif() - if(XIMEA_LIBRARY_DIR) - link_directories("${XIMEA_LIBRARY_DIR}") - endif() - if(WIN32 AND X86_64) - list(APPEND VIDEOIO_LIBRARIES xiapi64) - elseif(WIN32) - list(APPEND VIDEOIO_LIBRARIES xiapi32) - elseif(APPLE) - list(APPEND VIDEOIO_LIBRARIES "-framework m3api") - else() - list(APPEND VIDEOIO_LIBRARIES m3api) - endif() -endif(HAVE_XIMEA) + list(APPEND tgts ocv.3rdparty.ximea) +endif() -if(HAVE_FFMPEG) +if(TARGET ocv.3rdparty.ffmpeg) list(APPEND videoio_hdrs ${CMAKE_CURRENT_LIST_DIR}/src/cap_ffmpeg_impl.hpp) list(APPEND videoio_srcs ${CMAKE_CURRENT_LIST_DIR}/src/cap_ffmpeg.cpp) - if(UNIX AND BZIP2_LIBRARIES) - list(APPEND VIDEOIO_LIBRARIES ${BZIP2_LIBRARIES}) - endif() - if(APPLE) - list(APPEND VIDEOIO_LIBRARIES "-framework VideoDecodeAcceleration" bz2) - endif() - if(HAVE_FFMPEG_WRAPPER) - add_definitions(-DHAVE_FFMPEG_WRAPPER=1) - endif() -endif(HAVE_FFMPEG) - -if(HAVE_PVAPI) - add_definitions(-DHAVE_PVAPI) - add_definitions(${PVAPI_DEFINITIONS}) - ocv_include_directories(${PVAPI_INCLUDE_PATH}) - set(videoio_srcs ${CMAKE_CURRENT_LIST_DIR}/src/cap_pvapi.cpp ${videoio_srcs}) - list(APPEND VIDEOIO_LIBRARIES ${PVAPI_LIBRARY}) + list(APPEND tgts ocv.3rdparty.ffmpeg) endif() -if(HAVE_GIGE_API) - add_definitions(-DHAVE_GIGE_API) - ocv_include_directories(${GIGEAPI_INCLUDE_PATH}) - set(videoio_srcs ${CMAKE_CURRENT_LIST_DIR}/src/cap_giganetix.cpp ${videoio_srcs}) - list(APPEND VIDEOIO_LIBRARIES ${GIGEAPI_LIBRARIES}) - list(APPEND videoio_srcs ${CMAKE_CURRENT_LIST_DIR}/src/cap_giganetix.cpp) -endif(HAVE_GIGE_API) +if(TARGET ocv.3rdparty.pvapi) + set(videoio_srcs ${CMAKE_CURRENT_LIST_DIR}/src/cap_pvapi.cpp ${videoio_srcs}) + list(APPEND tgts ocv.3rdparty.pvapi) +endif() -if(HAVE_ARAVIS_API) - add_definitions(-DHAVE_ARAVIS_API) - ocv_include_directories(${ARAVIS_INCLUDE_PATH}) - set(videoio_srcs ${CMAKE_CURRENT_LIST_DIR}/src/cap_aravis.cpp ${videoio_srcs}) - list(APPEND VIDEOIO_LIBRARIES ${ARAVIS_LIBRARIES}) +if(TARGET ocv.3rdparty.aravis) list(APPEND videoio_srcs ${CMAKE_CURRENT_LIST_DIR}/src/cap_aravis.cpp) -endif(HAVE_ARAVIS_API) + list(APPEND tgts ocv.3rdparty.aravis) +endif() -if(HAVE_AVFOUNDATION) +if(TARGET ocv.3rdparty.avfoundation) if(IOS) list(APPEND videoio_srcs ${CMAKE_CURRENT_LIST_DIR}/src/cap_avfoundation.mm) - list(APPEND VIDEOIO_LIBRARIES "-framework AVFoundation" "-framework QuartzCore") else() list(APPEND videoio_srcs ${CMAKE_CURRENT_LIST_DIR}/src/cap_avfoundation_mac.mm) - list(APPEND VIDEOIO_LIBRARIES "-framework Cocoa" "-framework Accelerate" "-framework AVFoundation" "-framework CoreGraphics" "-framework CoreMedia" "-framework CoreVideo" "-framework QuartzCore") endif() + list(APPEND tgts ocv.3rdparty.avfoundation) endif() -if(HAVE_INTELPERC) - list(APPEND videoio_srcs ${CMAKE_CURRENT_LIST_DIR}/src/cap_intelperc.cpp) - ocv_include_directories(${INTELPERC_INCLUDE_DIR}) - list(APPEND VIDEOIO_LIBRARIES ${INTELPERC_LIBRARIES}) -endif(HAVE_INTELPERC) - -if(HAVE_LIBREALSENSE) +if(TARGET ocv.3rdparty.librealsense) list(APPEND videoio_srcs ${CMAKE_CURRENT_LIST_DIR}/src/cap_librealsense.cpp) - ocv_include_directories(${LIBREALSENSE_INCLUDE_DIR}) - list(APPEND VIDEOIO_LIBRARIES ${LIBREALSENSE_LIBRARIES}) -endif(HAVE_LIBREALSENSE) + list(APPEND tgts ocv.3rdparty.librealsense) +endif() -if(HAVE_GPHOTO2) +if(TARGET ocv.3rdparty.gphoto2) list(APPEND videoio_srcs ${CMAKE_CURRENT_LIST_DIR}/src/cap_gphoto2.cpp) -endif(HAVE_GPHOTO2) + list(APPEND tgts ocv.3rdparty.gphoto2) +endif() -if(IOS) +if(TARGET ocv.3rdparty.cap_ios) list(APPEND 
videoio_srcs - ${CMAKE_CURRENT_LIST_DIR}/src/cap_ios_abstract_camera.mm - ${CMAKE_CURRENT_LIST_DIR}/src/cap_ios_photo_camera.mm - ${CMAKE_CURRENT_LIST_DIR}/src/cap_ios_video_camera.mm) - - list(APPEND VIDEOIO_LIBRARIES "-framework Accelerate" "-framework AVFoundation" "-framework CoreGraphics" "-framework CoreImage" "-framework CoreMedia" "-framework CoreVideo" "-framework QuartzCore" "-framework UIKit") + ${CMAKE_CURRENT_LIST_DIR}/src/cap_ios_abstract_camera.mm + ${CMAKE_CURRENT_LIST_DIR}/src/cap_ios_photo_camera.mm + ${CMAKE_CURRENT_LIST_DIR}/src/cap_ios_video_camera.mm) + list(APPEND tgts ocv.3rdparty.cap_ios) endif() -if(UNIX) - #these variables are set by CHECK_MODULE macro - foreach(P ${VIDEOIO_INCLUDE_DIRS}) - ocv_include_directories(${P}) - endforeach() - - foreach(P ${VIDEOIO_LIBRARY_DIRS}) - link_directories(${P}) - endforeach() -endif() - -source_group("Src" FILES ${videoio_srcs} ${videoio_hdrs}) -source_group("Include" FILES ${videoio_ext_hdrs}) -ocv_set_module_sources(HEADERS ${videoio_ext_hdrs} SOURCES ${videoio_srcs} ${videoio_hdrs}) +ocv_set_module_sources(HEADERS ${videoio_ext_hdrs} ${videoio_hdrs} SOURCES ${videoio_srcs}) ocv_module_include_directories() +ocv_create_module() +ocv_add_accuracy_tests(${tgts}) +ocv_add_perf_tests(${tgts}) -ocv_create_module(${VIDEOIO_LIBRARIES}) - -macro(ocv_videoio_configure_target) -if(APPLE) - add_apple_compiler_options(the_module) -endif() - -if(MSVC) - set_target_properties(${the_module} PROPERTIES LINK_FLAGS "/NODEFAULTLIB:atlthunk.lib /NODEFAULTLIB:atlsd.lib /NODEFAULTLIB:libcmt.lib /DEBUG") - if(DEFINED WINRT AND NOT DEFINED ENABLE_WINRT_MODE_NATIVE) - set_target_properties(${the_module} PROPERTIES VS_DESKTOP_EXTENSIONS_VERSION "${CMAKE_VS_WINDOWS_TARGET_PLATFORM_VERSION}") - endif() -endif() - -ocv_warnings_disable(CMAKE_CXX_FLAGS -Wno-deprecated-declarations) +ocv_target_link_libraries(${the_module} LINK_PRIVATE ${tgts}) +# copy FFmpeg dll to the output folder if(WIN32 AND HAVE_FFMPEG_WRAPPER) - #copy ffmpeg dll to the output folder if(MSVC64 OR MINGW64) set(FFMPEG_SUFFIX _64) endif() - set(ffmpeg_dir "${OpenCV_BINARY_DIR}/3rdparty/ffmpeg") set(ffmpeg_bare_name "opencv_ffmpeg${FFMPEG_SUFFIX}.dll") set(ffmpeg_bare_name_ver "opencv_ffmpeg${OPENCV_DLLVERSION}${FFMPEG_SUFFIX}.dll") set(ffmpeg_path "${ffmpeg_dir}/${ffmpeg_bare_name}") - if(MSVC_IDE) - add_custom_command(TARGET ${the_module} POST_BUILD - COMMAND ${CMAKE_COMMAND} -E copy_if_different "${ffmpeg_path}" "${EXECUTABLE_OUTPUT_PATH}/Release/${ffmpeg_bare_name_ver}" - COMMAND ${CMAKE_COMMAND} -E copy_if_different "${ffmpeg_path}" "${EXECUTABLE_OUTPUT_PATH}/Debug/${ffmpeg_bare_name_ver}" - COMMENT "Copying ${ffmpeg_path} to the output directory") + execute_process( + COMMAND ${CMAKE_COMMAND} -E copy_if_different "${ffmpeg_path}" "${EXECUTABLE_OUTPUT_PATH}/Release/${ffmpeg_bare_name_ver}" + COMMAND ${CMAKE_COMMAND} -E copy_if_different "${ffmpeg_path}" "${EXECUTABLE_OUTPUT_PATH}/Debug/${ffmpeg_bare_name_ver}") elseif(MSVC AND (CMAKE_GENERATOR MATCHES "Visual")) - add_custom_command(TARGET ${the_module} POST_BUILD - COMMAND ${CMAKE_COMMAND} -E copy_if_different "${ffmpeg_path}" "${EXECUTABLE_OUTPUT_PATH}/${CMAKE_BUILD_TYPE}/${ffmpeg_bare_name_ver}" - COMMENT "Copying ${ffmpeg_path} to the output directory") + execute_process(COMMAND ${CMAKE_COMMAND} -E copy_if_different "${ffmpeg_path}" "${EXECUTABLE_OUTPUT_PATH}/${CMAKE_BUILD_TYPE}/${ffmpeg_bare_name_ver}") else() - add_custom_command(TARGET ${the_module} POST_BUILD - COMMAND ${CMAKE_COMMAND} -E copy_if_different "${ffmpeg_path}" 
"${EXECUTABLE_OUTPUT_PATH}/${ffmpeg_bare_name_ver}" - COMMENT "Copying ${ffmpeg_path} to the output directory") + execute_process(COMMAND ${CMAKE_COMMAND} -E copy_if_different "${ffmpeg_path}" "${EXECUTABLE_OUTPUT_PATH}/${ffmpeg_bare_name_ver}") endif() - install(FILES "${ffmpeg_path}" DESTINATION ${OPENCV_BIN_INSTALL_PATH} COMPONENT libs RENAME "${ffmpeg_bare_name_ver}") - if(INSTALL_CREATE_DISTRIB) install(FILES "${ffmpeg_dir}/opencv_ffmpeg.dll" DESTINATION "bin/" COMPONENT libs RENAME "opencv_ffmpeg${OPENCV_DLLVERSION}.dll") install(FILES "${ffmpeg_dir}/opencv_ffmpeg_64.dll" DESTINATION "bin/" COMPONENT libs RENAME "opencv_ffmpeg${OPENCV_DLLVERSION}_64.dll") endif() endif() -endmacro() - -if(NOT BUILD_opencv_world) - ocv_videoio_configure_target() -endif() - -ocv_add_accuracy_tests() -ocv_add_perf_tests() diff --git a/modules/videoio/cmake/detect_aravis.cmake b/modules/videoio/cmake/detect_aravis.cmake new file mode 100644 index 0000000000..9994df8339 --- /dev/null +++ b/modules/videoio/cmake/detect_aravis.cmake @@ -0,0 +1,34 @@ +# --- Aravis SDK --- +if(NOT HAVE_ARAVIS_API AND PKG_CONFIG_FOUND) + pkg_check_modules(ARAVIS aravis-0.6 QUIET) + if(ARAVIS_FOUND) + set(HAVE_ARAVIS_API TRUE) + endif() +endif() + +if(NOT HAVE_ARAVIS_API) + find_path(ARAVIS_INCLUDE "arv.h" + PATHS "${ARAVIS_ROOT}" ENV ARAVIS_ROOT + PATH_SUFFIXES "include/aravis-0.6" + NO_DEFAULT_PATH) + find_library(ARAVIS_LIBRARY "aravis-0.6" + PATHS "${ARAVIS_ROOT}" ENV ARAVIS_ROOT + PATH_SUFFIXES "lib" + NO_DEFAULT_PATH) + if(ARAVIS_INCLUDE AND ARAVIS_LIBRARY) + set(HAVE_ARAVIS_API TRUE) + file(STRINGS "${ARAVIS_INCLUDE}/arvversion.h" ver_strings REGEX "#define +ARAVIS_(MAJOR|MINOR|MICRO)_VERSION.*") + string(REGEX REPLACE ".*ARAVIS_MAJOR_VERSION[^0-9]+([0-9]+).*" "\\1" ver_major "${ver_strings}") + string(REGEX REPLACE ".*ARAVIS_MINOR_VERSION[^0-9]+([0-9]+).*" "\\1" ver_minor "${ver_strings}") + string(REGEX REPLACE ".*ARAVIS_MICRO_VERSION[^0-9]+([0-9]+).*" "\\1" ver_micro "${ver_strings}") + set(ARAVIS_VERSION "${ver_major}.${ver_minor}.${ver_micro}" PARENT_SCOPE) # informational + set(ARAVIS_INCLUDE_DIRS "${ARAVIS_INCLUDE}") + set(ARAVIS_LIBRARIES "${ARAVIS_LIBRARY}") + endif() +endif() + +if(HAVE_ARAVIS_API) + ocv_add_external_target(aravis "${ARAVIS_INCLUDE_DIRS}" "${ARAVIS_LIBRARIES}" "HAVE_ARAVIS_API") +endif() + +set(HAVE_ARAVIS_API ${HAVE_ARAVIS_API} PARENT_SCOPE) diff --git a/modules/videoio/cmake/detect_avfoundation.cmake b/modules/videoio/cmake/detect_avfoundation.cmake new file mode 100644 index 0000000000..a341f587a1 --- /dev/null +++ b/modules/videoio/cmake/detect_avfoundation.cmake @@ -0,0 +1,18 @@ +if(APPLE) + set(HAVE_AVFOUNDATION TRUE) + if(IOS) + set(libs "-framework AVFoundation" "-framework QuartzCore") + else() + set(libs + "-framework Cocoa" + "-framework Accelerate" + "-framework AVFoundation" + "-framework CoreGraphics" + "-framework CoreMedia" + "-framework CoreVideo" + "-framework QuartzCore") + endif() + ocv_add_external_target(avfoundation "" "${libs}" "HAVE_AVFOUNDATION") +endif() + +set(HAVE_AVFOUNDATION ${HAVE_AVFOUNDATION} PARENT_SCOPE) diff --git a/modules/videoio/cmake/detect_dc1394.cmake b/modules/videoio/cmake/detect_dc1394.cmake new file mode 100644 index 0000000000..ab12964581 --- /dev/null +++ b/modules/videoio/cmake/detect_dc1394.cmake @@ -0,0 +1,31 @@ +# --- Dc1394 --- +if(NOT HAVE_DC1394_2 AND PKG_CONFIG_FOUND) + pkg_check_modules(DC1394_2 libdc1394-2 QUIET) + if(DC1394_2_FOUND) + set(DC1394_2_VERSION "${DC1394_2_VERSION}" PARENT_SCOPE) # informational + set(HAVE_DC1394_2 TRUE) + 
endif() +endif() + +if(NOT HAVE_DC1394_2) + find_path(DC1394_INCLUDE "dc1394/dc1394.h" + PATHS "${DC1394_ROOT}" ENV DC1394_ROOT + PATH_SUFFIXES "include" + NO_DEFAULT_PATH) + find_library(DC1394_LIBRARY "dc1394" + PATHS "${DC1394_ROOT}" ENV DC1394_ROOT + PATH_SUFFIXES "lib" + NO_DEFAULT_PATH) + if(DC1394_INCLUDE AND DC1394_LIBRARY) + set(HAVE_DC1394_2 TRUE) + set(DC1394_2_INCLUDE_DIRS "${DC1394_INCLUDE}") + set(DC1394_2_LIBRARIES "${DC1394_LIBRARY}") + set(DC1394_2_VERSION "unknown" PARENT_SCOPE) # informational + endif() +endif() + +if(HAVE_DC1394_2) + ocv_add_external_target(dc1394_2 "${DC1394_2_INCLUDE_DIRS}" "${DC1394_2_LIBRARIES}" "HAVE_DC1394_2") +endif() + +set(HAVE_DC1394_2 ${HAVE_DC1394_2} PARENT_SCOPE) diff --git a/modules/videoio/cmake/detect_dshow.cmake b/modules/videoio/cmake/detect_dshow.cmake new file mode 100644 index 0000000000..3f41b3fd34 --- /dev/null +++ b/modules/videoio/cmake/detect_dshow.cmake @@ -0,0 +1,14 @@ +# --- VideoInput/DirectShow --- +if(NOT HAVE_DSHOW AND MSVC AND NOT MSVC_VERSION LESS 1500) + set(HAVE_DSHOW TRUE) +endif() + +if(NOT HAVE_DSHOW) + check_include_file(dshow.h HAVE_DSHOW) +endif() + +if(HAVE_DSHOW) + ocv_add_external_target(dshow "" "" "HAVE_DSHOW") +endif() + +set(HAVE_DSHOW ${HAVE_DSHOW} PARENT_SCOPE) diff --git a/modules/videoio/cmake/detect_ffmpeg.cmake b/modules/videoio/cmake/detect_ffmpeg.cmake new file mode 100644 index 0000000000..0b47d3c8a7 --- /dev/null +++ b/modules/videoio/cmake/detect_ffmpeg.cmake @@ -0,0 +1,66 @@ +# --- FFMPEG --- +if(NOT HAVE_FFMPEG AND OPENCV_FFMPEG_USE_FIND_PACKAGE) + if(OPENCV_FFMPEG_USE_FIND_PACKAGE STREQUAL "1" OR OPENCV_FFMPEG_USE_FIND_PACKAGE STREQUAL "ON") + set(OPENCV_FFMPEG_USE_FIND_PACKAGE "FFMPEG") + endif() + find_package(${OPENCV_FFMPEG_USE_FIND_PACKAGE}) # Required components: AVCODEC AVFORMAT AVUTIL SWSCALE + if(FFMPEG_FOUND OR FFmpeg_FOUND) + set(HAVE_FFMPEG TRUE) + endif() +endif() + +if(NOT HAVE_FFMPEG AND WIN32 AND NOT ARM AND NOT OPENCV_FFMPEG_SKIP_DOWNLOAD) + include("${OpenCV_SOURCE_DIR}/3rdparty/ffmpeg/ffmpeg.cmake") + download_win_ffmpeg(FFMPEG_CMAKE_SCRIPT) + if(FFMPEG_CMAKE_SCRIPT) + include("${FFMPEG_CMAKE_SCRIPT}") + set(FFMPEG_libavcodec_VERSION ${FFMPEG_libavcodec_VERSION} PARENT_SCOPE) # info + set(FFMPEG_libavformat_VERSION ${FFMPEG_libavformat_VERSION} PARENT_SCOPE) # info + set(FFMPEG_libavutil_VERSION ${FFMPEG_libavutil_VERSION} PARENT_SCOPE) # info + set(FFMPEG_libswscale_VERSION ${FFMPEG_libswscale_VERSION} PARENT_SCOPE) # info + set(FFMPEG_libavresample_VERSION ${FFMPEG_libavresample_VERSION} PARENT_SCOPE) # info + set(HAVE_FFMPEG TRUE) + set(HAVE_FFMPEG_WRAPPER TRUE) + endif() +endif() + +if(NOT HAVE_FFMPEG AND PKG_CONFIG_FOUND) + pkg_check_modules(FFMPEG libavcodec libavformat libavutil libswscale QUIET) + pkg_check_modules(FFMPEG_libavresample libavresample QUIET) # optional + if(FFMPEG_FOUND) + if(FFMPEG_libavresample_FOUND) + list(APPEND FFMPEG_LIBRARIES ${FFMPEG_libavresample_LIBRARIES}) + endif() + set(HAVE_FFMPEG TRUE) + endif() +endif() + +#================================== + +if(HAVE_FFMPEG AND NOT HAVE_FFMPEG_WRAPPER) + try_compile(__VALID_FFMPEG + "${OpenCV_BINARY_DIR}" + "${OpenCV_SOURCE_DIR}/cmake/checks/ffmpeg_test.cpp" + CMAKE_FLAGS "-DINCLUDE_DIRECTORIES:STRING=${FFMPEG_INCLUDE_DIRS}" + "-DLINK_DIRECTORIES:STRING=${FFMPEG_LIBRARY_DIRS}" + "-DLINK_LIBRARIES:STRING=${FFMPEG_LIBRARIES}" + OUTPUT_VARIABLE TRY_OUT + ) + if(NOT __VALID_FFMPEG) + # message(FATAL_ERROR "FFMPEG: test check build log:\n${TRY_OUT}") + message(STATUS "WARNING: Can't build ffmpeg test 
code") + set(HAVE_FFMPEG FALSE) + endif() +endif() + +#================================== + +if(HAVE_FFMPEG) + set(defs "HAVE_FFMPEG") + if(HAVE_FFMPEG_WRAPPER) + list(APPEND defs "HAVE_FFMPEG_WRAPPER") + endif() + ocv_add_external_target(ffmpeg "${FFMPEG_INCLUDE_DIRS}" "${FFMPEG_LIBRARIES}" "${defs}") +endif() + +set(HAVE_FFMPEG ${HAVE_FFMPEG} PARENT_SCOPE) diff --git a/modules/videoio/cmake/detect_gphoto.cmake b/modules/videoio/cmake/detect_gphoto.cmake new file mode 100644 index 0000000000..3837bef92e --- /dev/null +++ b/modules/videoio/cmake/detect_gphoto.cmake @@ -0,0 +1,13 @@ +# --- gPhoto2 --- +if(NOT HAVE_GPHOTO2 AND PKG_CONFIG_FOUND) + pkg_check_modules(GPHOTO2 libgphoto2 QUIET) + if(GPHOTO2_FOUND) + set(HAVE_GPHOTO2 TRUE) + endif() +endif() + +if(HAVE_GPHOTO2) + ocv_add_external_target(gphoto2 "${GPHOTO2_INCLUDE_DIRS}" "${GPHOTO2_LIBRARIES}" "HAVE_GPHOTO2") +endif() + +set(HAVE_GPHOTO2 ${HAVE_GPHOTO2} PARENT_SCOPE) diff --git a/modules/videoio/cmake/detect_gstreamer.cmake b/modules/videoio/cmake/detect_gstreamer.cmake new file mode 100644 index 0000000000..1d7abaf1f2 --- /dev/null +++ b/modules/videoio/cmake/detect_gstreamer.cmake @@ -0,0 +1,101 @@ +# --- GStreamer --- +if(NOT HAVE_GSTREAMER AND WIN32) + set(env_paths "${GSTREAMER_DIR}" ENV GSTREAMER_ROOT) + if(X86_64) + list(APPEND env_paths ENV GSTREAMER_1_0_ROOT_X86_64 ENV GSTREAMER_ROOT_X86_64) + else() + list(APPEND env_paths ENV GSTREAMER_1_0_ROOT_X86 ENV GSTREAMER_ROOT_X86) + endif() + + find_path(GSTREAMER_gst_INCLUDE_DIR + gst/gst.h + PATHS ${env_paths} + PATH_SUFFIXES "include/gstreamer-1.0") + find_path(GSTREAMER_glib_INCLUDE_DIR + glib.h + PATHS ${env_paths} + PATH_SUFFIXES "include/glib-2.0") + find_path(GSTREAMER_glibconfig_INCLUDE_DIR + glibconfig.h + PATHS ${env_paths} + PATH_SUFFIXES "lib/glib-2.0/include") + + find_library(GSTREAMER_gstreamer_LIBRARY + NAMES gstreamer gstreamer-1.0 + PATHS ${env_paths} + PATH_SUFFIXES "lib") + find_library(GSTREAMER_app_LIBRARY + NAMES gstapp gstapp-1.0 + PATHS ${env_paths} + PATH_SUFFIXES "lib") + find_library(GSTREAMER_base_LIBRARY + NAMES gstbase gstbase-1.0 + PATHS ${env_paths} + PATH_SUFFIXES "lib") + find_library(GSTREAMER_pbutils_LIBRARY + NAMES gstpbutils gstpbutils-1.0 + PATHS ${env_paths} + PATH_SUFFIXES "lib") + find_library(GSTREAMER_riff_LIBRARY + NAMES gstriff gstriff-1.0 + PATHS ${env_paths} + PATH_SUFFIXES "lib") + + find_library(GSTREAMER_glib_LIBRARY + NAMES glib-2.0 + PATHS ${env_paths} + PATH_SUFFIXES "lib") + find_library(GSTREAMER_gobject_LIBRARY + NAMES gobject-2.0 + PATHS ${env_paths} + PATH_SUFFIXES "lib") + + if(GSTREAMER_gst_INCLUDE_DIR + AND GSTREAMER_glib_INCLUDE_DIR + AND GSTREAMER_glibconfig_INCLUDE_DIR + AND GSTREAMER_gstreamer_LIBRARY + AND GSTREAMER_app_LIBRARY + AND GSTREAMER_base_LIBRARY + AND GSTREAMER_pbutils_LIBRARY + AND GSTREAMER_riff_LIBRARY + AND GSTREAMER_glib_LIBRARY + AND GSTREAMER_gobject_LIBRARY) + file(STRINGS "${GSTREAMER_gst_INCLUDE_DIR}/gst/gstversion.h" ver_strings REGEX "#define +GST_VERSION_(MAJOR|MINOR|MICRO|NANO).*") + string(REGEX REPLACE ".*GST_VERSION_MAJOR[^0-9]+([0-9]+).*" "\\1" ver_major "${ver_strings}") + string(REGEX REPLACE ".*GST_VERSION_MINOR[^0-9]+([0-9]+).*" "\\1" ver_minor "${ver_strings}") + string(REGEX REPLACE ".*GST_VERSION_MICRO[^0-9]+([0-9]+).*" "\\1" ver_micro "${ver_strings}") + set(GSTREAMER_VERSION "${ver_major}.${ver_minor}.${ver_micro}" PARENT_SCOPE) # informational + set(HAVE_GSTREAMER TRUE) + set(GSTREAMER_LIBRARIES + ${GSTREAMER_gstreamer_LIBRARY} + ${GSTREAMER_base_LIBRARY} + 
${GSTREAMER_app_LIBRARY} + ${GSTREAMER_riff_LIBRARY} + ${GSTREAMER_pbutils_LIBRARY} + ${GSTREAMER_glib_LIBRARY} + ${GSTREAMER_gobject_LIBRARY}) + set(GSTREAMER_INCLUDE_DIRS + ${GSTREAMER_gst_INCLUDE_DIR} + ${GSTREAMER_glib_INCLUDE_DIR} + ${GSTREAMER_glibconfig_INCLUDE_DIR}) + endif() +endif() + +if(NOT HAVE_GSTREAMER AND PKG_CONFIG_FOUND) + pkg_check_modules(GSTREAMER_base gstreamer-base-1.0 QUIET) + pkg_check_modules(GSTREAMER_app gstreamer-app-1.0 QUIET) + pkg_check_modules(GSTREAMER_riff gstreamer-riff-1.0 QUIET) + pkg_check_modules(GSTREAMER_pbutils gstreamer-pbutils-1.0 QUIET) + if(GSTREAMER_base_FOUND AND GSTREAMER_app_FOUND AND GSTREAMER_riff_FOUND AND GSTREAMER_pbutils_FOUND) + set(HAVE_GSTREAMER TRUE) + set(GSTREAMER_VERSION ${GSTREAMER_base_VERSION} PARENT_SCOPE) # informational + set(GSTREAMER_LIBRARIES ${GSTREAMER_base_LIBRARIES} ${GSTREAMER_app_LIBRARIES} ${GSTREAMER_riff_LIBRARIES} ${GSTREAMER_pbutils_LIBRARIES}) + set(GSTREAMER_INCLUDE_DIRS ${GSTREAMER_base_INCLUDE_DIRS} ${GSTREAMER_app_INCLUDE_DIRS} ${GSTREAMER_riff_INCLUDE_DIRS} ${GSTREAMER_pbutils_INCLUDE_DIRS}) + endif() +endif() + +if(HAVE_GSTREAMER) + ocv_add_external_target(gstreamer "${GSTREAMER_INCLUDE_DIRS}" "${GSTREAMER_LIBRARIES}" "HAVE_GSTREAMER") +endif() + +set(HAVE_GSTREAMER ${HAVE_GSTREAMER} PARENT_SCOPE) diff --git a/modules/videoio/cmake/detect_ios.cmake b/modules/videoio/cmake/detect_ios.cmake new file mode 100644 index 0000000000..c75426060b --- /dev/null +++ b/modules/videoio/cmake/detect_ios.cmake @@ -0,0 +1,15 @@ +if(APPLE AND IOS) + set(HAVE_CAP_IOS TRUE) + set(libs + "-framework Accelerate" + "-framework AVFoundation" + "-framework CoreGraphics" + "-framework CoreImage" + "-framework CoreMedia" + "-framework CoreVideo" + "-framework QuartzCore" + "-framework UIKit") + ocv_add_external_target(cap_ios "" "${libs}" "HAVE_CAP_IOS") +endif() + +set(HAVE_CAP_IOS ${HAVE_CAP_IOS} PARENT_SCOPE) diff --git a/modules/videoio/cmake/detect_msdk.cmake b/modules/videoio/cmake/detect_msdk.cmake new file mode 100644 index 0000000000..79b0599f1a --- /dev/null +++ b/modules/videoio/cmake/detect_msdk.cmake @@ -0,0 +1,47 @@ +if(NOT HAVE_MFX) + set(paths "${MFX_HOME}" ENV "MFX_HOME" ENV "INTELMEDIASDKROOT") + if(MSVC) + if(MSVC_VERSION LESS 1900) + set(vs_suffix) + else() + set(vs_suffix "_vs2015") + endif() + if(X86_64) + set(vs_arch "x64") + else() + set(vs_arch "win32") + endif() + endif() + find_path(MFX_INCLUDE mfxdefs.h + PATHS ${paths} + PATH_SUFFIXES "include" + NO_DEFAULT_PATH) + find_library(MFX_LIBRARY mfx libmfx${vs_suffix} + PATHS ${paths} + PATH_SUFFIXES "lib64" "lib/lin_x64" "lib/${vs_arch}" + NO_DEFAULT_PATH) + if(MFX_INCLUDE AND MFX_LIBRARY) + set(HAVE_MFX TRUE) + set(MFX_INCLUDE_DIRS "${MFX_INCLUDE}") + set(MFX_LIBRARIES "${MFX_LIBRARY}") + endif() +endif() + +if(HAVE_MFX AND UNIX) + find_path(MFX_va_INCLUDE va/va.h PATHS ${paths} PATH_SUFFIXES "include") + find_library(MFX_va_LIBRARY va PATHS ${paths} PATH_SUFFIXES "lib64" "lib/lin_x64") + find_library(MFX_va_drm_LIBRARY va-drm PATHS ${paths} PATH_SUFFIXES "lib64" "lib/lin_x64") + if(MFX_va_INCLUDE AND MFX_va_LIBRARY AND MFX_va_drm_LIBRARY) + list(APPEND MFX_INCLUDE_DIRS "${MFX_va_INCLUDE}") + list(APPEND MFX_LIBRARIES "${MFX_va_LIBRARY}" "${MFX_va_drm_LIBRARY}") + # list(APPEND MFX_LIBRARIES "-Wl,--exclude-libs=libmfx") + else() + set(HAVE_MFX FALSE) + endif() +endif() + +if(HAVE_MFX) + ocv_add_external_target(mediasdk "${MFX_INCLUDE_DIRS}" "${MFX_LIBRARIES}" "HAVE_MFX") +endif() + +set(HAVE_MFX ${HAVE_MFX} PARENT_SCOPE) diff --git 
a/modules/videoio/cmake/detect_msmf.cmake b/modules/videoio/cmake/detect_msmf.cmake new file mode 100644 index 0000000000..a1c91dab67 --- /dev/null +++ b/modules/videoio/cmake/detect_msmf.cmake @@ -0,0 +1,25 @@ +# --- VideoInput/Microsoft Media Foundation --- +if(NOT HAVE_MSMF) + check_include_file(mfapi.h HAVE_MFAPI) + if(HAVE_MFAPI) + set(HAVE_MSMF TRUE) + endif() +endif() + +if(HAVE_MSMF) + if(WITH_MSMF_DXVA) + check_include_file(d3d11.h HAVE_D3D11) + check_include_file(d3d11_4.h HAVE_D3D11_4) + if(HAVE_D3D11 AND HAVE_D3D11_4) + set(HAVE_MSMF_DXVA TRUE) + endif() + endif() + set(defs "HAVE_MSMF") + if(HAVE_MSMF_DXVA) + list(APPEND defs "HAVE_MSMF_DXVA") + endif() + ocv_add_external_target(msmf "" "" "${defs}") +endif() + +set(HAVE_MSMF ${HAVE_MSMF} PARENT_SCOPE) +set(HAVE_MSMF_DXVA ${HAVE_MSMF_DXVA} PARENT_SCOPE) diff --git a/modules/videoio/cmake/detect_openni2.cmake b/modules/videoio/cmake/detect_openni2.cmake new file mode 100644 index 0000000000..76c31454da --- /dev/null +++ b/modules/videoio/cmake/detect_openni2.cmake @@ -0,0 +1,49 @@ +# --- OpenNI2 --- + +if(NOT HAVE_OPENNI2) + set(paths "${OPENNI2_DIR}") + if(MSVC AND X86_64) + list(APPEND paths ENV OPENNI2_INCLUDE64 ENV OPENNI2_LIB64 ENV OPENNI2_REDIST64) + else() + list(APPEND paths ENV OPENNI2_INCLUDE ENV OPENNI2_LIB ENV OPENNI2_REDIST) + endif() + + # From SDK + find_path(OPENNI2_INCLUDE "OpenNI.h" + PATHS ${paths} + PATH_SUFFIXES "Include" + NO_DEFAULT_PATH) + find_library(OPENNI2_LIBRARY "OpenNI2" + PATHS ${paths} + PATH_SUFFIXES "Redist" "Lib" + NO_DEFAULT_PATH) + + if(OPENNI2_LIBRARY AND OPENNI2_INCLUDE) + set(HAVE_OPENNI2 TRUE) + set(OPENNI2_INCLUDE_DIRS "${OPENNI2_INCLUDE}") + set(OPENNI2_LIBRARIES "${OPENNI2_LIBRARY}") + endif() +endif() + +if(NOT HAVE_OPENNI2) + # From system + find_path(OPENNI2_SYS_INCLUDE "OpenNI.h" PATH_SUFFIXES "openni2" "ni2") + find_library(OPENNI2_SYS_LIBRARY "OpenNI2") + + if(OPENNI2_SYS_LIBRARY AND OPENNI2_SYS_INCLUDE) + set(HAVE_OPENNI2 TRUE) + set(OPENNI2_INCLUDE_DIRS "${OPENNI2_SYS_INCLUDE}") + set(OPENNI2_LIBRARIES "${OPENNI2_SYS_LIBRARY}") + endif() +endif() + +if(HAVE_OPENNI2) + file(STRINGS "${OPENNI2_INCLUDE_DIRS}/OniVersion.h" ver_strings REGEX "#define +ONI_VERSION_(MAJOR|MINOR|MAINTENANCE|BUILD).*") + string(REGEX REPLACE ".*ONI_VERSION_MAJOR[^0-9]+([0-9]+).*" "\\1" ver_major "${ver_strings}") + string(REGEX REPLACE ".*ONI_VERSION_MINOR[^0-9]+([0-9]+).*" "\\1" ver_minor "${ver_strings}") + string(REGEX REPLACE ".*ONI_VERSION_MAINTENANCE[^0-9]+([0-9]+).*" "\\1" ver_maint "${ver_strings}") + set(OPENNI2_VERSION "${ver_major}.${ver_minor}.${ver_maint}" PARENT_SCOPE) # informational + ocv_add_external_target(openni2 "${OPENNI2_INCLUDE_DIRS}" "${OPENNI2_LIBRARIES}" "HAVE_OPENNI2") +endif() + +set(HAVE_OPENNI2 ${HAVE_OPENNI2} PARENT_SCOPE) diff --git a/modules/videoio/cmake/detect_pvapi.cmake b/modules/videoio/cmake/detect_pvapi.cmake new file mode 100644 index 0000000000..a0f4673fdc --- /dev/null +++ b/modules/videoio/cmake/detect_pvapi.cmake @@ -0,0 +1,23 @@ +# --- PvApi --- +if(NOT HAVE_PVAPI) + if(X86_64) + set(arch x64) + else() + set(arch x86) + endif() + find_path(PVAPI_INCLUDE "PvApi.h" + PATHS "${PVAPI_ROOT}" ENV PVAPI_ROOT + PATH_SUFFIXES "inc-pc") + find_library(PVAPI_LIBRARY "PvAPI" + PATHS "${PVAPI_ROOT}" ENV PVAPI_ROOT + PATH_SUFFIXES "bin-pc/${arch}/${gcc}") + if(PVAPI_INCLUDE AND PVAPI_LIBRARY) + set(HAVE_PVAPI TRUE) + endif() +endif() + +if(HAVE_PVAPI) + ocv_add_external_target(pvapi "${PVAPI_INCLUDE}" "${PVAPI_LIBRARY}" "HAVE_PVAPI") +endif() + +set(HAVE_PVAPI 
${HAVE_PVAPI} PARENT_SCOPE) diff --git a/modules/videoio/cmake/detect_realsense.cmake b/modules/videoio/cmake/detect_realsense.cmake new file mode 100644 index 0000000000..32e5e02c9e --- /dev/null +++ b/modules/videoio/cmake/detect_realsense.cmake @@ -0,0 +1,28 @@ +# --- Intel librealsense --- + +if(NOT HAVE_LIBREALSENSE) + find_package(realsense2 QUIET) + if(realsense2_FOUND) + set(HAVE_LIBREALSENSE TRUE) + set(LIBREALSENSE_VERSION "${realsense2_VERSION}" PARENT_SCOPE) # informational + ocv_add_external_target(librealsense "" "${realsense2_LIBRARY}" "HAVE_LIBREALSENSE") + endif() +endif() + +if(NOT HAVE_LIBREALSENSE) + find_path(LIBREALSENSE_INCLUDE_DIR "librealsense2/rs.hpp" + PATHS "${LIBREALSENSE_INCLUDE}" ENV LIBREALSENSE_INCLUDE) + find_library(LIBREALSENSE_LIBRARIES "realsense2" + PATHS "${LIBREALSENSE_LIB}" ENV LIBREALSENSE_LIB) + if(LIBREALSENSE_INCLUDE_DIR AND LIBREALSENSE_LIBRARIES) + set(HAVE_LIBREALSENSE TRUE) + file(STRINGS "${LIBREALSENSE_INCLUDE_DIR}/librealsense2/rs.h" ver_strings REGEX "#define +RS2_API_(MAJOR|MINOR|PATCH|BUILD)_VERSION.*") + string(REGEX REPLACE ".*RS2_API_MAJOR_VERSION[^0-9]+([0-9]+).*" "\\1" ver_major "${ver_strings}") + string(REGEX REPLACE ".*RS2_API_MINOR_VERSION[^0-9]+([0-9]+).*" "\\1" ver_minor "${ver_strings}") + string(REGEX REPLACE ".*RS2_API_PATCH_VERSION[^0-9]+([0-9]+).*" "\\1" ver_patch "${ver_strings}") + set(LIBREALSENSE_VERSION "${ver_major}.${ver_minor}.${ver_patch}" PARENT_SCOPE) # informational + ocv_add_external_target(librealsense "${LIBREALSENSE_INCLUDE_DIR}" "${LIBREALSENSE_LIBRARIES}" "HAVE_LIBREALSENSE") + endif() +endif() + +set(HAVE_LIBREALSENSE ${HAVE_LIBREALSENSE} PARENT_SCOPE) diff --git a/modules/videoio/cmake/detect_v4l.cmake b/modules/videoio/cmake/detect_v4l.cmake new file mode 100644 index 0000000000..05b73b003c --- /dev/null +++ b/modules/videoio/cmake/detect_v4l.cmake @@ -0,0 +1,19 @@ +# --- V4L --- +if(NOT HAVE_V4L) + set(CMAKE_REQUIRED_QUIET TRUE) # for check_include_file + check_include_file(linux/videodev2.h HAVE_CAMV4L2) + check_include_file(sys/videoio.h HAVE_VIDEOIO) + if(HAVE_CAMV4L2 OR HAVE_VIDEOIO) + set(HAVE_V4L TRUE) + set(defs) + if(HAVE_CAMV4L2) + list(APPEND defs "HAVE_CAMV4L2") + endif() + if(HAVE_VIDEOIO) + list(APPEND defs "HAVE_VIDEOIO") + endif() + ocv_add_external_target(v4l "" "" "${defs}") + endif() +endif() + +set(HAVE_V4L ${HAVE_V4L} PARENT_SCOPE) diff --git a/modules/videoio/cmake/detect_ximea.cmake b/modules/videoio/cmake/detect_ximea.cmake new file mode 100644 index 0000000000..5e0521b599 --- /dev/null +++ b/modules/videoio/cmake/detect_ximea.cmake @@ -0,0 +1,29 @@ +if(NOT HAVE_XIMEA) + if(WIN32) + get_filename_component(regpath "[HKEY_CURRENT_USER\\Software\\XIMEA\\CamSupport\\API;Path]" ABSOLUTE) + endif() + if(X86_64) + set(lib_dir "x64") + set(lib_suffix "64") + else() + set(lib_dir "x86") + set(lib_suffix "32") + endif() + find_path(XIMEA_INCLUDE "xiApi.h" + PATHS "${XIMEA_ROOT}" ENV XIMEA_ROOT "/opt/XIMEA" + HINTS "${regpath}" + PATH_SUFFIXES "include" "API") + find_library(XIMEA_LIBRARY m3api xiapi${lib_suffix} + PATHS "${XIMEA_ROOT}" ENV XIMEA_ROOT "/opt/XIMEA" + HINTS "${regpath}" + PATH_SUFFIXES "API/${lib_dir}") + if(XIMEA_INCLUDE AND XIMEA_LIBRARY) + set(HAVE_XIMEA TRUE) + endif() +endif() + +if(HAVE_XIMEA) + ocv_add_external_target(ximea "${XIMEA_INCLUDE}" "${XIMEA_LIBRARY}" "HAVE_XIMEA") +endif() + +set(HAVE_XIMEA ${HAVE_XIMEA} PARENT_SCOPE) diff --git a/modules/videoio/cmake/detect_xine.cmake b/modules/videoio/cmake/detect_xine.cmake new file mode 100644 index 
0000000000..14d61a4b3f --- /dev/null +++ b/modules/videoio/cmake/detect_xine.cmake @@ -0,0 +1,12 @@ +if(NOT HAVE_XINE AND PKG_CONFIG_FOUND) + pkg_check_modules(XINE libxine QUIET) + if(XINE_FOUND) + set(HAVE_XINE TRUE) + endif() +endif() + +if(HAVE_XINE) + ocv_add_external_target(xine "${XINE_INCLUDE_DIRS}" "${XINE_LIBRARIES}" "HAVE_XINE") +endif() + +set(HAVE_XINE ${HAVE_XINE} PARENT_SCOPE) diff --git a/modules/videoio/cmake/init.cmake b/modules/videoio/cmake/init.cmake new file mode 100644 index 0000000000..5e71c2e724 --- /dev/null +++ b/modules/videoio/cmake/init.cmake @@ -0,0 +1,40 @@ +macro(add_backend backend_id cond_var) + if(${cond_var}) + include("${CMAKE_CURRENT_LIST_DIR}/detect_${backend_id}.cmake") + endif() +endmacro() + +function(ocv_add_external_target name inc link def) + if(BUILD_SHARED_LIBS) + set(imp IMPORTED) + endif() + add_library(ocv.3rdparty.${name} INTERFACE ${imp}) + set_target_properties(ocv.3rdparty.${name} PROPERTIES + INTERFACE_INCLUDE_DIRECTORIES "${inc}" + INTERFACE_SYSTEM_INCLUDE_DIRECTORIES "${inc}" + INTERFACE_LINK_LIBRARIES "${link}" + INTERFACE_COMPILE_DEFINITIONS "${def}") + if(NOT BUILD_SHARED_LIBS) + install(TARGETS ocv.3rdparty.${name} EXPORT OpenCVModules) + endif() +endfunction() + +add_backend("ffmpeg" WITH_FFMPEG) +add_backend("gstreamer" WITH_GSTREAMER) +add_backend("v4l" WITH_V4L) + +add_backend("aravis" WITH_ARAVIS) +add_backend("dc1394" WITH_1394) +add_backend("gphoto" WITH_GPHOTO2) +add_backend("msdk" WITH_MFX) +add_backend("openni2" WITH_OPENNI2) +add_backend("pvapi" WITH_PVAPI) +add_backend("realsense" WITH_LIBREALSENSE) +add_backend("ximea" WITH_XIMEA) +add_backend("xine" WITH_XINE) + +add_backend("avfoundation" WITH_AVFOUNDATION) +add_backend("ios" WITH_CAP_IOS) + +add_backend("dshow" WITH_DSHOW) +add_backend("msmf" WITH_MSMF) diff --git a/modules/videoio/include/opencv2/videoio.hpp b/modules/videoio/include/opencv2/videoio.hpp index 408f9e8f87..d31985b56b 100644 --- a/modules/videoio/include/opencv2/videoio.hpp +++ b/modules/videoio/include/opencv2/videoio.hpp @@ -108,7 +108,8 @@ enum VideoCaptureAPIs { CAP_GIGANETIX = 1300, //!< Smartek Giganetix GigEVisionSDK CAP_MSMF = 1400, //!< Microsoft Media Foundation (via videoInput) CAP_WINRT = 1410, //!< Microsoft Windows Runtime using Media Foundation - CAP_INTELPERC = 1500, //!< Intel Perceptual Computing SDK + CAP_INTELPERC = 1500, //!< RealSense (former Intel Perceptual Computing SDK) + CAP_REALSENSE = 1500, //!< Synonym for CAP_INTELPERC CAP_OPENNI2 = 1600, //!< OpenNI2 (for Kinect) CAP_OPENNI2_ASUS = 1610, //!< OpenNI2 (for Asus Xtion and Occipital Structure sensors) CAP_GPHOTO2 = 1700, //!< gPhoto2 connection diff --git a/modules/videoio/perf/perf_input.cpp b/modules/videoio/perf/perf_input.cpp index 0efc517277..27fa11165e 100644 --- a/modules/videoio/perf/perf_input.cpp +++ b/modules/videoio/perf/perf_input.cpp @@ -3,8 +3,6 @@ // of this distribution and at http://opencv.org/license.html #include "perf_precomp.hpp" -#ifdef HAVE_VIDEO_INPUT - namespace opencv_test { using namespace perf; @@ -35,5 +33,3 @@ PERF_TEST_P(VideoCapture_Reading, ReadFile, testing::ValuesIn(bunny_files) ) } } // namespace - -#endif // HAVE_VIDEO_INPUT diff --git a/modules/videoio/perf/perf_output.cpp b/modules/videoio/perf/perf_output.cpp index f379cacb83..6f871dc510 100644 --- a/modules/videoio/perf/perf_output.cpp +++ b/modules/videoio/perf/perf_output.cpp @@ -3,8 +3,6 @@ // of this distribution and at http://opencv.org/license.html #include "perf_precomp.hpp" -#ifdef HAVE_VIDEO_OUTPUT - namespace 
opencv_test { using namespace perf; @@ -38,11 +36,12 @@ PERF_TEST_P(VideoWriter_Writing, WriteFrame, #endif VideoWriter writer(outfile, fourcc, 25, cv::Size(image.cols, image.rows), isColor); + if (!writer.isOpened()) + throw SkipTestException("Video file can not be opened"); + TEST_CYCLE_N(100) { writer << image; } SANITY_CHECK_NOTHING(); remove(outfile.c_str()); } } // namespace - -#endif // HAVE_VIDEO_OUTPUT diff --git a/modules/videoio/src/cap_avfoundation.mm b/modules/videoio/src/cap_avfoundation.mm index 9f76fbb149..372140ed35 100644 --- a/modules/videoio/src/cap_avfoundation.mm +++ b/modules/videoio/src/cap_avfoundation.mm @@ -29,13 +29,15 @@ * */ + #pragma clang diagnostic push + #pragma clang diagnostic ignored "-Wdeprecated-declarations" + #include "precomp.hpp" #include "opencv2/imgproc.hpp" #include #import #import - /********************** Declaration of class headers ************************/ /***************************************************************************** @@ -1330,3 +1332,5 @@ bool CvVideoWriter_AVFoundation::writeFrame(const IplImage* iplimage) { } } + +#pragma clang diagnostic pop diff --git a/modules/videoio/src/cap_avfoundation_mac.mm b/modules/videoio/src/cap_avfoundation_mac.mm index 6886ff6d3d..465590b7b8 100644 --- a/modules/videoio/src/cap_avfoundation_mac.mm +++ b/modules/videoio/src/cap_avfoundation_mac.mm @@ -39,6 +39,8 @@ // //M*//////////////////////////////////////////////////////////////////////////////////////// +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" #include "precomp.hpp" #include "opencv2/imgproc.hpp" @@ -1333,3 +1335,5 @@ bool CvVideoWriter_AVFoundation::writeFrame(const IplImage* iplimage) { } } + +#pragma clang diagnostic pop diff --git a/modules/videoio/src/cap_dc1394.cpp b/modules/videoio/src/cap_dc1394.cpp deleted file mode 100644 index 4da41984df..0000000000 --- a/modules/videoio/src/cap_dc1394.cpp +++ /dev/null @@ -1,1113 +0,0 @@ -/* This is the contributed code: -Firewire and video4linux camera support for videoio - -2003-03-12 Magnus Lundin -lundin@mlu.mine.nu - -THIS EXEPERIMENTAL CODE -Tested on 2.4.19 with 1394, video1394, v4l, dc1394 and raw1394 support - -This set of files adds support for firevre and usb cameras. -First it tries to install a firewire camera, -if that fails it tries a v4l/USB camera - -It has been tested with the motempl sample program - -INSTALLATION -Install OpenCV -Install v4l -Install dc1394 raw1394 - coriander should work with your camera - Backup videoio folder - Copy new files - cd into videoio folder - make clean (cvcap.cpp must be rebuilt) - make - make install - - -The build is controlled by the following entries in the videoio Makefile: - -libvideoio_la_LIBADD = -L/usr/X11R6/lib -lXm -lMrm -lUil -lpng -ljpeg -lz -ltiff -lavcodec -lraw1394 -ldc1394_control -DEFS = -DHAVE_CONFIG_H -DHAVE_DC1394 - - -Now it should be possible to use videoio camera functions, works for me. - - -THINGS TO DO -Better ways to select 1394 or v4l camera -Better support for videosize -Format7 - -Comments and changes welcome -/Magnus - -2005-10-19 Roman Stanchak -rstanchak@yahoo.com - -Support added for setting MODE and other DC1394 properties. Also added CONVERT_RGB flag -which indicates whether or not color conversion is performed in cvRetrieveFrame. The default -for CONVERT_RGB=1 for backward compatibility. 
- -Tested with 2.6.12 with libdc1394-1.0.0, libraw1394-0.10.1 using a Point Grey Flea - -*/ - - -/*M/////////////////////////////////////////////////////////////////////////////////////// -// -// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING. -// -// By downloading, copying, installing or using the software you agree to this license. -// If you do not agree to this license, do not download, install, -// copy or use the software. -// -// -// Intel License Agreement -// For Open Source Computer Vision Library -// -// Copyright (C) 2000, Intel Corporation, all rights reserved. -// Third party copyrights are property of their respective owners. -// -// Redistribution and use in source and binary forms, with or without modification, -// are permitted provided that the following conditions are met: -// -// * Redistribution's of source code must retain the above copyright notice, -// this list of conditions and the following disclaimer. -// -// * Redistribution's in binary form must reproduce the above copyright notice, -// this list of conditions and the following disclaimer in the documentation -// and/or other materials provided with the distribution. -// -// * The name of Intel Corporation may not be used to endorse or promote products -// derived from this software without specific prior written permission. -// -// This software is provided by the copyright holders and contributors "as is" and -// any express or implied warranties, including, but not limited to, the implied -// warranties of merchantability and fitness for a particular purpose are disclaimed. -// In no event shall the Intel Corporation or contributors be liable for any direct, -// indirect, incidental, special, exemplary, or consequential damages -// (including, but not limited to, procurement of substitute goods or services; -// loss of use, data, or profits; or business interruption) however caused -// and on any theory of liability, whether in contract, strict liability, -// or tort (including negligence or otherwise) arising in any way out of -// the use of this software, even if advised of the possibility of such damage. -// -//M*/ - -#include "precomp.hpp" - -#if !defined _WIN32 && defined HAVE_DC1394 - -#include -#include -#include -#include - -#ifdef NDEBUG -#define CV_WARN(message) -#else -#define CV_WARN(message) fprintf(stderr, "warning: %s (%s:%d)\n", message, __FILE__, __LINE__) -#endif - -#define CV_DC1394_CALL(expr) \ -if((expr)<0){ \ - OPENCV_ERROR(CV_StsInternal, "", "libdc1394 function call returned < 0"); \ -} - -#define DELAY 50000 - -// bpp for 16-bits cameras... this value works for PtGrey DragonFly... 
-#define MONO16_BPP 8 - -/* should be in pixelformat */ -static void uyv2bgr(const unsigned char *src, unsigned char *dest, unsigned long long int NumPixels); -static void uyvy2bgr(const unsigned char *src, unsigned char *dest, unsigned long long int NumPixels); -static void uyyvyy2bgr(const unsigned char *src, unsigned char *dest, unsigned long long int NumPixels); -static void y2bgr(const unsigned char *src, unsigned char *dest, unsigned long long int NumPixels); -static void y162bgr(const unsigned char *src, unsigned char *dest, unsigned long long int NumPixels, int bits); -static void rgb482bgr(const unsigned char *src8, unsigned char *dest, unsigned long long int NumPixels, int bits); - -static const char * videodev[4]={ - "/dev/video1394/0", - "/dev/video1394/1", - "/dev/video1394/2", - "/dev/video1394/3" -}; - -typedef struct CvCaptureCAM_DC1394 -{ - raw1394handle_t handle; - nodeid_t camera_node; - dc1394_cameracapture* camera; - int format; - int mode; - int color_mode; - int frame_rate; - const char * device_name; - IplImage frame; - int convert; - int buffer_is_writeable; // indicates whether frame.imageData is allocated by OpenCV or DC1394 -} -CvCaptureCAM_DC1394; - -static void icvCloseCAM_DC1394( CvCaptureCAM_DC1394* capture ); - -static int icvGrabFrameCAM_DC1394( CvCaptureCAM_DC1394* capture ); -static IplImage* icvRetrieveFrameCAM_DC1394( CvCaptureCAM_DC1394* capture, int ); - -static double icvGetPropertyCAM_DC1394( CvCaptureCAM_DC1394* capture, int property_id ); -static int icvSetPropertyCAM_DC1394( CvCaptureCAM_DC1394* capture, int property_id, double value ); - -// utility functions -static int icvFormatSupportedCAM_DC1394(int format, quadlet_t formats); -static int icvModeSupportedCAM_DC1394(int format, int mode, quadlet_t modes); -static int icvColorMode( int mode ); -static unsigned int icvGetBestFrameRate( CvCaptureCAM_DC1394 * capture, int format, int mode); -static int icvResizeFrame(CvCaptureCAM_DC1394 * capture); - -/*********************** Implementations ***************************************/ -#define MAX_PORTS 3 -#define MAX_CAMERAS 8 -#define NUM_BUFFERS 8 -struct raw1394_portinfo ports[MAX_PORTS]; -static raw1394handle_t handles[MAX_PORTS]; -static int camCount[MAX_PORTS]; -static int numPorts = -1; -static int numCameras = 0; -static nodeid_t *camera_nodes; -struct camnode {dc1394_cameracapture cam;int portnum;} cameras[MAX_CAMERAS]; - -static const int preferred_modes[] -= { - // uncomment the following line to test a particular mode: - //FORMAT_VGA_NONCOMPRESSED, MODE_640x480_MONO16, 0, - FORMAT_SVGA_NONCOMPRESSED_2, - MODE_1600x1200_RGB, MODE_1600x1200_YUV422, MODE_1280x960_RGB, MODE_1280x960_YUV422, - MODE_1600x1200_MONO, MODE_1280x960_MONO, MODE_1600x1200_MONO16, MODE_1280x960_MONO16, - FORMAT_SVGA_NONCOMPRESSED_1, - MODE_1024x768_RGB, MODE_1024x768_YUV422, MODE_800x600_RGB, MODE_800x600_YUV422, - MODE_1024x768_MONO, MODE_800x600_MONO, MODE_1024x768_MONO16, MODE_800x600_MONO16, - FORMAT_VGA_NONCOMPRESSED, - MODE_640x480_RGB, MODE_640x480_YUV422, MODE_640x480_YUV411, MODE_320x240_YUV422, - MODE_160x120_YUV444, MODE_640x480_MONO, MODE_640x480_MONO16, - FORMAT_SCALABLE_IMAGE_SIZE, - MODE_FORMAT7_0, MODE_FORMAT7_1, MODE_FORMAT7_2, MODE_FORMAT7_3, - MODE_FORMAT7_4, MODE_FORMAT7_5, MODE_FORMAT7_6, MODE_FORMAT7_7, - 0 -}; - -void icvInitCapture_DC1394(){ - int p; - - raw1394handle_t raw_handle = raw1394_new_handle(); - if( raw_handle == 0 ) { - numPorts = 0; - return; - } - numPorts = raw1394_get_port_info(raw_handle, ports, MAX_PORTS); - 
raw1394_destroy_handle(raw_handle); - for (p = 0; p < numPorts; p++) { - handles[p] = dc1394_create_handle(p); - if (handles[p]==NULL) { numPorts=-1; return; /*ERROR_CLEANUP_EXIT*/ } - - /* get the camera nodes and describe them as we find them */ - camera_nodes = dc1394_get_camera_nodes(handles[p], &camCount[p], 0); - for (int i=0;i=numCameras) - return 0; - if (index<0) - return 0; - - CvCaptureCAM_DC1394 * pcap = (CvCaptureCAM_DC1394*)cvAlloc(sizeof(*pcap)); - - /* Select a port and camera */ - pcap->device_name = videodev[cameras[index].portnum]; - pcap->handle = handles[cameras[index].portnum]; - pcap->camera = &cameras[index].cam; - - // get supported formats - if (dc1394_query_supported_formats(pcap->handle, pcap->camera->node, &formats)<0) { - fprintf(stderr,"%s:%d: Could not query supported formats\n",__FILE__,__LINE__); - formats=0x0; - } - for (i=0; i < NUM_FORMATS; i++) { - modes[i]=0; - if (icvFormatSupportedCAM_DC1394(i+FORMAT_MIN, formats)){ - if (dc1394_query_supported_modes(pcap->handle, pcap->camera->node, i+FORMAT_MIN, &modes[i])<0) { - fprintf(stderr,"%s:%d: Could not query Format%d modes\n",__FILE__,__LINE__,i); - } - } - } - - pcap->format = 0; - pcap->mode = 0; - pcap->color_mode = 0; - pcap->frame_rate = 0; - - int format_idx = -1; - - // scan the list of preferred modes, and find a supported one - for(i=0; (pcap->mode == 0) && (preferred_modes[i] != 0); i++) { - if((preferred_modes[i] >= FORMAT_MIN) && (preferred_modes[i] <= FORMAT_MAX)) { - pcap->format = preferred_modes[i]; - format_idx = preferred_modes[i] - FORMAT_MIN; - continue; - } - assert(format_idx != -1); - if ( ! icvFormatSupportedCAM_DC1394(pcap->format, formats) ) - continue; - if ( icvModeSupportedCAM_DC1394(pcap->format, preferred_modes[i], modes[format_idx]) ){ - pcap->mode = preferred_modes[i]; - } - } - if (pcap->mode == 0) { - fprintf(stderr,"%s:%d: Could not find a supported mode for this camera\n",__FILE__,__LINE__); - goto ERROR; - } - - pcap->color_mode = icvColorMode( pcap->mode ); - if( pcap->color_mode == -1){ - fprintf(stderr,"%s:%d: ERROR: BPP is Unsupported!!\n",__FILE__,__LINE__); - goto ERROR; - } - - // set frame rate to optimal value given format and mode - pcap->frame_rate = icvGetBestFrameRate(pcap, pcap->format, pcap->mode); - - if (pcap->format!=FORMAT_SCALABLE_IMAGE_SIZE) { // everything except Format 7 - if (dc1394_dma_setup_capture(pcap->handle, pcap->camera->node, index+1 /*channel*/, - pcap->format, pcap->mode, SPEED_400, - pcap->frame_rate, NUM_BUFFERS, 1 /*drop_frames*/, - pcap->device_name, pcap->camera) != DC1394_SUCCESS) { - fprintf(stderr,"%s:%d: Failed to setup DMA capture with VIDEO1394\n",__FILE__,__LINE__); - goto ERROR; - } - } - else { - if(dc1394_dma_setup_format7_capture(pcap->handle,pcap->camera->node,index+1 /*channel*/, - pcap->mode, SPEED_400, QUERY_FROM_CAMERA, - (unsigned int)QUERY_FROM_CAMERA, (unsigned int)QUERY_FROM_CAMERA, - (unsigned int)QUERY_FROM_CAMERA, (unsigned int)QUERY_FROM_CAMERA, - NUM_BUFFERS, 1 /*drop_frames*/, - pcap->device_name, pcap->camera) != DC1394_SUCCESS) { - fprintf(stderr,"%s:%d: Failed to setup DMA capture with VIDEO1394\n",__FILE__,__LINE__); - goto ERROR; - } - } - - if (dc1394_start_iso_transmission(pcap->handle, pcap->camera->node)!=DC1394_SUCCESS) { - fprintf(stderr,"%s:%d: Could not start ISO transmission\n",__FILE__,__LINE__); - goto ERROR; - } - - usleep(DELAY); - - dc1394bool_t status; - if (dc1394_get_iso_status(pcap->handle, pcap->camera->node, &status)!=DC1394_SUCCESS) { - fprintf(stderr,"%s:%d: Could get ISO 
status",__FILE__,__LINE__); - goto ERROR; - } - if (status==DC1394_FALSE) { - fprintf(stderr,"%s:%d: ISO transmission refuses to start",__FILE__,__LINE__); - goto ERROR; - } - - // convert camera image to RGB by default - pcap->convert=1; - - // no image data allocated yet - pcap->buffer_is_writeable = 0; - - memset(&(pcap->frame), 0, sizeof(IplImage)); - icvResizeFrame( pcap ); - return pcap; - -ERROR: - return 0; -}; - -static void icvCloseCAM_DC1394( CvCaptureCAM_DC1394* capture ){ - dc1394_stop_iso_transmission(capture->handle, capture->camera->node); - dc1394_dma_unlisten (capture->handle, capture->camera); - /* Deallocate space for RGBA data */ - if(capture->convert){ - cvFree(&capture->frame.imageData); - } -} - -static int icvGrabFrameCAM_DC1394( CvCaptureCAM_DC1394* capture ){ - // TODO: should this function wait until the next frame is available or return - // immediately ? - float waiting = 0; - do{ - int result = dc1394_dma_single_capture_poll(capture->camera); - if(result==DC1394_SUCCESS){ - return 1; - } - else if(result==DC1394_NO_FRAME){ - usleep(1000000/120); //sleep for at least a 1/2 of the frame rate - waiting += 1.0/120.0; - } - else{ - printf("dc1394_dma_single_capture_poll failed\n"); - return 0; - } - } while(waiting<2); - printf("dc1394_dma_single_capture_poll timed out\n"); - return 0; -} - -static IplImage* icvRetrieveFrameCAM_DC1394( CvCaptureCAM_DC1394* capture, int ){ - if(capture->camera->capture_buffer ) - { - if(capture->convert){ - /* Convert to RGBA */ - unsigned char * src = (unsigned char *)capture->camera->capture_buffer; - unsigned char * dst = (unsigned char *)capture->frame.imageData; - switch (capture->color_mode) { - case COLOR_FORMAT7_RGB8: - //printf("icvRetrieveFrame convert RGB to BGR\n"); - /* Convert RGB to BGR */ - for (int i=0;iframe.imageSize;i+=6) { - dst[i] = src[i+2]; - dst[i+1] = src[i+1]; - dst[i+2] = src[i]; - dst[i+3] = src[i+5]; - dst[i+4] = src[i+4]; - dst[i+5] = src[i+3]; - } - break; - case COLOR_FORMAT7_YUV422: - //printf("icvRetrieveFrame convert YUV422 to BGR %d\n"); - uyvy2bgr(src, - dst, - capture->camera->frame_width * capture->camera->frame_height); - break; - case COLOR_FORMAT7_MONO8: - //printf("icvRetrieveFrame convert MONO8 to BGR %d\n"); - y2bgr(src, - dst, - capture->camera->frame_width * capture->camera->frame_height); - break; - case COLOR_FORMAT7_YUV411: - //printf("icvRetrieveFrame convert YUV411 to BGR %d\n"); - uyyvyy2bgr(src, - dst, - capture->camera->frame_width * capture->camera->frame_height); - break; - case COLOR_FORMAT7_YUV444: - //printf("icvRetrieveFrame convert YUV444 to BGR %d\n"); - uyv2bgr(src, - dst, - capture->camera->frame_width * capture->camera->frame_height); - break; - case COLOR_FORMAT7_MONO16: - //printf("icvRetrieveFrame convert MONO16 to BGR %d\n"); - y162bgr(src, - dst, - capture->camera->frame_width * capture->camera->frame_height, MONO16_BPP); - break; - case COLOR_FORMAT7_RGB16: - //printf("icvRetrieveFrame convert RGB16 to BGR %d\n"); - rgb482bgr(src, - dst, - capture->camera->frame_width * capture->camera->frame_height, MONO16_BPP); - break; - default: - fprintf(stderr,"%s:%d: Unsupported color mode %d\n",__FILE__,__LINE__,capture->color_mode); - return 0; - } /* switch (capture->mode) */ - } - else{ - // return raw data - capture->frame.imageData = (char *) capture->camera->capture_buffer; - capture->frame.imageDataOrigin = (char *) capture->camera->capture_buffer; - } - - // TODO: if convert=0, we are not actually done with the buffer - // but this seems to work anyway. 
- dc1394_dma_done_with_buffer(capture->camera); - - return &capture->frame; - } - return 0; -}; - -static double icvGetPropertyCAM_DC1394( CvCaptureCAM_DC1394* capture, int property_id ){ - int index=-1; - switch ( property_id ) { - case CV_CAP_PROP_CONVERT_RGB: - return capture->convert; - case CV_CAP_PROP_MODE: - return capture->mode; - case CV_CAP_PROP_FORMAT: - return capture->format; - case CV_CAP_PROP_FPS: - CV_DC1394_CALL(dc1394_get_video_framerate(capture->handle, capture->camera->node, - (unsigned int *) &capture->camera->frame_rate)); - switch(capture->camera->frame_rate) { - case FRAMERATE_1_875: - return 1.875; - case FRAMERATE_3_75: - return 3.75; - case FRAMERATE_7_5: - return 7.5; - case FRAMERATE_15: - return 15.; - case FRAMERATE_30: - return 30.; - case FRAMERATE_60: - return 60; -#if NUM_FRAMERATES > 6 - case FRAMERATE_120: - return 120; -#endif -#if NUM_FRAMERATES > 7 - case FRAMERATE_240: - return 240; -#endif - } - default: - index = property_id; // did they pass in a LIBDC1394 feature flag? - break; - } - if(index>=FEATURE_MIN && index<=FEATURE_MAX){ - dc1394bool_t has_feature; - CV_DC1394_CALL( dc1394_is_feature_present(capture->handle, capture->camera->node, - index, &has_feature)); - if(!has_feature){ - CV_WARN("Feature is not supported by this camera"); - } - else{ - unsigned int value; - dc1394_get_feature_value(capture->handle, capture->camera->node, index, &value); - return (double) value; - } - } - - return 0; -}; - -// resize capture->frame appropriately depending on camera and capture settings -static int icvResizeFrame(CvCaptureCAM_DC1394 * capture){ - if(capture->convert){ - // resize if sizes are different, formats are different - // or conversion option has changed - if(capture->camera->frame_width != capture->frame.width || - capture->camera->frame_height != capture->frame.height || - capture->frame.depth != 8 || - capture->frame.nChannels != 3 || - capture->frame.imageData == NULL || - capture->buffer_is_writeable == 0) - { - if(capture->frame.imageData && capture->buffer_is_writeable){ - cvReleaseData( &(capture->frame)); - } - cvInitImageHeader( &capture->frame, cvSize( capture->camera->frame_width, - capture->camera->frame_height ), - IPL_DEPTH_8U, 3, IPL_ORIGIN_TL, 4 ); - cvCreateData( &(capture->frame) ); - capture->buffer_is_writeable = 1; - } - - } - else { - // free image data if allocated by opencv - if(capture->buffer_is_writeable){ - cvReleaseData(&(capture->frame)); - } - - // figure out number of channels and bpp - int bpp = 8; - int nch = 3; - int width = capture->camera->frame_width; - int height = capture->camera->frame_height; - double code = CV_FOURCC('B','G','R',0); - switch(capture->color_mode){ - case COLOR_FORMAT7_YUV422: - nch = 2; - code = CV_FOURCC('Y','4','2','2'); - break; - case COLOR_FORMAT7_MONO8: - code = CV_FOURCC('Y',0,0,0); - nch = 1; - break; - case COLOR_FORMAT7_YUV411: - code = CV_FOURCC('Y','4','1','1'); - width *= 2; - nch = 3; //yy[u/v] - break; - case COLOR_FORMAT7_YUV444: - code = CV_FOURCC('Y','U','V',0); - nch = 3; - break; - case COLOR_FORMAT7_MONO16: - code = CV_FOURCC('Y',0,0,0); - bpp = IPL_DEPTH_16S; - nch = 1; - break; - case COLOR_FORMAT7_RGB16: - bpp = IPL_DEPTH_16S; - nch = 3; - break; - default: - break; - } - // reset image header - cvInitImageHeader( &capture->frame,cvSize( width, height ), bpp, nch, IPL_ORIGIN_TL, 4 ); - //assert(capture->frame.imageSize == capture->camera->quadlets_per_frame*4); - capture->buffer_is_writeable = 0; - } - return 1; -} - -// Toggle setting about whether or not RGB 
color conversion is to be performed -// Allocates/Initializes capture->frame appropriately -int icvSetConvertRGB(CvCaptureCAM_DC1394 * capture, int convert){ - if(convert==capture->convert){ - // no action necessary - return 1; - } - capture->convert = convert; - return icvResizeFrame( capture ); -} - -// given desired format, mode, and modes bitmask from camera, determine if format and mode are supported -static int -icvFormatSupportedCAM_DC1394(int format, quadlet_t formats){ - // formats is a bitmask whose higher order bits indicate whether format is supported - int shift = 31 - (format - FORMAT_MIN); - int mask = 1 << shift; - return (formats & mask) != 0; -} - -// analyze modes bitmask from camera to determine if desired format and mode are supported -static int -icvModeSupportedCAM_DC1394(int format, int mode, quadlet_t modes){ - // modes is a bitmask whose higher order bits indicate whether mode is supported - int format_idx = format - FORMAT_MIN; - int mode_format_min = MODE_FORMAT0_MIN + 32*format_idx; - int shift = 31 - (mode - mode_format_min); - int mask = 0x1 << shift; - return (modes & mask) != 0; -} - -// Setup camera to use given dc1394 mode -static int -icvSetModeCAM_DC1394( CvCaptureCAM_DC1394 * capture, int mode ){ - quadlet_t modes, formats; - //printf("\n"); - - // figure out corrent format for this mode - int format = (mode - MODE_FORMAT0_MIN) / 32 + FORMAT_MIN; - - // get supported formats - if (dc1394_query_supported_formats(capture->handle, capture->camera->node, &formats)<0) { - fprintf(stderr,"%s:%d: Could not query supported formats\n",__FILE__,__LINE__); - return 0; - } - - // is format for requested mode supported ? - if(icvFormatSupportedCAM_DC1394(format, formats)==0){ - return 0; - } - - // get supported modes for requested format - if (dc1394_query_supported_modes(capture->handle, capture->camera->node, format, &modes)<0){ - fprintf(stderr,"%s:%d: Could not query supported modes for format %d\n",__FILE__,__LINE__, capture->format); - return 0; - } - - // is requested mode supported ? - if(! 
icvModeSupportedCAM_DC1394(format, mode, modes) ){ - return 0; - } - - int color_mode = icvColorMode( mode ); - - if(color_mode == -1){ - return 0; - } - - int frame_rate = icvGetBestFrameRate(capture, format, mode); - - dc1394_dma_unlisten(capture->handle, capture->camera); - if (dc1394_dma_setup_capture(capture->handle, capture->camera->node, capture->camera->channel /*channel*/, - format, mode, SPEED_400, - frame_rate, NUM_BUFFERS, 1 /*drop_frames*/, - capture->device_name, capture->camera) != DC1394_SUCCESS) { - fprintf(stderr,"%s:%d: Failed to setup DMA capture with VIDEO1394\n",__FILE__,__LINE__); - return 0; - } - dc1394_start_iso_transmission(capture->handle, capture->camera->node); - - capture->frame_rate = frame_rate; - capture->format = format; - capture->mode = mode; - capture->color_mode = color_mode; - - // now fix image size to match new mode - icvResizeFrame( capture ); - return 1; -} - -// query camera for supported frame rates and select fastest for given format and mode -static unsigned int icvGetBestFrameRate( CvCaptureCAM_DC1394 * capture, int format, int mode ){ - quadlet_t framerates; - if (dc1394_query_supported_framerates(capture->handle, capture->camera->node, - format, mode, &framerates)!=DC1394_SUCCESS) - { - fprintf(stderr,"%s:%d: Could not query supported framerates\n",__FILE__,__LINE__); - framerates = 0; - } - - for (int f=FRAMERATE_MAX; f>=FRAMERATE_MIN; f--) { - if (framerates & (0x1<< (31-(f-FRAMERATE_MIN)))) { - return f; - } - } - return 0; -} - -static int -icvSetFrameRateCAM_DC1394( CvCaptureCAM_DC1394 * capture, double value ){ - unsigned int fps=15; - if(capture->format == FORMAT_SCALABLE_IMAGE_SIZE) - return 0; /* format 7 has no fixed framerates */ - if (value==-1){ - fps=icvGetBestFrameRate( capture, capture->format, capture->mode ); - } - else if (value==1.875) - fps=FRAMERATE_1_875; - else if (value==3.75) - fps=FRAMERATE_3_75; - else if (value==7.5) - fps=FRAMERATE_7_5; - else if (value==15) - fps=FRAMERATE_15; - else if (value==30) - fps=FRAMERATE_30; - else if (value==60) - fps=FRAMERATE_60; -#if NUM_FRAMERATES > 6 - else if (value==120) - fps=FRAMERATE_120; -#endif -#if NUM_FRAMERATES > 7 - else if (value==240) - fps=FRAMERATE_240; -#endif - dc1394_set_video_framerate(capture->handle, capture->camera->node,fps); - dc1394_get_video_framerate(capture->handle, capture->camera->node, - (unsigned int *) &capture->camera->frame_rate); - - return fps==(unsigned int) capture->camera->frame_rate; -} - -// for given mode return color format -static int -icvColorMode( int mode ){ - switch(mode) { - case MODE_160x120_YUV444: - return COLOR_FORMAT7_YUV444; - case MODE_320x240_YUV422: - case MODE_640x480_YUV422: - case MODE_800x600_YUV422: - case MODE_1024x768_YUV422: - case MODE_1280x960_YUV422: - case MODE_1600x1200_YUV422: - return COLOR_FORMAT7_YUV422; - case MODE_640x480_YUV411: - return COLOR_FORMAT7_YUV411; - case MODE_640x480_RGB: - case MODE_800x600_RGB: - case MODE_1024x768_RGB: - case MODE_1280x960_RGB: - case MODE_1600x1200_RGB: - return COLOR_FORMAT7_RGB8; - case MODE_640x480_MONO: - case MODE_800x600_MONO: - case MODE_1024x768_MONO: - case MODE_1280x960_MONO: - case MODE_1600x1200_MONO: - return COLOR_FORMAT7_MONO8; - case MODE_640x480_MONO16: - case MODE_800x600_MONO16: - case MODE_1024x768_MONO16: - case MODE_1280x960_MONO16: - case MODE_1600x1200_MONO16: - return COLOR_FORMAT7_MONO16; - case MODE_FORMAT7_0: - case MODE_FORMAT7_1: - case MODE_FORMAT7_2: - case MODE_FORMAT7_3: - case MODE_FORMAT7_4: - case MODE_FORMAT7_5: - case 
MODE_FORMAT7_6: - case MODE_FORMAT7_7: - fprintf(stderr,"%s:%d: Format7 not yet supported\n",__FILE__,__LINE__); - default: - break; - } - return -1; -} - -// function to set camera properties using dc1394 feature enum -// val == -1 indicates to set this property to 'auto' -static int -icvSetFeatureCAM_DC1394( CvCaptureCAM_DC1394* capture, int feature_id, int val){ - dc1394bool_t isOn = DC1394_FALSE; - dc1394bool_t hasAutoCapability = DC1394_FALSE; - dc1394bool_t isAutoOn = DC1394_FALSE; - unsigned int nval; - unsigned int minval,maxval; - - // Turn the feature on if it is OFF - if( dc1394_is_feature_on(capture->handle, capture->camera->node, feature_id, &isOn) - == DC1394_FAILURE ) { - return 0; - } - if( isOn == DC1394_FALSE ) { - // try to turn it on. - if( dc1394_feature_on_off(capture->handle, capture->camera->node, feature_id, 1) == DC1394_FAILURE ) { - fprintf(stderr, "error turning feature %d on!\n", feature_id); - return 0; - } - } - - // Check if the feature supports auto mode - dc1394_has_auto_mode(capture->handle, capture->camera->node, feature_id, &hasAutoCapability); - if( hasAutoCapability ) { - - // now check if the auto is on. - if( dc1394_is_feature_auto(capture->handle, capture->camera->node, feature_id, &isAutoOn ) == DC1394_FAILURE ) { - fprintf(stderr, "error determining if feature %d has auto on!\n", feature_id); - return 0; - } - } - // Caller requested auto mode, but cannot support it - else if(val==-1){ - fprintf(stderr, "feature %d does not support auto mode\n", feature_id); - return 0; - } - - if(val==-1){ - // if the auto mode isn't enabled, enable it - if( isAutoOn == DC1394_FALSE ) { - if(dc1394_auto_on_off(capture->handle, capture->camera->node, feature_id, 1) == DC1394_FAILURE ) { - fprintf(stderr, "error turning feature %d auto ON!\n", feature_id); - return 0; - } - } - return 1; - } - - // ELSE turn OFF auto and adjust feature manually - if( isAutoOn == DC1394_TRUE ) { - if(dc1394_auto_on_off(capture->handle, capture->camera->node, feature_id, 0) == DC1394_FAILURE ) { - fprintf(stderr, "error turning feature %d auto OFF!\n", feature_id); - return 0; - } - } - - // Clamp val to within feature range - CV_DC1394_CALL( dc1394_get_min_value(capture->handle, capture->camera->node, feature_id, &minval)); - CV_DC1394_CALL( dc1394_get_max_value(capture->handle, capture->camera->node, feature_id, &maxval)); - val = (int)MIN(maxval, MAX((unsigned)val, minval)); - - - if (dc1394_set_feature_value(capture->handle, capture->camera->node, feature_id, val) == - DC1394_FAILURE){ - fprintf(stderr, "error setting feature value\n"); - return 0; - } - if (dc1394_get_feature_value(capture->handle, capture->camera->node, feature_id, &nval) == - DC1394_FAILURE){ - fprintf(stderr, "error setting feature value\n"); - return 0; - } - return nval==(unsigned int)val; - -} - -// cvSetCaptureProperty callback function implementation -static int -icvSetPropertyCAM_DC1394( CvCaptureCAM_DC1394* capture, int property_id, double value ){ - int index=-1; - switch ( property_id ) { - case CV_CAP_PROP_CONVERT_RGB: - return icvSetConvertRGB( capture, value != 0 ); - case CV_CAP_PROP_MODE: - return icvSetModeCAM_DC1394( capture, (int) value ); - case CV_CAP_PROP_FPS: - return icvSetFrameRateCAM_DC1394( capture, value ); - case CV_CAP_PROP_BRIGHTNESS: - index = FEATURE_BRIGHTNESS; - break; - case CV_CAP_PROP_CONTRAST: - index = FEATURE_GAMMA; - break; - case CV_CAP_PROP_SATURATION: - index = FEATURE_SATURATION; - break; - case CV_CAP_PROP_HUE: - index = FEATURE_HUE; - break; - case 
CV_CAP_PROP_GAIN: - index = FEATURE_GAIN; - break; - default: - index = property_id; // did they pass in a LIBDC1394 feature flag? - break; - } - if(index>=FEATURE_MIN && index<=FEATURE_MAX){ - return icvSetFeatureCAM_DC1394(capture, index, (int) value); - } - return 0; -}; - -/********************************************************************** - * - * CONVERSION FUNCTIONS TO RGB 24bpp - * - **********************************************************************/ - -/* color conversion functions from Bart Nabbe. *//* corrected by Damien: bad coeficients in YUV2RGB */ -#define YUV2RGB(y, u, v, r, g, b)\ - r = y + ((v*1436) >> 10);\ -g = y - ((u*352 + v*731) >> 10);\ -b = y + ((u*1814) >> 10);\ -r = r < 0 ? 0 : r;\ -g = g < 0 ? 0 : g;\ -b = b < 0 ? 0 : b;\ -r = r > 255 ? 255 : r;\ -g = g > 255 ? 255 : g;\ -b = b > 255 ? 255 : b - - static void -uyv2bgr(const unsigned char *src, unsigned char *dest, - unsigned long long int NumPixels) -{ - int i = NumPixels + (NumPixels << 1) - 1; - int j = NumPixels + (NumPixels << 1) - 1; - int y, u, v; - int r, g, b; - - while (i > 0) { - v = src[i--] - 128; - y = src[i--]; - u = src[i--] - 128; - YUV2RGB(y, u, v, r, g, b); - dest[j--] = r; - dest[j--] = g; - dest[j--] = b; - } -} - - static void -uyvy2bgr(const unsigned char *src, unsigned char *dest, - unsigned long long int NumPixels) -{ - int i = (NumPixels << 1) - 1; - int j = NumPixels + (NumPixels << 1) - 1; - int y0, y1, u, v; - int r, g, b; - - while (i > 0) { - y1 = src[i--]; - v = src[i--] - 128; - y0 = src[i--]; - u = src[i--] - 128; - YUV2RGB(y1, u, v, r, g, b); - dest[j--] = r; - dest[j--] = g; - dest[j--] = b; - YUV2RGB(y0, u, v, r, g, b); - dest[j--] = r; - dest[j--] = g; - dest[j--] = b; - } -} - - - static void -uyyvyy2bgr(const unsigned char *src, unsigned char *dest, - unsigned long long int NumPixels) -{ - int i = NumPixels + (NumPixels >> 1) - 1; - int j = NumPixels + (NumPixels << 1) - 1; - int y0, y1, y2, y3, u, v; - int r, g, b; - - while (i > 0) { - y3 = src[i--]; - y2 = src[i--]; - v = src[i--] - 128; - y1 = src[i--]; - y0 = src[i--]; - u = src[i--] - 128; - YUV2RGB(y3, u, v, r, g, b); - dest[j--] = r; - dest[j--] = g; - dest[j--] = b; - YUV2RGB(y2, u, v, r, g, b); - dest[j--] = r; - dest[j--] = g; - dest[j--] = b; - YUV2RGB(y1, u, v, r, g, b); - dest[j--] = r; - dest[j--] = g; - dest[j--] = b; - YUV2RGB(y0, u, v, r, g, b); - dest[j--] = r; - dest[j--] = g; - dest[j--] = b; - } -} - - static void -y2bgr(const unsigned char *src, unsigned char *dest, - unsigned long long int NumPixels) -{ - int i = NumPixels - 1; - int j = NumPixels + (NumPixels << 1) - 1; - int y; - - while (i > 0) { - y = src[i--]; - dest[j--] = y; - dest[j--] = y; - dest[j--] = y; - } -} - - static void -y162bgr(const unsigned char *src, unsigned char *dest, - unsigned long long int NumPixels, int bits) -{ - int i = (NumPixels << 1) - 1; - int j = NumPixels + (NumPixels << 1) - 1; - int y; - - while (i > 0) { - y = src[i--]; - y = (y + (src[i--] << 8)) >> (bits - 8); - dest[j--] = y; - dest[j--] = y; - dest[j--] = y; - } -} - -// this one was in coriander but didn't take bits into account - static void -rgb482bgr(const unsigned char *src, unsigned char *dest, - unsigned long long int NumPixels, int bits) -{ - int i = (NumPixels << 1) - 1; - int j = NumPixels + (NumPixels << 1) - 1; - int y; - - while (i > 0) { - y = src[i--]; - dest[j-2] = (y + (src[i--] << 8)) >> (bits - 8); - j--; - y = src[i--]; - dest[j] = (y + (src[i--] << 8)) >> (bits - 8); - j--; - y = src[i--]; - dest[j+2] = (y + (src[i--] << 8)) >> 
(bits - 8); - j--; - } -} - - -class CvCaptureCAM_DC1394_CPP : public CvCapture -{ -public: - CvCaptureCAM_DC1394_CPP() { captureDC1394 = 0; } - virtual ~CvCaptureCAM_DC1394_CPP() { close(); } - - virtual bool open( int index ); - virtual void close(); - - virtual double getProperty(int) const CV_OVERRIDE; - virtual bool setProperty(int, double) CV_OVERRIDE; - virtual bool grabFrame() CV_OVERRIDE; - virtual IplImage* retrieveFrame(int) CV_OVERRIDE; - virtual int getCaptureDomain() CV_OVERRIDE { return CV_CAP_DC1394; } -protected: - - CvCaptureCAM_DC1394* captureDC1394; -}; - -bool CvCaptureCAM_DC1394_CPP::open( int index ) -{ - close(); - captureDC1394 = icvCaptureFromCAM_DC1394(index); - return captureDC1394 != 0; -} - -void CvCaptureCAM_DC1394_CPP::close() -{ - if( captureDC1394 ) - { - icvCloseCAM_DC1394( captureDC1394 ); - cvFree( &captureDC1394 ); - } -} - -bool CvCaptureCAM_DC1394_CPP::grabFrame() -{ - return captureDC1394 ? icvGrabFrameCAM_DC1394( captureDC1394 ) != 0 : false; -} - -IplImage* CvCaptureCAM_DC1394_CPP::retrieveFrame(int) -{ - return captureDC1394 ? (IplImage*)icvRetrieveFrameCAM_DC1394( captureDC1394, 0 ) : 0; -} - -double CvCaptureCAM_DC1394_CPP::getProperty( int propId ) const -{ - // Simulate mutable (C++11-like) member variable - // (some members are used to cache property settings). - CvCaptureCAM_DC1394* cap = const_cast(captureDC1394); - - return cap ? icvGetPropertyCAM_DC1394( cap, propId ) : 0; -} - -bool CvCaptureCAM_DC1394_CPP::setProperty( int propId, double value ) -{ - return captureDC1394 ? icvSetPropertyCAM_DC1394( captureDC1394, propId, value ) != 0 : false; -} - -CvCapture* cvCreateCameraCapture_DC1394( int index ) -{ - CvCaptureCAM_DC1394_CPP* capture = new CvCaptureCAM_DC1394_CPP; - - if( capture->open( index )) - return capture; - - delete capture; - return 0; -} - -#endif diff --git a/modules/videoio/src/cap_dshow.cpp b/modules/videoio/src/cap_dshow.cpp index f6ca6a7329..5f1ec35ccd 100644 --- a/modules/videoio/src/cap_dshow.cpp +++ b/modules/videoio/src/cap_dshow.cpp @@ -96,6 +96,7 @@ Thanks to: #ifdef __MINGW32__ // MinGW does not understand COM interfaces #pragma GCC diagnostic ignored "-Wnon-virtual-dtor" +#define STRSAFE_NO_DEPRECATE #endif #include @@ -108,9 +109,9 @@ Thanks to: #include //Include Directshow stuff here so we don't worry about needing all the h files. -#include "DShow.h" +#include "dshow.h" #include "strmif.h" -#include "Aviriff.h" +#include "aviriff.h" #include "dvdmedia.h" #include "bdaiface.h" diff --git a/modules/videoio/src/cap_giganetix.cpp b/modules/videoio/src/cap_giganetix.cpp deleted file mode 100644 index 98fba269b0..0000000000 --- a/modules/videoio/src/cap_giganetix.cpp +++ /dev/null @@ -1,764 +0,0 @@ -//////////////////////////////////////////////////////////////////////////////////////// -// -// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING. -// -// By downloading, copying, installing or using the software you agree to this license. -// If you do not agree to this license, do not download, install, -// copy or use the software. -// -// -// Intel License Agreement -// For Open Source Computer Vision Library -// -// Copyright (C) 2000, Intel Corporation, all rights reserved. -// Third party copyrights are property of their respective owners. 
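[Editorial note on the fixed-point YUV2RGB macro in the removed cap_dc1394.cpp above: the constants 1436, 352, 731 and 1814 are the ITU-R BT.601 YCbCr-to-RGB coefficients 1.402, 0.344, 0.714 and 1.772 scaled by 1024, which is why every term is shifted right by 10. The floating-point sketch below is for reference only and is not part of the patch; the helper name yuv2rgb_bt601 is ours, and u/v are assumed to be already centred around zero as in the macro.]

// Reference-only sketch: BT.601 YCbCr -> RGB in floating point, equivalent to
// the ">> 10" fixed-point YUV2RGB macro in the removed code above.
static inline void yuv2rgb_bt601(int y, int u, int v, int& r, int& g, int& b)
{
    r = (int)(y + 1.402 * v);              // 1436 / 1024
    g = (int)(y - 0.344 * u - 0.714 * v);  //  352 / 1024 and 731 / 1024
    b = (int)(y + 1.772 * u);              // 1814 / 1024
    r = r < 0 ? 0 : (r > 255 ? 255 : r);   // clamp to the 8-bit range
    g = g < 0 ? 0 : (g > 255 ? 255 : g);
    b = b < 0 ? 0 : (b > 255 ? 255 : b);
}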
-// -// Redistribution and use in source and binary forms, with or without modification, -// are permitted provided that the following conditions are met: -// -// * Redistribution's of source code must retain the above copyright notice, -// this list of conditions and the following disclaimer. -// -// * Redistribution's in binary form must reproduce the above copyright notice, -// this list of conditions and the following disclaimer in the documentation -// and/or other materials provided with the distribution. -// -// * The name of Intel Corporation may not be used to endorse or promote products -// derived from this software without specific prior written permission. -// -// This software is provided by the copyright holders and contributors "as is" and -// any express or implied warranties, including, but not limited to, the implied -// warranties of merchantability and fitness for a particular purpose are disclaimed. -// In no event shall the Intel Corporation or contributors be liable for any direct, -// indirect, incidental, special, exemplary, or consequential damages -// (including, but not limited to, procurement of substitute goods or services; -// loss of use, data, or profits; or business interruption) however caused -// and on any theory of liability, whether in contract, strict liability, -// or tort (including negligence or otherwise) arising in any way out of -// the use of this software, even if advised of the possibility of such damage. -// -// - -// -// The code has been contributed by Vladimir N. Litvinenko on 2012 Jul -// mailto:vladimir.litvinenko@codepaint.ru -// - -#include "precomp.hpp" -#include -#include - -#ifdef _WIN32 -#include -#else -#include -#endif - -#ifdef NDEBUG -#define CV_WARN(message) -#else -#define CV_WARN(message) fprintf(stderr, "warning: %s (%s:%d)\n", message, __FILE__, __LINE__) -#endif - -#define QTGIG_HEARTBEAT_TIME (12000.0) -#define QTGIG_MAX_WAIT_TIME (2.0) -#define QTGIG_IMG_WAIT_TIME (3.0) - -/*----------------------------------------------------------------------------*/ -/** - \internal - \fn bool wrprInitGigEVisionAPI(); - \brief Wrapper to GigEVisionAPI function gige::InitGigEVisionAPI () - \return true -- success - See \a wrprExitGigEVisionAPI - -*/ -bool -wrprInitGigEVisionAPI() -{ - CV_FUNCNAME("wrprInitGigEVisionAPI"); - __BEGIN__; - - try { - gige::InitGigEVisionAPI (); - } catch(...) { - CV_ERROR(CV_StsError, "GigEVisionAPI: initialization (InitGigEVisionAPI()) failed.\n"); - } - __END__; - return true; -} - -/*----------------------------------------------------------------------------*/ -/** - \internal - \fn void wrprExitGigEVisionAPI() - \brief Wrapper to GigEVisionAPI function gige::ExitGigEVisionAPI () - \return true -- success - See \a wrprInitGigEVisionAPI - -*/ -bool -wrprExitGigEVisionAPI() -{ - CV_FUNCNAME("wrprExitGigEVisionAPI"); - __BEGIN__; - - try { - gige::ExitGigEVisionAPI (); - } catch(...) 
{ - CV_ERROR(CV_StsError, "GigEVisionAPI: finalization (ExitGigEVisionAPI()) failed.\n"); - return false; - } - __END__; - return true; -} - - -/*----------------------------------------------------------------------------*/ -/** - \internal - \fn gige::IGigEVisionAPI wrprGetGigEVisionAPI() - \brief Wrapper to GigEVisionAPI function gige::GetGigEVisionAPI () - \return item of gige::IGigEVisionAPI type - See \a wrprInitGigEVisionAPI, \a gige::IGigEVisionAPI -*/ -gige::IGigEVisionAPI -wrprGetGigEVisionAPI() -{ - - gige::IGigEVisionAPI b_ret = 0; - - CV_FUNCNAME("wrprGetGigEVisionAPI"); - __BEGIN__; - - try { - b_ret = gige::GetGigEVisionAPI (); - } catch(...) { - CV_ERROR(CV_StsError, "GigEVisionAPI: API instance (from GetGigEVisionAPI()) failed.\n"); - } - - __END__; - - return b_ret; -} - - -/*----------------------------------------------------------------------------*/ -/** - \internal - \fn bool wrprUnregisterCallback( const gige::IGigEVisionAPI* api, gige::ICallbackEvent* eventHandler) - \brief Wrapper to GigEVisionAPI function - \param api - \param eventHandler - \return true - success, else - false - See \a wrprInitGigEVisionAPI, \a gige::IGigEVisionAPI - -*/ -bool -wrprUnregisterCallback( const gige::IGigEVisionAPI* api, gige::ICallbackEvent* eventHandler) -{ - bool b_ret = api != NULL; - - if(b_ret) b_ret = api->IsValid (); - - CV_FUNCNAME("wrprUnregisterCallback"); - __BEGIN__; - - if(b_ret) - { - if(eventHandler != NULL) - { - try { - b_ret = ((gige::IGigEVisionAPIInterface*)api)->UnregisterCallback (eventHandler); - } catch(...) { - CV_ERROR(CV_StsError, "GigEVisionAPI: API unregister callback function (from UnregisterCallback()) failed.\n"); - b_ret = false; - } - } - } - __END__; - - return (b_ret); -} - - -/*----------------------------------------------------------------------------*/ -/** - \internal - \fn bool wrprDeviceIsConnect( gige::IDevice& device ) - \brief Wrapper to GigEVisionAPI function IDevice::IsConnected() - \param device - selected device - \return true - device connected -*/ -bool -wrprDeviceIsConnect( gige::IDevice& device ) -{ - bool b_ret = device != NULL; - - CV_FUNCNAME("wrprDeviceIsConnect"); - __BEGIN__; - - if(b_ret) - { - try { - b_ret = device->IsConnected (); - } catch (...) { - CV_ERROR(CV_StsError, "GigEVisionAPI: API device connection state (from IsConnected()) failed.\n"); - b_ret = false; - } - } - __END__; - - return (b_ret); -} - - -/*----------------------------------------------------------------------------*/ -/** - \internal - \fn bool wrprDeviceIsValid( gige::IDevice& device ) - \brief Wrapper to GigEVisionAPI function IDevice::Connect() - \param device - selected device - \return true - device valid - -*/ -bool -wrprDeviceIsValid( gige::IDevice& device ) -{ - bool b_ret = device != NULL; - - CV_FUNCNAME("wrprDeviceIsConnect"); - __BEGIN__; - - if(b_ret) - { - try { - b_ret = device.IsValid (); - } catch (...) 
{ - CV_ERROR(CV_StsError, "GigEVisionAPI: API device validation state (from IsValid()) failed.\n"); - b_ret = false; - } - } - __END__; - - return (b_ret); -} - - -/*----------------------------------------------------------------------------*/ -/** - \internal - \fn bool wrprDeviceDisconnect ( gige::IDevice& device ) - \brief Wrapper to GigEVisionAPI function IDevice::Disconnect() - \param device - selected device - \return true - device valid - -*/ -bool -wrprDeviceDisconnect ( gige::IDevice& device ) -{ - bool b_ret = device != NULL; - - CV_FUNCNAME("wrprDeviceDisconnect"); - __BEGIN__; - - if(b_ret) - { - try { - device->Disconnect (); - } catch (...) { - CV_ERROR(CV_StsError, "GigEVisionAPI: API device disconnect (from Disconnect()) failed.\n"); - b_ret = false; - } - } - - __END__; - - return (b_ret); -} - - -/*----------------------------------------------------------------------------*/ -/*----------------------------------------------------------------------------*/ -/** - \internal - \class CvCaptureCAM_Giganetix - \brief Capturing video from camera via Smartec Giganetix (use GigEVisualSDK library). -*/ - -class CvCaptureCAM_Giganetix : public CvCapture -{ - public: - CvCaptureCAM_Giganetix(); - virtual ~CvCaptureCAM_Giganetix(); - - virtual bool open( int index ); - virtual void close(); - virtual double getProperty(int) const CV_OVERRIDE; - virtual bool setProperty(int, double) CV_OVERRIDE; - virtual bool grabFrame() CV_OVERRIDE; - virtual IplImage* retrieveFrame(int) CV_OVERRIDE; - virtual int getCaptureDomain() CV_OVERRIDE - { - return CV_CAP_GIGANETIX; - } - - bool start (); - bool stop (); - - protected: - - void init (); - void grabImage (); - - gige::IGigEVisionAPI m_api; - bool m_api_on; - gige::IDevice m_device; - bool m_active; - - IplImage* m_raw_image; - UINT32 m_rawImagePixelType; - bool m_monocrome; - -}; -/*----------------------------------------------------------------------------*/ -/*----------------------------------------------------------------------------*/ -void -CvCaptureCAM_Giganetix::init () -{ - m_monocrome = m_active = m_api_on = false; - m_api = 0; - m_device = 0; - m_raw_image = 0; - m_rawImagePixelType = 0; -} - -/*----------------------------------------------------------------------------*/ -CvCaptureCAM_Giganetix::CvCaptureCAM_Giganetix() -{ - init (); - - m_api_on = wrprInitGigEVisionAPI (); - - if(m_api_on) - { - if((m_api = wrprGetGigEVisionAPI ()) != NULL) - { - m_api->SetHeartbeatTime (QTGIG_HEARTBEAT_TIME); - } - } -} - -/*----------------------------------------------------------------------------*/ -CvCaptureCAM_Giganetix::~CvCaptureCAM_Giganetix() -{ - close(); -} -/*----------------------------------------------------------------------------*/ -void -CvCaptureCAM_Giganetix::close() -{ - stop (); - - (void)wrprDeviceDisconnect(m_device); - - (void)wrprExitGigEVisionAPI (); - - if(m_raw_image) cvReleaseImageHeader(&m_raw_image); - - init (); -} - -/*----------------------------------------------------------------------------*/ -bool -CvCaptureCAM_Giganetix::open( int index ) -{ - bool b_ret = m_api_on; - - CV_FUNCNAME("CvCaptureCAM_Giganetix::open"); - __BEGIN__; - - if(b_ret) - b_ret = m_api.IsValid (); - - if(b_ret ) - { - m_api->FindAllDevices (QTGIG_MAX_WAIT_TIME); - - //TODO - serch device as DevicesList member - gige::DevicesList DevicesList = m_api->GetAllDevices (); - - m_device = 0; - b_ret = false; - - for (int i = 0; i < (int) DevicesList.size() && !b_ret; i++) - { - b_ret = (i == index); - if(b_ret) - { - m_device = 
DevicesList[i]; - b_ret = m_device->Connect (); - - if(b_ret) - { - b_ret = - m_device->SetStringNodeValue("AcquisitionStatusSelector", "AcquisitionActive") - && - m_device->SetStringNodeValue ("TriggerMode", "Off") - && - m_device->SetStringNodeValue ("AcquisitionMode", "Continuous") - && - m_device->SetIntegerNodeValue ("AcquisitionFrameCount", 20) - ; - } - } - } // for - } - - if(!b_ret) - { - CV_ERROR(CV_StsError, "Giganetix: Error cannot find camera\n"); - close (); - } else { - start (); - } - - __END__; - - return b_ret; -} - -/*----------------------------------------------------------------------------*/ -void -CvCaptureCAM_Giganetix::grabImage () -{ - CV_FUNCNAME("CvCaptureCAM_Giganetix::grabImage"); - __BEGIN__; - - if(wrprDeviceIsValid(m_device) && wrprDeviceIsConnect(m_device)) - { - if(!m_device->IsBufferEmpty ()) - { - gige::IImageInfo imageInfo; - m_device->GetImageInfo (&imageInfo); - assert(imageInfo.IsValid()); - - if (m_device->GetPendingImagesCount() == 1) - { - UINT32 newPixelType; - UINT32 newWidth, newHeight; - - imageInfo->GetPixelType(newPixelType); - imageInfo->GetSize(newWidth, newHeight); - - //TODO - validation of image exists - bool b_validation = m_raw_image != NULL; - if(b_validation) - { - b_validation = - m_raw_image->imageSize == (int)(imageInfo->GetRawDataSize ()) - && - m_rawImagePixelType == newPixelType; - } else { - if(m_raw_image) cvReleaseImageHeader(&m_raw_image); - } - - m_rawImagePixelType = newPixelType; - m_monocrome = GvspGetBitsPerPixel((GVSP_PIXEL_TYPES)newPixelType) == IPL_DEPTH_8U; - - try { - if (m_monocrome) - { - //TODO - For Mono & Color BayerRGB raw pixel types - if (!b_validation) - { - m_raw_image = cvCreateImageHeader (cvSize((int)newWidth, (int)newHeight),IPL_DEPTH_8U,1); - m_raw_image->origin = IPL_ORIGIN_TL; - m_raw_image->dataOrder = IPL_DATA_ORDER_PIXEL; - m_raw_image->widthStep = newWidth; - } - // Copy image. - // ::memcpy(m_raw_image->imageData, imageInfo->GetRawData (), imageInfo->GetRawDataSize ()); - - //TODO - Set pointer to image ! - m_raw_image->imageData = (char*)(imageInfo->GetRawData ()); - } - - if (!m_monocrome && newPixelType == GVSP_PIX_RGB8_PACKED) - { - //TODO - 24 bit RGB color image. - if (!b_validation) - { - m_raw_image = cvCreateImageHeader (cvSize((int)newWidth, (int)newHeight), IPL_DEPTH_32F, 3); - m_raw_image->origin = IPL_ORIGIN_TL; - m_raw_image->dataOrder = IPL_DATA_ORDER_PIXEL; - m_raw_image->widthStep = newWidth * 3; - } - m_raw_image->imageData = (char*)(imageInfo->GetRawData ()); - } - } catch (...) 
{ - CV_ERROR(CV_StsError, "Giganetix: failed to queue a buffer on device\n"); - close (); - } - } else { - //TODO - all other pixel types - m_raw_image = 0; - CV_WARN("Giganetix: Undefined image pixel type\n"); - } - m_device->PopImage (imageInfo); - m_device->ClearImageBuffer (); - } - } - - __END__; -} - -/*----------------------------------------------------------------------------*/ -bool -CvCaptureCAM_Giganetix::start () -{ - CV_FUNCNAME("CvCaptureCAM_Giganetix::start"); - __BEGIN__; - - m_active = wrprDeviceIsValid(m_device) && wrprDeviceIsConnect(m_device); - - if(m_active) - { - (void)m_device->SetIntegerNodeValue("TLParamsLocked", 1); - (void)m_device->CommandNodeExecute("AcquisitionStart"); - m_active = m_device->GetBooleanNodeValue("AcquisitionStatus", m_active); - } - - if(!m_active) - { - CV_ERROR(CV_StsError, "Giganetix: Cannot open camera\n"); - close (); - } - - __END__; - - return m_active; -} - -/*----------------------------------------------------------------------------*/ -bool -CvCaptureCAM_Giganetix::stop () -{ - if (!m_active) return true; - - CV_FUNCNAME("CvCaptureCAM_Giganetix::stop"); - __BEGIN__; - - if(wrprDeviceIsValid(m_device) && wrprDeviceIsConnect(m_device)) - { - (void)m_device->GetBooleanNodeValue("AcquisitionStatus", m_active); - - if(m_active) - { - (void)m_device->CommandNodeExecute("AcquisitionStop"); - (void)m_device->SetIntegerNodeValue("TLParamsLocked", 0); - m_device->ClearImageBuffer (); - (void)m_device->GetBooleanNodeValue("AcquisitionStatus", m_active); - } - } - - if(m_active) - { - CV_ERROR(CV_StsError, "Giganetix: Improper closure of the camera\n"); - close (); - } - __END__; - - return !m_active; -} - -/*----------------------------------------------------------------------------*/ -bool -CvCaptureCAM_Giganetix::grabFrame() -{ - bool b_ret = - wrprDeviceIsValid(m_device) - && - wrprDeviceIsConnect(m_device); - - if(b_ret) grabImage (); - - return b_ret; -} - - -/*----------------------------------------------------------------------------*/ -IplImage* -CvCaptureCAM_Giganetix::retrieveFrame(int) -{ - return ( - wrprDeviceIsValid(m_device) && wrprDeviceIsConnect(m_device) ? 
- m_raw_image : - NULL - ); -} - -/*----------------------------------------------------------------------------*/ -double -CvCaptureCAM_Giganetix::getProperty( int property_id ) const -{ - double d_ret = -1.0; - INT64 i; - - if(wrprDeviceIsConnect(m_device)) - { - switch ( property_id ) - { - case CV_CAP_PROP_FRAME_WIDTH: - m_device->GetIntegerNodeValue ("Width", i); - d_ret = i; - break; - case CV_CAP_PROP_FRAME_HEIGHT: - m_device->GetIntegerNodeValue ("Height", i); - d_ret = i; - break; - case CV_CAP_PROP_GIGA_FRAME_OFFSET_X: - m_device->GetIntegerNodeValue ("OffsetX", i); - d_ret = i; - break; - case CV_CAP_PROP_GIGA_FRAME_OFFSET_Y: - m_device->GetIntegerNodeValue ("OffsetY", i); - d_ret = i; - break; - case CV_CAP_PROP_GIGA_FRAME_WIDTH_MAX: - m_device->GetIntegerNodeValue ("WidthMax", i); - d_ret = i; - break; - case CV_CAP_PROP_GIGA_FRAME_HEIGH_MAX: - m_device->GetIntegerNodeValue ("HeightMax", i); - d_ret = i; - break; - case CV_CAP_PROP_GIGA_FRAME_SENS_WIDTH: - m_device->GetIntegerNodeValue ("SensorWidth", i); - d_ret = i; - break; - case CV_CAP_PROP_GIGA_FRAME_SENS_HEIGH: - m_device->GetIntegerNodeValue ("SensorHeight", i); - d_ret = i; - break; - case CV_CAP_PROP_FRAME_COUNT: - m_device->GetIntegerNodeValue ("AcquisitionFrameCount", i); - d_ret = i; - break; - case CV_CAP_PROP_EXPOSURE: - m_device->GetFloatNodeValue ("ExposureTime",d_ret); - break; - case CV_CAP_PROP_GAIN : - m_device->GetFloatNodeValue ("Gain",d_ret); - break; - case CV_CAP_PROP_TRIGGER : - bool b; - m_device->GetBooleanNodeValue ("TriggerMode",b); - d_ret = (double)b; - break; - case CV_CAP_PROP_TRIGGER_DELAY : - m_device->GetFloatNodeValue ("TriggerDelay",d_ret); - break; - default : ; - } - } - - return d_ret; -} - -/*----------------------------------------------------------------------------*/ -bool -CvCaptureCAM_Giganetix::setProperty( int property_id, double value ) -{ - bool b_ret = wrprDeviceIsConnect(m_device); - - if(b_ret) - { - bool b_val = m_active; - - switch ( property_id ) - { - case CV_CAP_PROP_FRAME_WIDTH: - stop (); - b_ret = m_device->SetIntegerNodeValue ("Width", (INT64)value); - if(b_val) start (); - break; - case CV_CAP_PROP_GIGA_FRAME_WIDTH_MAX: - stop (); - b_ret = m_device->SetIntegerNodeValue ("WidthMax", (INT64)value); - if(b_val) start (); - break; - case CV_CAP_PROP_GIGA_FRAME_SENS_WIDTH: - stop (); - b_ret = m_device->SetIntegerNodeValue ("SensorWidth", (INT64)value); - if(b_val) start (); - break; - case CV_CAP_PROP_FRAME_HEIGHT: - stop (); - b_ret = m_device->SetIntegerNodeValue ("Height", (INT64)value); - if(b_val) start (); - break; - case CV_CAP_PROP_GIGA_FRAME_HEIGH_MAX: - stop (); - b_ret = m_device->SetIntegerNodeValue ("HeightMax", (INT64)value); - if(b_val) start (); - break; - case CV_CAP_PROP_GIGA_FRAME_SENS_HEIGH: - stop (); - b_ret = m_device->SetIntegerNodeValue ("SensorHeight", (INT64)value); - if(b_val) start (); - break; - case CV_CAP_PROP_GIGA_FRAME_OFFSET_X: { - INT64 w, wmax, val = (INT64)value; - if((b_ret = m_device->GetIntegerNodeValue ("Width", w))) - if((b_ret = m_device->GetIntegerNodeValue ("WidthMax", wmax))) - b_ret = m_device->SetIntegerNodeValue ("OffsetX", (val + w) > wmax ? (wmax - w) : val); - } break; - case CV_CAP_PROP_GIGA_FRAME_OFFSET_Y: { - INT64 h, hmax, val = (INT64)value; - if((b_ret = m_device->GetIntegerNodeValue ("Height", h))) - if((b_ret = m_device->GetIntegerNodeValue ("HeightMax", hmax))) - b_ret = m_device->SetIntegerNodeValue ("OffsetY", (val + h) > hmax ? 
(hmax - h) : val); - b_ret = m_device->SetIntegerNodeValue ("OffsetY", (INT64)value); - } - break; - case CV_CAP_PROP_EXPOSURE: - b_ret = m_device->SetFloatNodeValue ("ExposureTime",value); - break; - case CV_CAP_PROP_GAIN : - b_ret = m_device->SetFloatNodeValue ("Gain",value); - break; - case CV_CAP_PROP_TRIGGER : - b_ret = m_device->SetBooleanNodeValue ("TriggerMode",(bool)value); - break; - case CV_CAP_PROP_TRIGGER_DELAY : - stop (); - b_ret = m_device->SetFloatNodeValue ("TriggerDelay",value); - if(b_val) start (); - break; - default: - b_ret = false; - } - } - - return b_ret; -} - - -/*----------------------------------------------------------------------------*/ -/*----------------------------------------------------------------------------*/ -CvCapture* -cvCreateCameraCapture_Giganetix( int index ) -{ - CvCaptureCAM_Giganetix* capture = new CvCaptureCAM_Giganetix; - - if (!(capture->open( index ))) - { - delete capture; - capture = NULL; - } - - return ((CvCapture*)capture); -} - -/*----------------------------------------------------------------------------*/ diff --git a/modules/videoio/src/cap_gstreamer.cpp b/modules/videoio/src/cap_gstreamer.cpp index 23d39d247a..a52553598b 100644 --- a/modules/videoio/src/cap_gstreamer.cpp +++ b/modules/videoio/src/cap_gstreamer.cpp @@ -65,11 +65,8 @@ using namespace std; #define VERSION_NUM(major, minor, micro) (major * 1000000 + minor * 1000 + micro) #define FULL_GST_VERSION VERSION_NUM(GST_VERSION_MAJOR, GST_VERSION_MINOR, GST_VERSION_MICRO) -#if FULL_GST_VERSION >= VERSION_NUM(0,10,32) #include //#include -#endif - #ifdef NDEBUG #define CV_WARN(message) @@ -77,13 +74,8 @@ using namespace std; #define CV_WARN(message) fprintf(stderr, "OpenCV | GStreamer warning: %s (%s:%d)\n", message, __FILE__, __LINE__) #endif -#if GST_VERSION_MAJOR == 0 -#define COLOR_ELEM "ffmpegcolorspace" -#define COLOR_ELEM_NAME "ffmpegcsp" -#else #define COLOR_ELEM "videoconvert" #define COLOR_ELEM_NAME COLOR_ELEM -#endif #if defined(_WIN32) || defined(_WIN64) #if defined(__MINGW32__) @@ -164,12 +156,7 @@ private: GstElement* pipeline; GstElement* v4l2src; GstElement* sink; -#if GST_VERSION_MAJOR > 0 GstSample* sample; -#else - void * sample; // unused - GstBuffer* buffer; -#endif GstCaps* caps; gint64 duration; gint width; @@ -256,19 +243,11 @@ bool GStreamerCapture::grabFrame() if(gst_app_sink_is_eos(GST_APP_SINK(sink))) return false; -#if GST_VERSION_MAJOR == 0 - if(buffer) - gst_buffer_unref(buffer); - buffer = gst_app_sink_pull_buffer(GST_APP_SINK(sink)); - if(!buffer) - return false; -#else if(sample) gst_sample_unref(sample); sample = gst_app_sink_pull_sample(GST_APP_SINK(sink)); if(!sample) return false; -#endif if (isPosFramesEmulated) emulatedFrameNumber++; @@ -283,23 +262,14 @@ bool GStreamerCapture::grabFrame() */ bool GStreamerCapture::retrieveFrame(int, OutputArray dst) { -#if GST_VERSION_MAJOR == 0 - if (!buffer) - return false; -#else if(!sample) return false; -#endif Size sz; if (!determineFrameDims(sz)) return false; // gstreamer expects us to handle the memory at this point // so we can just wrap the raw buffer and be done with it -#if GST_VERSION_MAJOR == 0 - Mat src(sz, CV_8UC1, (uchar*)GST_BUFFER_DATA(buffer)); - src.copyTo(dst); -#else GstBuffer * buf = gst_sample_get_buffer(sample); if (!buf) return false; @@ -321,18 +291,13 @@ bool GStreamerCapture::retrieveFrame(int, OutputArray dst) src.copyTo(dst); } gst_buffer_unmap(buf, &info); -#endif return true; } bool GStreamerCapture::determineFrameDims(Size &sz) { -#if GST_VERSION_MAJOR == 0 - 
GstCaps * frame_caps = gst_buffer_get_caps(buffer); -#else GstCaps * frame_caps = gst_sample_get_caps(sample); -#endif // bail out in no caps if (!GST_CAPS_IS_SIMPLE(frame_caps)) return false; @@ -346,7 +311,6 @@ bool GStreamerCapture::determineFrameDims(Size &sz) sz = Size(width, height); -#if GST_VERSION_MAJOR > 0 const gchar* name = gst_structure_get_name(structure); if (!name) @@ -399,10 +363,6 @@ bool GStreamerCapture::determineFrameDims(Size &sz) channels = 1; isOutputByteBuffer = true; } -#else - // we support only video/x-raw, format=BGR -> 8bit, 3 channels - channels = 3; -#endif return true; } @@ -494,27 +454,17 @@ void GStreamerCapture::setFilter(const char *prop, int type, int v1, int v2) { if(type == G_TYPE_INT) { -#if GST_VERSION_MAJOR == 0 - caps = gst_caps_new_simple("video/x-raw-rgb", prop, type, v1, NULL); -#else caps = gst_caps_new_simple("video/x-raw","format",G_TYPE_STRING,"BGR", prop, type, v1, NULL); -#endif } else { -#if GST_VERSION_MAJOR == 0 - caps = gst_caps_new_simple("video/x-raw-rgb", prop, type, v1, v2, NULL); -#else caps = gst_caps_new_simple("video/x-raw","format",G_TYPE_STRING,"BGR", prop, type, v1, v2, NULL); -#endif } } else { -#if GST_VERSION_MAJOR > 0 if (! gst_caps_is_writable(caps)) caps = gst_caps_make_writable (caps); -#endif if(type == G_TYPE_INT){ gst_caps_set_simple(caps, prop, type, v1, NULL); }else{ @@ -522,9 +472,7 @@ void GStreamerCapture::setFilter(const char *prop, int type, int v1, int v2) } } -#if GST_VERSION_MAJOR > 0 caps = gst_caps_fixate(caps); -#endif gst_app_sink_set_caps(GST_APP_SINK(sink), caps); //printf("filtering with %s\n", gst_caps_to_string(caps)); @@ -540,10 +488,8 @@ void GStreamerCapture::removeFilter(const char *filter) if(!caps) return; -#if GST_VERSION_MAJOR > 0 if (! gst_caps_is_writable(caps)) caps = gst_caps_make_writable (caps); -#endif GstStructure *s = gst_caps_get_structure(caps, 0); gst_structure_remove_field(s, filter); @@ -699,11 +645,7 @@ bool GStreamerCapture::open(const String &filename_) gchar * protocol = gst_uri_get_protocol(uri); if (!strcasecmp(protocol , "v4l2")) { -#if GST_VERSION_MAJOR == 0 - uridecodebin = gst_element_make_from_uri(GST_URI_SRC, uri, "src"); -#else uridecodebin = gst_element_make_from_uri(GST_URI_SRC, uri, "src", NULL); -#endif element_from_uri = true; } else @@ -727,22 +669,14 @@ bool GStreamerCapture::open(const String &filename_) GstElement *element = NULL; gboolean done = false; gchar* name = NULL; -#if GST_VERSION_MAJOR > 0 GValue value = G_VALUE_INIT; -#endif while (!done) { -#if GST_VERSION_MAJOR > 0 switch (gst_iterator_next (it, &value)) { case GST_ITERATOR_OK: element = GST_ELEMENT (g_value_get_object (&value)); -#else - switch (gst_iterator_next (it, (gpointer *)&element)) - { - case GST_ITERATOR_OK: -#endif name = gst_element_get_name(element); if (name) { @@ -762,9 +696,7 @@ bool GStreamerCapture::open(const String &filename_) done = sink && color && v4l2src; } -#if GST_VERSION_MAJOR > 0 g_value_unset (&value); -#endif break; case GST_ITERATOR_RESYNC: @@ -827,15 +759,6 @@ bool GStreamerCapture::open(const String &filename_) gst_app_sink_set_emit_signals (GST_APP_SINK(sink), FALSE); // gst_base_sink_set_sync(GST_BASE_SINK(sink), FALSE); -#if GST_VERSION_MAJOR == 0 - caps = gst_caps_new_simple("video/x-raw-rgb", - "bpp", G_TYPE_INT, 24, - "red_mask", G_TYPE_INT, 0x0000FF, - "green_mask", G_TYPE_INT, 0x00FF00, - "blue_mask", G_TYPE_INT, 0xFF0000, - NULL); -#else - caps = gst_caps_from_string("video/x-raw, format=(string){BGR, GRAY8}; 
video/x-bayer,format=(string){rggb,bggr,grbg,gbrg}; image/jpeg"); if(manualpipeline){ @@ -849,7 +772,6 @@ bool GStreamerCapture::open(const String &filename_) gst_caps_unref(peer_caps); } -#endif gst_app_sink_set_caps(GST_APP_SINK(sink), caps); gst_caps_unref(caps); @@ -876,11 +798,7 @@ bool GStreamerCapture::open(const String &filename_) GstFormat format; format = GST_FORMAT_DEFAULT; -#if GST_VERSION_MAJOR == 0 - if(!gst_element_query_duration(sink, &format, &duration)) -#else if(!gst_element_query_duration(sink, format, &duration)) -#endif { handleMessage(pipeline); CV_WARN("GStreamer: unable to query duration of stream"); @@ -890,11 +808,7 @@ bool GStreamerCapture::open(const String &filename_) handleMessage(pipeline); GstPad* pad = gst_element_get_static_pad(sink, "sink"); -#if GST_VERSION_MAJOR == 0 - GstCaps* buffer_caps = gst_pad_get_caps(pad); -#else GstCaps* buffer_caps = gst_pad_get_current_caps(pad); -#endif const GstStructure *structure = gst_caps_get_structure (buffer_caps, 0); if (!gst_structure_get_int (structure, "width", &width)) @@ -921,13 +835,7 @@ bool GStreamerCapture::open(const String &filename_) gboolean status_; format_ = GST_FORMAT_DEFAULT; -#if GST_VERSION_MAJOR == 0 -#define FORMAT &format_ -#else -#define FORMAT format_ -#endif - status_ = gst_element_query_position(sink, FORMAT, &value_); -#undef FORMAT + status_ = gst_element_query_position(sink, format_, &value_); if (!status_ || value_ != 0 || duration < 0) { CV_WARN(cv::format("Cannot query video position: status=%d value=%lld duration=%lld\n", @@ -961,12 +869,6 @@ double GStreamerCapture::getProperty(int propId) const gint64 value; gboolean status; -#if GST_VERSION_MAJOR == 0 -#define FORMAT &format -#else -#define FORMAT format -#endif - if(!pipeline) { CV_WARN("GStreamer: no pipeline"); return 0; @@ -975,7 +877,7 @@ double GStreamerCapture::getProperty(int propId) const switch(propId) { case CV_CAP_PROP_POS_MSEC: format = GST_FORMAT_TIME; - status = gst_element_query_position(sink, FORMAT, &value); + status = gst_element_query_position(sink, format, &value); if(!status) { handleMessage(pipeline); CV_WARN("GStreamer: unable to query position of stream"); @@ -990,7 +892,7 @@ double GStreamerCapture::getProperty(int propId) const return 0; // TODO getProperty() "unsupported" value should be changed } format = GST_FORMAT_DEFAULT; - status = gst_element_query_position(sink, FORMAT, &value); + status = gst_element_query_position(sink, format, &value); if(!status) { handleMessage(pipeline); CV_WARN("GStreamer: unable to query position of stream"); @@ -999,7 +901,7 @@ double GStreamerCapture::getProperty(int propId) const return value; case CV_CAP_PROP_POS_AVI_RATIO: format = GST_FORMAT_PERCENT; - status = gst_element_query_position(sink, FORMAT, &value); + status = gst_element_query_position(sink, format, &value); if(!status) { handleMessage(pipeline); CV_WARN("GStreamer: unable to query position of stream"); @@ -1041,8 +943,6 @@ double GStreamerCapture::getProperty(int propId) const break; } -#undef FORMAT - return 0; } @@ -1392,22 +1292,15 @@ bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc, GstCaps* caps = NULL; GstCaps* videocaps = NULL; -#if FULL_GST_VERSION >= VERSION_NUM(0,10,32) GstCaps* containercaps = NULL; GstEncodingContainerProfile* containerprofile = NULL; GstEncodingVideoProfile* videoprofile = NULL; -#endif GstIterator* it = NULL; gboolean done = FALSE; GstElement *element = NULL; gchar* name = NULL; -#if GST_VERSION_MAJOR == 0 - GstElement* splitter = NULL; - GstElement* 
combiner = NULL; -#endif - // we first try to construct a pipeline from the given string. // if that fails, we assume it is an ordinary filename @@ -1416,13 +1309,6 @@ bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc, if(manualpipeline) { -#if GST_VERSION_MAJOR == 0 - it = gst_bin_iterate_sources(GST_BIN(encodebin)); - if(gst_iterator_next(it, (gpointer *)&source) != GST_ITERATOR_OK) { - CV_WARN("GStreamer: cannot find appsink in manual pipeline\n"); - return false; - } -#else it = gst_bin_iterate_sources (GST_BIN(encodebin)); GValue value = G_VALUE_INIT; @@ -1456,7 +1342,6 @@ bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc, CV_WARN("GStreamer: cannot find appsrc in manual pipeline\n"); return false; } -#endif pipeline = encodebin; } else @@ -1489,21 +1374,17 @@ bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc, return false; } -#if FULL_GST_VERSION >= VERSION_NUM(0,10,32) containercaps = gst_caps_from_string(mime); //create encodebin profile containerprofile = gst_encoding_container_profile_new("container", "container", containercaps, NULL); videoprofile = gst_encoding_video_profile_new(videocaps, NULL, NULL, 1); gst_encoding_container_profile_add_profile(containerprofile, (GstEncodingProfile *) videoprofile); -#endif //create pipeline elements encodebin = gst_element_factory_make("encodebin", NULL); -#if FULL_GST_VERSION >= VERSION_NUM(0,10,32) g_object_set(G_OBJECT(encodebin), "profile", containerprofile, NULL); -#endif source = gst_element_factory_make("appsrc", NULL); file = gst_element_factory_make("filesink", NULL); g_object_set(G_OBJECT(file), "location", filename, NULL); @@ -1511,29 +1392,17 @@ bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc, if (fourcc == CV_FOURCC('M','J','P','G') && frameSize.height == 1) { -#if GST_VERSION_MAJOR > 0 input_pix_fmt = GST_VIDEO_FORMAT_ENCODED; caps = gst_caps_new_simple("image/jpeg", "framerate", GST_TYPE_FRACTION, int(fps), 1, NULL); caps = gst_caps_fixate(caps); -#else - CV_WARN("Gstreamer 0.10 Opencv backend does not support writing encoded MJPEG data."); - return false; -#endif } else if(is_color) { input_pix_fmt = GST_VIDEO_FORMAT_BGR; bufsize = frameSize.width * frameSize.height * 3; -#if GST_VERSION_MAJOR == 0 - caps = gst_video_format_new_caps(GST_VIDEO_FORMAT_BGR, - frameSize.width, - frameSize.height, - int(fps), 1, - 1, 1); -#else caps = gst_caps_new_simple("video/x-raw", "format", G_TYPE_STRING, "BGR", "width", G_TYPE_INT, frameSize.width, @@ -1542,22 +1411,12 @@ bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc, NULL); caps = gst_caps_fixate(caps); -#endif - } else { -#if FULL_GST_VERSION >= VERSION_NUM(0,10,29) input_pix_fmt = GST_VIDEO_FORMAT_GRAY8; bufsize = frameSize.width * frameSize.height; -#if GST_VERSION_MAJOR == 0 - caps = gst_video_format_new_caps(GST_VIDEO_FORMAT_GRAY8, - frameSize.width, - frameSize.height, - int(fps), 1, - 1, 1); -#else caps = gst_caps_new_simple("video/x-raw", "format", G_TYPE_STRING, "GRAY8", "width", G_TYPE_INT, frameSize.width, @@ -1565,11 +1424,6 @@ bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc, "framerate", GST_TYPE_FRACTION, int(fps), 1, NULL); caps = gst_caps_fixate(caps); -#endif -#else - CV_Error(Error::StsError, - "Gstreamer 0.10.29 or newer is required for grayscale input"); -#endif } gst_app_src_set_caps(GST_APP_SRC(source), caps); @@ -1591,63 +1445,6 @@ bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc, } } -#if GST_VERSION_MAJOR == 0 - // 
HACK: remove streamsplitter and streamcombiner from - // encodebin pipeline to prevent early EOF event handling - // We always fetch BGR or gray-scale frames, so combiner->spliter - // endge in graph is useless. - it = gst_bin_iterate_recurse (GST_BIN(encodebin)); - while (!done) { - switch (gst_iterator_next (it, (void**)&element)) { - case GST_ITERATOR_OK: - name = gst_element_get_name(element); - if (strstr(name, "streamsplitter")) - splitter = element; - else if (strstr(name, "streamcombiner")) - combiner = element; - break; - case GST_ITERATOR_RESYNC: - gst_iterator_resync (it); - break; - case GST_ITERATOR_ERROR: - done = true; - break; - case GST_ITERATOR_DONE: - done = true; - break; - } - } - - gst_iterator_free (it); - - if (splitter && combiner) - { - gst_element_unlink(splitter, combiner); - - GstPad* src = gst_element_get_pad(combiner, "src"); - GstPad* sink = gst_element_get_pad(combiner, "encodingsink"); - - GstPad* srcPeer = gst_pad_get_peer(src); - GstPad* sinkPeer = gst_pad_get_peer(sink); - - gst_pad_unlink(sinkPeer, sink); - gst_pad_unlink(src, srcPeer); - - gst_pad_link(sinkPeer, srcPeer); - - src = gst_element_get_pad(splitter, "encodingsrc"); - sink = gst_element_get_pad(splitter, "sink"); - - srcPeer = gst_pad_get_peer(src); - sinkPeer = gst_pad_get_peer(sink); - - gst_pad_unlink(sinkPeer, sink); - gst_pad_unlink(src, srcPeer); - - gst_pad_link(sinkPeer, srcPeer); - } -#endif - GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "write-pipeline"); stateret = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING); @@ -1682,7 +1479,6 @@ bool CvVideoWriter_GStreamer::writeFrame( const IplImage * image ) handleMessage(pipeline); -#if GST_VERSION_MAJOR > 0 if (input_pix_fmt == GST_VIDEO_FORMAT_ENCODED) { if (image->nChannels != 1 || image->depth != IPL_DEPTH_8U || image->height != 1) { CV_WARN("cvWriteFrame() needs images with depth = IPL_DEPTH_8U, nChannels = 1 and height = 1."); @@ -1690,21 +1486,18 @@ bool CvVideoWriter_GStreamer::writeFrame( const IplImage * image ) } } else -#endif if(input_pix_fmt == GST_VIDEO_FORMAT_BGR) { if (image->nChannels != 3 || image->depth != IPL_DEPTH_8U) { CV_WARN("cvWriteFrame() needs images with depth = IPL_DEPTH_8U and nChannels = 3."); return false; } } -#if FULL_GST_VERSION >= VERSION_NUM(0,10,29) else if (input_pix_fmt == GST_VIDEO_FORMAT_GRAY8) { if (image->nChannels != 1 || image->depth != IPL_DEPTH_8U) { CV_WARN("cvWriteFrame() needs images with depth = IPL_DEPTH_8U and nChannels = 1."); return false; } } -#endif else { CV_WARN("cvWriteFrame() needs BGR or grayscale images\n"); return false; @@ -1715,17 +1508,6 @@ bool CvVideoWriter_GStreamer::writeFrame( const IplImage * image ) timestamp = num_frames * duration; //gst_app_src_push_buffer takes ownership of the buffer, so we need to supply it a copy -#if GST_VERSION_MAJOR == 0 - buffer = gst_buffer_try_new_and_alloc (size); - if (!buffer) - { - CV_WARN("Cannot create GStreamer buffer"); - } - - memcpy(GST_BUFFER_DATA (buffer), (guint8*)image->imageData, size); - GST_BUFFER_DURATION(buffer) = duration; - GST_BUFFER_TIMESTAMP(buffer) = timestamp; -#else buffer = gst_buffer_new_allocate (NULL, size, NULL); GstMapInfo info; gst_buffer_map(buffer, &info, (GstMapFlags)GST_MAP_READ); @@ -1734,7 +1516,6 @@ bool CvVideoWriter_GStreamer::writeFrame( const IplImage * image ) GST_BUFFER_DURATION(buffer) = duration; GST_BUFFER_PTS(buffer) = timestamp; GST_BUFFER_DTS(buffer) = timestamp; -#endif //set the current number in the frame GST_BUFFER_OFFSET(buffer) = 
num_frames; diff --git a/modules/videoio/src/cap_intelperc.cpp b/modules/videoio/src/cap_intelperc.cpp deleted file mode 100644 index 093b1fd235..0000000000 --- a/modules/videoio/src/cap_intelperc.cpp +++ /dev/null @@ -1,634 +0,0 @@ -#ifdef HAVE_INTELPERC - -#include "cap_intelperc.hpp" - -namespace cv -{ - -///////////////// IntelPerCStreamBase ////////////////// - -IntelPerCStreamBase::IntelPerCStreamBase() - : m_profileIdx(-1) - , m_frameIdx(0) - , m_timeStampStartNS(0) -{ -} -IntelPerCStreamBase::~IntelPerCStreamBase() -{ -} - -bool IntelPerCStreamBase::isValid() -{ - return (m_device.IsValid() && m_stream.IsValid()); -} -bool IntelPerCStreamBase::grabFrame() -{ - if (!m_stream.IsValid()) - return false; - if (-1 == m_profileIdx) - { - if (!setProperty(CV_CAP_PROP_INTELPERC_PROFILE_IDX, 0)) - return false; - } - PXCSmartSP sp; - m_pxcImage.ReleaseRef(); - if (PXC_STATUS_NO_ERROR > m_stream->ReadStreamAsync(&m_pxcImage, &sp)) - return false; - if (PXC_STATUS_NO_ERROR > sp->Synchronize()) - return false; - if (0 == m_timeStampStartNS) - m_timeStampStartNS = m_pxcImage->QueryTimeStamp(); - m_timeStamp = (double)((m_pxcImage->QueryTimeStamp() - m_timeStampStartNS) / 10000); - m_frameIdx++; - return true; -} -int IntelPerCStreamBase::getProfileIDX() const -{ - return m_profileIdx; -} -double IntelPerCStreamBase::getProperty(int propIdx) const -{ - double ret = 0.0; - switch (propIdx) - { - case CV_CAP_PROP_INTELPERC_PROFILE_COUNT: - ret = (double)m_profiles.size(); - break; - case CV_CAP_PROP_FRAME_WIDTH : - if ((0 <= m_profileIdx) && (m_profileIdx < m_profiles.size())) - ret = (double)m_profiles[m_profileIdx].imageInfo.width; - break; - case CV_CAP_PROP_FRAME_HEIGHT : - if ((0 <= m_profileIdx) && (m_profileIdx < m_profiles.size())) - ret = (double)m_profiles[m_profileIdx].imageInfo.height; - break; - case CV_CAP_PROP_FPS : - if ((0 <= m_profileIdx) && (m_profileIdx < m_profiles.size())) - { - ret = ((double)m_profiles[m_profileIdx].frameRateMin.numerator / (double)m_profiles[m_profileIdx].frameRateMin.denominator - + (double)m_profiles[m_profileIdx].frameRateMax.numerator / (double)m_profiles[m_profileIdx].frameRateMax.denominator) / 2.0; - } - break; - case CV_CAP_PROP_POS_FRAMES: - ret = (double)m_frameIdx; - break; - case CV_CAP_PROP_POS_MSEC: - ret = m_timeStamp; - break; - }; - return ret; -} -bool IntelPerCStreamBase::setProperty(int propIdx, double propVal) -{ - bool isSet = false; - switch (propIdx) - { - case CV_CAP_PROP_INTELPERC_PROFILE_IDX: - { - int propValInt = (int)propVal; - if (0 > propValInt) - { - m_profileIdx = propValInt; - isSet = true; - } - else if (propValInt < m_profiles.size()) - { - if (m_profileIdx != propValInt) - { - m_profileIdx = propValInt; - if (m_stream.IsValid()) - m_stream->SetProfile(&m_profiles[m_profileIdx]); - m_frameIdx = 0; - m_timeStampStartNS = 0; - } - isSet = true; - } - } - break; - }; - return isSet; -} -bool IntelPerCStreamBase::initDevice(PXCSession *session) -{ - if (NULL == session) - return false; - - pxcStatus sts = PXC_STATUS_NO_ERROR; - PXCSession::ImplDesc templat; - memset(&templat,0,sizeof(templat)); - templat.group = PXCSession::IMPL_GROUP_SENSOR; - templat.subgroup= PXCSession::IMPL_SUBGROUP_VIDEO_CAPTURE; - - for (int modidx = 0; PXC_STATUS_NO_ERROR <= sts; modidx++) - { - PXCSession::ImplDesc desc; - sts = session->QueryImpl(&templat, modidx, &desc); - if (PXC_STATUS_NO_ERROR > sts) - break; - - PXCSmartPtr capture; - sts = session->CreateImpl(&desc, &capture); - if (!capture.IsValid()) - continue; - - /* enumerate devices */ 
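[Editorial note on the GStreamer writer changes above: with GStreamer 1.x a frame is pushed by allocating a GstBuffer, mapping it, copying the pixels, stamping duration/PTS/DTS, and handing ownership to appsrc. The sketch below shows only that sequence and is not part of the patch; the function name push_frame and its parameters are ours, it maps the buffer with GST_MAP_WRITE rather than mirroring the patch exactly, and error handling is omitted.]

/* Reference-only sketch: push one raw frame into an already-configured
 * appsrc element under GStreamer 1.x. */
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <string.h>

static gboolean push_frame(GstElement* appsrc, const guint8* pixels, gsize size,
                           guint64 frame_index, GstClockTime frame_duration)
{
    GstBuffer* buffer = gst_buffer_new_allocate(NULL, size, NULL);
    GstMapInfo info;
    gst_buffer_map(buffer, &info, GST_MAP_WRITE);   // map writable, then copy the frame in
    memcpy(info.data, pixels, size);
    gst_buffer_unmap(buffer, &info);

    GST_BUFFER_DURATION(buffer) = frame_duration;
    GST_BUFFER_PTS(buffer)      = frame_index * frame_duration;  // presentation timestamp
    GST_BUFFER_DTS(buffer)      = GST_BUFFER_PTS(buffer);
    GST_BUFFER_OFFSET(buffer)   = frame_index;

    // gst_app_src_push_buffer() takes ownership of the buffer, so no unref here.
    return gst_app_src_push_buffer(GST_APP_SRC(appsrc), buffer) == GST_FLOW_OK;
}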
- for (int devidx = 0; PXC_STATUS_NO_ERROR <= sts; devidx++) - { - PXCSmartPtr device; - sts = capture->CreateDevice(devidx, &device); - if (PXC_STATUS_NO_ERROR <= sts) - { - m_device = device.ReleasePtr(); - return true; - } - } - } - return false; -} - -void IntelPerCStreamBase::initStreamImpl(PXCImage::ImageType type) -{ - if (!m_device.IsValid()) - return; - - pxcStatus sts = PXC_STATUS_NO_ERROR; - /* enumerate streams */ - for (int streamidx = 0; PXC_STATUS_NO_ERROR <= sts; streamidx++) - { - PXCCapture::Device::StreamInfo sinfo; - sts = m_device->QueryStream(streamidx, &sinfo); - if (PXC_STATUS_NO_ERROR > sts) - break; - if (PXCCapture::VideoStream::CUID != sinfo.cuid) - continue; - if (type != sinfo.imageType) - continue; - - sts = m_device->CreateStream(streamidx, &m_stream); - if (PXC_STATUS_NO_ERROR == sts) - break; - m_stream.ReleaseRef(); - } -} -bool IntelPerCStreamBase::validProfile(const PXCCapture::VideoStream::ProfileInfo& /*pinfo*/) -{ - return true; -} -void IntelPerCStreamBase::enumProfiles() -{ - m_profiles.clear(); - if (!m_stream.IsValid()) - return; - pxcStatus sts = PXC_STATUS_NO_ERROR; - for (int profidx = 0; PXC_STATUS_NO_ERROR <= sts; profidx++) - { - PXCCapture::VideoStream::ProfileInfo pinfo; - sts = m_stream->QueryProfile(profidx, &pinfo); - if (PXC_STATUS_NO_ERROR > sts) - break; - if (validProfile(pinfo)) - m_profiles.push_back(pinfo); - } -} - -///////////////// IntelPerCStreamImage ////////////////// - -IntelPerCStreamImage::IntelPerCStreamImage() -{ -} -IntelPerCStreamImage::~IntelPerCStreamImage() -{ -} - -bool IntelPerCStreamImage::initStream(PXCSession *session) -{ - if (!initDevice(session)) - return false; - initStreamImpl(PXCImage::IMAGE_TYPE_COLOR); - if (!m_stream.IsValid()) - return false; - enumProfiles(); - return true; -} -double IntelPerCStreamImage::getProperty(int propIdx) const -{ - switch (propIdx) - { - case CV_CAP_PROP_BRIGHTNESS: - { - if (!m_device.IsValid()) - return 0.0; - float fret = 0.0f; - if (PXC_STATUS_NO_ERROR == m_device->QueryProperty(PXCCapture::Device::PROPERTY_COLOR_BRIGHTNESS, &fret)) - return (double)fret; - return 0.0; - } - break; - case CV_CAP_PROP_CONTRAST: - { - if (!m_device.IsValid()) - return 0.0; - float fret = 0.0f; - if (PXC_STATUS_NO_ERROR == m_device->QueryProperty(PXCCapture::Device::PROPERTY_COLOR_CONTRAST, &fret)) - return (double)fret; - return 0.0; - } - break; - case CV_CAP_PROP_SATURATION: - { - if (!m_device.IsValid()) - return 0.0; - float fret = 0.0f; - if (PXC_STATUS_NO_ERROR == m_device->QueryProperty(PXCCapture::Device::PROPERTY_COLOR_SATURATION, &fret)) - return (double)fret; - return 0.0; - } - break; - case CV_CAP_PROP_HUE: - { - if (!m_device.IsValid()) - return 0.0; - float fret = 0.0f; - if (PXC_STATUS_NO_ERROR == m_device->QueryProperty(PXCCapture::Device::PROPERTY_COLOR_HUE, &fret)) - return (double)fret; - return 0.0; - } - break; - case CV_CAP_PROP_GAMMA: - { - if (!m_device.IsValid()) - return 0.0; - float fret = 0.0f; - if (PXC_STATUS_NO_ERROR == m_device->QueryProperty(PXCCapture::Device::PROPERTY_COLOR_GAMMA, &fret)) - return (double)fret; - return 0.0; - } - break; - case CV_CAP_PROP_SHARPNESS: - { - if (!m_device.IsValid()) - return 0.0; - float fret = 0.0f; - if (PXC_STATUS_NO_ERROR == m_device->QueryProperty(PXCCapture::Device::PROPERTY_COLOR_SHARPNESS, &fret)) - return (double)fret; - return 0.0; - } - break; - case CV_CAP_PROP_GAIN: - { - if (!m_device.IsValid()) - return 0.0; - float fret = 0.0f; - if (PXC_STATUS_NO_ERROR == 
m_device->QueryProperty(PXCCapture::Device::PROPERTY_COLOR_GAIN, &fret)) - return (double)fret; - return 0.0; - } - break; - case CV_CAP_PROP_BACKLIGHT: - { - if (!m_device.IsValid()) - return 0.0; - float fret = 0.0f; - if (PXC_STATUS_NO_ERROR == m_device->QueryProperty(PXCCapture::Device::PROPERTY_COLOR_BACK_LIGHT_COMPENSATION, &fret)) - return (double)fret; - return 0.0; - } - break; - case CV_CAP_PROP_EXPOSURE: - { - if (!m_device.IsValid()) - return 0.0; - float fret = 0.0f; - if (PXC_STATUS_NO_ERROR == m_device->QueryProperty(PXCCapture::Device::PROPERTY_COLOR_EXPOSURE, &fret)) - return (double)fret; - return 0.0; - } - break; - //Add image stream specific properties - } - return IntelPerCStreamBase::getProperty(propIdx); -} -bool IntelPerCStreamImage::setProperty(int propIdx, double propVal) -{ - switch (propIdx) - { - case CV_CAP_PROP_BRIGHTNESS: - { - if (!m_device.IsValid()) - return false; - return (PXC_STATUS_NO_ERROR == m_device->SetProperty(PXCCapture::Device::PROPERTY_COLOR_BRIGHTNESS, (float)propVal)); - } - break; - case CV_CAP_PROP_CONTRAST: - { - if (!m_device.IsValid()) - return false; - return (PXC_STATUS_NO_ERROR == m_device->SetProperty(PXCCapture::Device::PROPERTY_COLOR_CONTRAST, (float)propVal)); - } - break; - case CV_CAP_PROP_SATURATION: - { - if (!m_device.IsValid()) - return false; - return (PXC_STATUS_NO_ERROR == m_device->SetProperty(PXCCapture::Device::PROPERTY_COLOR_SATURATION, (float)propVal)); - } - break; - case CV_CAP_PROP_HUE: - { - if (!m_device.IsValid()) - return false; - return (PXC_STATUS_NO_ERROR == m_device->SetProperty(PXCCapture::Device::PROPERTY_COLOR_HUE, (float)propVal)); - } - break; - case CV_CAP_PROP_GAMMA: - { - if (!m_device.IsValid()) - return false; - return (PXC_STATUS_NO_ERROR == m_device->SetProperty(PXCCapture::Device::PROPERTY_COLOR_GAMMA, (float)propVal)); - } - break; - case CV_CAP_PROP_SHARPNESS: - { - if (!m_device.IsValid()) - return false; - return (PXC_STATUS_NO_ERROR == m_device->SetProperty(PXCCapture::Device::PROPERTY_COLOR_SHARPNESS, (float)propVal)); - } - break; - case CV_CAP_PROP_GAIN: - { - if (!m_device.IsValid()) - return false; - return (PXC_STATUS_NO_ERROR == m_device->SetProperty(PXCCapture::Device::PROPERTY_COLOR_GAIN, (float)propVal)); - } - break; - case CV_CAP_PROP_BACKLIGHT: - { - if (!m_device.IsValid()) - return false; - return (PXC_STATUS_NO_ERROR == m_device->SetProperty(PXCCapture::Device::PROPERTY_COLOR_BACK_LIGHT_COMPENSATION, (float)propVal)); - } - break; - case CV_CAP_PROP_EXPOSURE: - { - if (!m_device.IsValid()) - return false; - return (PXC_STATUS_NO_ERROR == m_device->SetProperty(PXCCapture::Device::PROPERTY_COLOR_EXPOSURE, (float)propVal)); - } - break; - //Add image stream specific properties - } - return IntelPerCStreamBase::setProperty(propIdx, propVal); -} -bool IntelPerCStreamImage::retrieveAsOutputArray(cv::OutputArray image) -{ - if (!m_pxcImage.IsValid()) - return false; - PXCImage::ImageInfo info; - m_pxcImage->QueryInfo(&info); - - PXCImage::ImageData data; - m_pxcImage->AcquireAccess(PXCImage::ACCESS_READ, PXCImage::COLOR_FORMAT_RGB24, &data); - - if (PXCImage::SURFACE_TYPE_SYSTEM_MEMORY != data.type) - return false; - - cv::Mat temp(info.height, info.width, CV_8UC3, data.planes[0], data.pitches[0]); - temp.copyTo(image); - - m_pxcImage->ReleaseAccess(&data); - return true; -} - -///////////////// IntelPerCStreamDepth ////////////////// - -IntelPerCStreamDepth::IntelPerCStreamDepth() -{ -} -IntelPerCStreamDepth::~IntelPerCStreamDepth() -{ -} - -bool 
IntelPerCStreamDepth::initStream(PXCSession *session) -{ - if (!initDevice(session)) - return false; - initStreamImpl(PXCImage::IMAGE_TYPE_DEPTH); - if (!m_stream.IsValid()) - return false; - enumProfiles(); - return true; -} -double IntelPerCStreamDepth::getProperty(int propIdx) const -{ - switch (propIdx) - { - case CV_CAP_PROP_INTELPERC_DEPTH_LOW_CONFIDENCE_VALUE: - { - if (!m_device.IsValid()) - return 0.0; - float fret = 0.0f; - if (PXC_STATUS_NO_ERROR == m_device->QueryProperty(PXCCapture::Device::PROPERTY_DEPTH_LOW_CONFIDENCE_VALUE, &fret)) - return (double)fret; - return 0.0; - } - break; - case CV_CAP_PROP_INTELPERC_DEPTH_SATURATION_VALUE: - { - if (!m_device.IsValid()) - return 0.0; - float fret = 0.0f; - if (PXC_STATUS_NO_ERROR == m_device->QueryProperty(PXCCapture::Device::PROPERTY_DEPTH_SATURATION_VALUE, &fret)) - return (double)fret; - return 0.0; - } - break; - case CV_CAP_PROP_INTELPERC_DEPTH_CONFIDENCE_THRESHOLD: - { - if (!m_device.IsValid()) - return 0.0; - float fret = 0.0f; - if (PXC_STATUS_NO_ERROR == m_device->QueryProperty(PXCCapture::Device::PROPERTY_DEPTH_CONFIDENCE_THRESHOLD, &fret)) - return (double)fret; - return 0.0; - } - break; - case CV_CAP_PROP_INTELPERC_DEPTH_FOCAL_LENGTH_HORZ: - { - if (!m_device.IsValid()) - return 0.0f; - PXCPointF32 ptf; - if (PXC_STATUS_NO_ERROR == m_device->QueryPropertyAsPoint(PXCCapture::Device::PROPERTY_DEPTH_FOCAL_LENGTH, &ptf)) - return (double)ptf.x; - return 0.0; - } - break; - case CV_CAP_PROP_INTELPERC_DEPTH_FOCAL_LENGTH_VERT: - { - if (!m_device.IsValid()) - return 0.0f; - PXCPointF32 ptf; - if (PXC_STATUS_NO_ERROR == m_device->QueryPropertyAsPoint(PXCCapture::Device::PROPERTY_DEPTH_FOCAL_LENGTH, &ptf)) - return (double)ptf.y; - return 0.0; - } - break; - //Add depth stream sepcific properties - } - return IntelPerCStreamBase::getProperty(propIdx); -} -bool IntelPerCStreamDepth::setProperty(int propIdx, double propVal) -{ - switch (propIdx) - { - case CV_CAP_PROP_INTELPERC_DEPTH_LOW_CONFIDENCE_VALUE: - { - if (!m_device.IsValid()) - return false; - return (PXC_STATUS_NO_ERROR == m_device->SetProperty(PXCCapture::Device::PROPERTY_DEPTH_LOW_CONFIDENCE_VALUE, (float)propVal)); - } - break; - case CV_CAP_PROP_INTELPERC_DEPTH_SATURATION_VALUE: - { - if (!m_device.IsValid()) - return false; - return (PXC_STATUS_NO_ERROR == m_device->SetProperty(PXCCapture::Device::PROPERTY_DEPTH_SATURATION_VALUE, (float)propVal)); - } - break; - case CV_CAP_PROP_INTELPERC_DEPTH_CONFIDENCE_THRESHOLD: - { - if (!m_device.IsValid()) - return false; - return (PXC_STATUS_NO_ERROR == m_device->SetProperty(PXCCapture::Device::PROPERTY_DEPTH_CONFIDENCE_THRESHOLD, (float)propVal)); - } - break; - //Add depth stream sepcific properties - } - return IntelPerCStreamBase::setProperty(propIdx, propVal); -} -bool IntelPerCStreamDepth::retrieveDepthAsOutputArray(cv::OutputArray image) -{ - return retriveFrame(CV_16SC1, 0, image); -} -bool IntelPerCStreamDepth::retrieveIRAsOutputArray(cv::OutputArray image) -{ - return retriveFrame(CV_16SC1, 1, image); -} -bool IntelPerCStreamDepth::retrieveUVAsOutputArray(cv::OutputArray image) -{ - return retriveFrame(CV_32FC2, 2, image); -} -bool IntelPerCStreamDepth::validProfile(const PXCCapture::VideoStream::ProfileInfo& pinfo) -{ - return (PXCImage::COLOR_FORMAT_DEPTH == pinfo.imageInfo.format); -} -bool IntelPerCStreamDepth::retriveFrame(int type, int planeIdx, cv::OutputArray frame) -{ - if (!m_pxcImage.IsValid()) - return false; - PXCImage::ImageInfo info; - m_pxcImage->QueryInfo(&info); - - PXCImage::ImageData data; - 
m_pxcImage->AcquireAccess(PXCImage::ACCESS_READ, &data); - - if (PXCImage::SURFACE_TYPE_SYSTEM_MEMORY != data.type) - return false; - - cv::Mat temp(info.height, info.width, type, data.planes[planeIdx], data.pitches[planeIdx]); - temp.copyTo(frame); - - m_pxcImage->ReleaseAccess(&data); - return true; -} - -///////////////// VideoCapture_IntelPerC ////////////////// - -VideoCapture_IntelPerC::VideoCapture_IntelPerC() - : m_contextOpened(false) -{ - pxcStatus sts = PXCSession_Create(&m_session); - if (PXC_STATUS_NO_ERROR > sts) - return; - m_contextOpened = m_imageStream.initStream(m_session); - m_contextOpened &= m_depthStream.initStream(m_session); -} -VideoCapture_IntelPerC::~VideoCapture_IntelPerC(){} - -double VideoCapture_IntelPerC::getProperty(int propIdx) const -{ - double propValue = 0; - int purePropIdx = propIdx & ~CV_CAP_INTELPERC_GENERATORS_MASK; - if (CV_CAP_INTELPERC_IMAGE_GENERATOR == (propIdx & CV_CAP_INTELPERC_GENERATORS_MASK)) - { - propValue = m_imageStream.getProperty(purePropIdx); - } - else if (CV_CAP_INTELPERC_DEPTH_GENERATOR == (propIdx & CV_CAP_INTELPERC_GENERATORS_MASK)) - { - propValue = m_depthStream.getProperty(purePropIdx); - } - else - { - propValue = m_depthStream.getProperty(purePropIdx); - } - return propValue; -} -bool VideoCapture_IntelPerC::setProperty(int propIdx, double propVal) -{ - bool isSet = false; - int purePropIdx = propIdx & ~CV_CAP_INTELPERC_GENERATORS_MASK; - if (CV_CAP_INTELPERC_IMAGE_GENERATOR == (propIdx & CV_CAP_INTELPERC_GENERATORS_MASK)) - { - isSet = m_imageStream.setProperty(purePropIdx, propVal); - } - else if (CV_CAP_INTELPERC_DEPTH_GENERATOR == (propIdx & CV_CAP_INTELPERC_GENERATORS_MASK)) - { - isSet = m_depthStream.setProperty(purePropIdx, propVal); - } - else - { - isSet = m_depthStream.setProperty(purePropIdx, propVal); - } - return isSet; -} - -bool VideoCapture_IntelPerC::grabFrame() -{ - if (!isOpened()) - return false; - - bool isGrabbed = false; - if (m_depthStream.isValid()) - isGrabbed = m_depthStream.grabFrame(); - if ((m_imageStream.isValid()) && (-1 != m_imageStream.getProfileIDX())) - isGrabbed &= m_imageStream.grabFrame(); - - return isGrabbed; -} -bool VideoCapture_IntelPerC::retrieveFrame(int outputType, cv::OutputArray frame) -{ - switch (outputType) - { - case CV_CAP_INTELPERC_DEPTH_MAP: - return m_depthStream.retrieveDepthAsOutputArray(frame); - case CV_CAP_INTELPERC_UVDEPTH_MAP: - return m_depthStream.retrieveUVAsOutputArray(frame); - case CV_CAP_INTELPERC_IR_MAP: - return m_depthStream.retrieveIRAsOutputArray(frame); - case CV_CAP_INTELPERC_IMAGE: - return m_imageStream.retrieveAsOutputArray(frame); - } - return false; -} -int VideoCapture_IntelPerC::getCaptureDomain() -{ - return CV_CAP_INTELPERC; -} - -bool VideoCapture_IntelPerC::isOpened() const -{ - return m_contextOpened; -} - -} - -#endif //HAVE_INTELPERC diff --git a/modules/videoio/src/cap_intelperc.hpp b/modules/videoio/src/cap_intelperc.hpp deleted file mode 100644 index 209cd4736a..0000000000 --- a/modules/videoio/src/cap_intelperc.hpp +++ /dev/null @@ -1,115 +0,0 @@ -/*M/////////////////////////////////////////////////////////////////////////////////////// -// -// This file is part of OpenCV project. -// It is subject to the license terms in the LICENSE file found in the top-level directory -// of this distribution and at http://opencv.org/license.html. -// -// Copyright (C) 2014, Itseez, Inc., all rights reserved. -// Third party copyrights are property of their respective owners. 
-// -//M*/ - -#ifndef _CAP_INTELPERC_HPP_ -#define _CAP_INTELPERC_HPP_ - -#include "precomp.hpp" - -#ifdef HAVE_INTELPERC - -#include "pxcsession.h" -#include "pxcsmartptr.h" -#include "pxccapture.h" - -namespace cv -{ - -class IntelPerCStreamBase -{ -public: - IntelPerCStreamBase(); - virtual ~IntelPerCStreamBase(); - - bool isValid(); - bool grabFrame(); - int getProfileIDX() const; -public: - virtual bool initStream(PXCSession *session) = 0; - virtual double getProperty(int propIdx) const; - virtual bool setProperty(int propIdx, double propVal); -protected: - mutable PXCSmartPtr m_device; - bool initDevice(PXCSession *session); - - PXCSmartPtr m_stream; - void initStreamImpl(PXCImage::ImageType type); -protected: - std::vector m_profiles; - int m_profileIdx; - int m_frameIdx; - pxcU64 m_timeStampStartNS; - double m_timeStamp; - PXCSmartPtr m_pxcImage; - - virtual bool validProfile(const PXCCapture::VideoStream::ProfileInfo& /*pinfo*/); - void enumProfiles(); -}; - -class IntelPerCStreamImage - : public IntelPerCStreamBase -{ -public: - IntelPerCStreamImage(); - virtual ~IntelPerCStreamImage(); - - virtual bool initStream(PXCSession *session); - virtual double getProperty(int propIdx) const; - virtual bool setProperty(int propIdx, double propVal); -public: - bool retrieveAsOutputArray(OutputArray image); -}; - -class IntelPerCStreamDepth - : public IntelPerCStreamBase -{ -public: - IntelPerCStreamDepth(); - virtual ~IntelPerCStreamDepth(); - - virtual bool initStream(PXCSession *session); - virtual double getProperty(int propIdx) const; - virtual bool setProperty(int propIdx, double propVal); -public: - bool retrieveDepthAsOutputArray(OutputArray image); - bool retrieveIRAsOutputArray(OutputArray image); - bool retrieveUVAsOutputArray(OutputArray image); -protected: - virtual bool validProfile(const PXCCapture::VideoStream::ProfileInfo& pinfo); -protected: - bool retriveFrame(int type, int planeIdx, OutputArray frame); -}; - -class VideoCapture_IntelPerC : public IVideoCapture -{ -public: - VideoCapture_IntelPerC(); - virtual ~VideoCapture_IntelPerC(); - - virtual double getProperty(int propIdx) const CV_OVERRIDE; - virtual bool setProperty(int propIdx, double propVal) CV_OVERRIDE; - - virtual bool grabFrame() CV_OVERRIDE; - virtual bool retrieveFrame(int outputType, OutputArray frame) CV_OVERRIDE; - virtual int getCaptureDomain() CV_OVERRIDE; - virtual bool isOpened() const CV_OVERRIDE; -protected: - bool m_contextOpened; - - PXCSmartPtr m_session; - IntelPerCStreamImage m_imageStream; - IntelPerCStreamDepth m_depthStream; -}; - -} - -#endif //HAVE_INTELPERC -#endif //_CAP_INTELPERC_HPP_ \ No newline at end of file diff --git a/modules/videoio/src/cap_msmf.cpp b/modules/videoio/src/cap_msmf.cpp index a373da157b..10b9a41c18 100644 --- a/modules/videoio/src/cap_msmf.cpp +++ b/modules/videoio/src/cap_msmf.cpp @@ -52,6 +52,7 @@ #undef WINVER #define WINVER _WIN32_WINNT_WIN8 #endif + #include #include #include @@ -60,7 +61,7 @@ #include #include #include -#include +#include #ifdef HAVE_MSMF_DXVA #include #include diff --git a/modules/videoio/src/cap_openni.cpp b/modules/videoio/src/cap_openni.cpp deleted file mode 100644 index e4dbea80d7..0000000000 --- a/modules/videoio/src/cap_openni.cpp +++ /dev/null @@ -1,1236 +0,0 @@ -/*M/////////////////////////////////////////////////////////////////////////////////////// -// -// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING. -// -// By downloading, copying, installing or using the software you agree to this license. 
-// If you do not agree to this license, do not download, install, -// copy or use the software. -// -// -// Intel License Agreement -// For Open Source Computer Vision Library -// -// Copyright (C) 2000, Intel Corporation, all rights reserved. -// Third party copyrights are property of their respective owners. -// -// Redistribution and use in source and binary forms, with or without modification, -// are permitted provided that the following conditions are met: -// -// * Redistribution's of source code must retain the above copyright notice, -// this list of conditions and the following disclaimer. -// -// * Redistribution's in binary form must reproduce the above copyright notice, -// this list of conditions and the following disclaimer in the documentation -// and/or other materials provided with the distribution. -// -// * The name of Intel Corporation may not be used to endorse or promote products -// derived from this software without specific prior written permission. -// -// This software is provided by the copyright holders and contributors "as is" and -// any express or implied warranties, including, but not limited to, the implied -// warranties of merchantability and fitness for a particular purpose are disclaimed. -// In no event shall the Intel Corporation or contributors be liable for any direct, -// indirect, incidental, special, exemplary, or consequential damages -// (including, but not limited to, procurement of substitute goods or services; -// loss of use, data, or profits; or business interruption) however caused -// and on any theory of liability, whether in contract, strict liability, -// or tort (including negligence or otherwise) arising in any way out of -// the use of this software, even if advised of the possibility of such damage. 
-// -//M*/ -#include "precomp.hpp" -#include "opencv2/core.hpp" -#include "opencv2/imgproc.hpp" - -#ifdef HAVE_OPENNI - -#include - -#ifndef i386 -# define i386 0 -#endif -#ifndef __arm__ -# define __arm__ 0 -#endif -#ifndef _ARC -# define _ARC 0 -#endif -#ifndef __APPLE__ -# define __APPLE__ 0 -#endif - -#include "XnCppWrapper.h" - -const cv::String XMLConfig = -"" - "" - "" - "" - "" - "" - "" - "" - "" - "" - "" - "" - "" - "" - "" - "" - "" - "" - " " - "" - "" - "" - "" - "" - "" - "" -"\n"; - -/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// -class ApproximateSyncGrabber -{ -public: - ApproximateSyncGrabber( xn::Context &_context, - xn::DepthGenerator &_depthGenerator, - xn::ImageGenerator &_imageGenerator, - int _maxBufferSize, bool _isCircleBuffer, int _maxTimeDuration ) : - context(_context), depthGenerator(_depthGenerator), imageGenerator(_imageGenerator), - maxBufferSize(_maxBufferSize), isCircleBuffer(_isCircleBuffer), maxTimeDuration(_maxTimeDuration) - { - - CV_Assert( depthGenerator.IsValid() ); - CV_Assert( imageGenerator.IsValid() ); - } - - void setMaxBufferSize( int _maxBufferSize ) - { - maxBufferSize = _maxBufferSize; - } - inline int getMaxBufferSize() const { return maxBufferSize; } - - void setIsCircleBuffer( bool _isCircleBuffer ) { isCircleBuffer = _isCircleBuffer; } - bool getIsCircleBuffer() const { return isCircleBuffer; } - - void setMaxTimeDuration( int _maxTimeDuration ) { maxTimeDuration = _maxTimeDuration; } - int getMaxTimeDuration() const { return maxTimeDuration; } - - bool grab( xn::DepthMetaData& depthMetaData, - xn::ImageMetaData& imageMetaData ) - { - CV_Assert( task ); - - - while( task->grab(depthMetaData, imageMetaData) == false ) - { - task->spin(); - } - return true; - - } - - void start() - { - CV_Assert( depthGenerator.IsValid() ); - CV_Assert( imageGenerator.IsValid() ); - task.reset( new ApproximateSynchronizer( *this ) ); - } - - void finish() - { - task.release(); - } - - bool isRun() const { return task != 0; } - - xn::Context &context; - xn::DepthGenerator &depthGenerator; - xn::ImageGenerator &imageGenerator; - -private: - ApproximateSyncGrabber(const ApproximateSyncGrabber&); - ApproximateSyncGrabber& operator=(const ApproximateSyncGrabber&); - - int maxBufferSize; - bool isCircleBuffer; - int maxTimeDuration; - - class ApproximateSynchronizerBase - { - public: - ApproximateSynchronizerBase( ApproximateSyncGrabber& _approxSyncGrabber ) : - approxSyncGrabber(_approxSyncGrabber), isDepthFilled(false), isImageFilled(false) - {} - - virtual ~ApproximateSynchronizerBase() {} - - virtual bool isSpinContinue() const = 0; - virtual void pushDepthMetaData( xn::DepthMetaData& depthMetaData ) = 0; - virtual void pushImageMetaData( xn::ImageMetaData& imageMetaData ) = 0; - virtual bool popDepthMetaData( xn::DepthMetaData& depthMetaData ) = 0; - virtual bool popImageMetaData( xn::ImageMetaData& imageMetaData ) = 0; - - void spin() - { - while(isSpinContinue() == true) - { - XnStatus status = approxSyncGrabber.context.WaitAnyUpdateAll(); - if( status != XN_STATUS_OK ) - continue; - - //xn::DepthMetaData depth; - //xn::ImageMetaData image; - approxSyncGrabber.depthGenerator.GetMetaData(depth); - approxSyncGrabber.imageGenerator.GetMetaData(image); - - if( depth.Data() && depth.IsDataNew() ) - pushDepthMetaData( depth ); - - if( image.Data() && image.IsDataNew() ) - pushImageMetaData( image ); - } - } - - virtual bool grab( xn::DepthMetaData& depthMetaData, 
- xn::ImageMetaData& imageMetaData ) - { - for(;;) - { - if( !isDepthFilled ) - isDepthFilled = popDepthMetaData(depth); - if( !isImageFilled ) - isImageFilled = popImageMetaData(image); - - if( !isDepthFilled || !isImageFilled ) - break; - - double timeDiff = 1e-3 * std::abs(static_cast(depth.Timestamp()) - static_cast(image.Timestamp())); - - if( timeDiff <= approxSyncGrabber.maxTimeDuration ) - { - depthMetaData.InitFrom(depth); - imageMetaData.InitFrom(image); - isDepthFilled = isImageFilled = false; - return true; - } - else - { - if( depth.Timestamp() < image.Timestamp() ) - isDepthFilled = false; - else - isImageFilled = false; - } - } - - return false; - } - - protected: - ApproximateSyncGrabber& approxSyncGrabber; - xn::DepthMetaData depth; - xn::ImageMetaData image; - bool isDepthFilled; - bool isImageFilled; - }; - - // If there isn't TBB the synchronization will be executed in the main thread. - class ApproximateSynchronizer: public ApproximateSynchronizerBase - { - public: - ApproximateSynchronizer( ApproximateSyncGrabber& _approxSyncGrabber ) : - ApproximateSynchronizerBase(_approxSyncGrabber) - {} - - virtual bool isSpinContinue() const CV_OVERRIDE - { - int maxBufferSize = approxSyncGrabber.getMaxBufferSize(); - return (maxBufferSize <= 0) || (static_cast(depthQueue.size()) < maxBufferSize && - static_cast(imageQueue.size()) < maxBufferSize); // "<" to may push - } - - virtual inline void pushDepthMetaData( xn::DepthMetaData& depthMetaData ) CV_OVERRIDE - { - cv::Ptr depthPtr = cv::makePtr(); - depthPtr->CopyFrom(depthMetaData); - depthQueue.push(depthPtr); - } - virtual inline void pushImageMetaData( xn::ImageMetaData& imageMetaData ) CV_OVERRIDE - { - cv::Ptr imagePtr = cv::makePtr(); - imagePtr->CopyFrom(imageMetaData); - imageQueue.push(imagePtr); - } - virtual inline bool popDepthMetaData( xn::DepthMetaData& depthMetaData ) CV_OVERRIDE - { - if( depthQueue.empty() ) - return false; - - depthMetaData.CopyFrom(*depthQueue.front()); - depthQueue.pop(); - return true; - } - virtual inline bool popImageMetaData( xn::ImageMetaData& imageMetaData ) CV_OVERRIDE - { - if( imageQueue.empty() ) - return false; - - imageMetaData.CopyFrom(*imageQueue.front()); - imageQueue.pop(); - return true; - } - - private: - std::queue > depthQueue; - std::queue > imageQueue; - }; - - cv::Ptr task; -}; - -/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// -class CvCapture_OpenNI : public CvCapture -{ -public: - enum { DEVICE_DEFAULT=0, DEVICE_MS_KINECT=0, DEVICE_ASUS_XTION=1, DEVICE_MAX=1 }; - - static const int INVALID_PIXEL_VAL = 0; - static const int INVALID_COORDINATE_VAL = 0; - - static const int DEFAULT_MAX_BUFFER_SIZE = 2; - static const int DEFAULT_IS_CIRCLE_BUFFER = 0; - static const int DEFAULT_MAX_TIME_DURATION = 20; - - CvCapture_OpenNI(int index=0); - CvCapture_OpenNI(const char * filename); - virtual ~CvCapture_OpenNI(); - - virtual double getProperty(int propIdx) const CV_OVERRIDE; - virtual bool setProperty(int probIdx, double propVal) CV_OVERRIDE; - virtual bool grabFrame() CV_OVERRIDE; - virtual IplImage* retrieveFrame(int outputType) CV_OVERRIDE; - - bool isOpened() const; - -protected: - struct OutputMap - { - public: - cv::Mat mat; - IplImage* getIplImagePtr(); - private: - IplImage iplHeader; - }; - - static const int outputMapsTypesCount = 7; - - static XnMapOutputMode defaultMapOutputMode(); - - IplImage* retrieveDepthMap(); - IplImage* retrievePointCloudMap(); - IplImage* 
retrieveDisparityMap(); - IplImage* retrieveDisparityMap_32F(); - IplImage* retrieveValidDepthMask(); - IplImage* retrieveBGRImage(); - IplImage* retrieveGrayImage(); - - bool readCamerasParams(); - - double getDepthGeneratorProperty(int propIdx) const; - bool setDepthGeneratorProperty(int propIdx, double propVal); - double getImageGeneratorProperty(int propIdx) const; - bool setImageGeneratorProperty(int propIdx, double propVal); - double getCommonProperty(int propIdx) const; - bool setCommonProperty(int propIdx, double propVal); - - // OpenNI context - xn::Context context; - bool isContextOpened; - - xn::ProductionNode productionNode; - - // Data generators with its metadata - xn::DepthGenerator depthGenerator; - xn::DepthMetaData depthMetaData; - - xn::ImageGenerator imageGenerator; - xn::ImageMetaData imageMetaData; - - int maxBufferSize, maxTimeDuration; // for approx sync - bool isCircleBuffer; - cv::Ptr approxSyncGrabber; - - // Cameras settings: - // TODO find in OpenNI function to convert z->disparity and remove fields "baseline" and depthFocalLength_VGA - // Distance between IR projector and IR camera (in meters) - XnDouble baseline; - // Focal length for the IR camera in VGA resolution (in pixels) - XnUInt64 depthFocalLength_VGA; - - // The value for shadow (occluded pixels) - XnUInt64 shadowValue; - // The value for pixels without a valid disparity measurement - XnUInt64 noSampleValue; - - std::vector outputMaps; -}; - -IplImage* CvCapture_OpenNI::OutputMap::getIplImagePtr() -{ - if( mat.empty() ) - return 0; - - iplHeader = cvIplImage(mat); - return &iplHeader; -} - -bool CvCapture_OpenNI::isOpened() const -{ - return isContextOpened; -} - -XnMapOutputMode CvCapture_OpenNI::defaultMapOutputMode() -{ - XnMapOutputMode mode; - mode.nXRes = XN_VGA_X_RES; - mode.nYRes = XN_VGA_Y_RES; - mode.nFPS = 30; - return mode; -} - -CvCapture_OpenNI::CvCapture_OpenNI( int index ) -{ - int deviceType = DEVICE_DEFAULT; - XnStatus status; - - isContextOpened = false; - maxBufferSize = DEFAULT_MAX_BUFFER_SIZE; - isCircleBuffer = DEFAULT_IS_CIRCLE_BUFFER; - maxTimeDuration = DEFAULT_MAX_TIME_DURATION; - - if( index >= 10 ) - { - deviceType = index / 10; - index %= 10; - } - - if( deviceType > DEVICE_MAX ) - return; - - // Initialize and configure the context. 
- status = context.Init(); - if( status != XN_STATUS_OK ) - { - fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Failed to initialize the context: %s\n", xnGetStatusString(status)); - return; - } - - // Find devices - xn::NodeInfoList devicesList; - status = context.EnumerateProductionTrees( XN_NODE_TYPE_DEVICE, NULL, devicesList, 0 ); - if( status != XN_STATUS_OK ) - { - fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Failed to enumerate production trees: %s\n", xnGetStatusString(status)); - return; - } - - // Chose device according to index - xn::NodeInfoList::Iterator it = devicesList.Begin(); - for( int i = 0; i < index && it!=devicesList.End(); ++i ) it++; - if ( it == devicesList.End() ) - { - fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Failed device with index %d\n", index); - return; - } - - xn::NodeInfo deviceNode = *it; - status = context.CreateProductionTree( deviceNode, productionNode ); - if( status != XN_STATUS_OK ) - { - fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Failed to create production tree: %s\n", xnGetStatusString(status)); - return; - } - - xn::ScriptNode scriptNode; - status = context.RunXmlScript( XMLConfig.c_str(), scriptNode ); - if( status != XN_STATUS_OK ) - { - fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Failed to run xml script: %s\n", xnGetStatusString(status)); - return; - } - - // Associate generators with context. - // enumerate the nodes to find if depth generator is present - xn::NodeInfoList depthList; - status = context.EnumerateExistingNodes( depthList, XN_NODE_TYPE_DEPTH ); - if( status != XN_STATUS_OK ) - { - fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Failed to enumerate depth generators: %s\n", xnGetStatusString(status)); - return; - } - if( depthList.IsEmpty() ) - { - fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : The device doesn't have depth generator. Such devices aren't supported now.\n"); - return; - } - status = depthGenerator.Create( context ); - if( status != XN_STATUS_OK ) - { - fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Failed to create depth generator: %s\n", xnGetStatusString(status)); - return; - } - - // enumerate the nodes to find if image generator is present - xn::NodeInfoList imageList; - status = context.EnumerateExistingNodes( imageList, XN_NODE_TYPE_IMAGE ); - if( status != XN_STATUS_OK ) - { - fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Failed to enumerate image generators: %s\n", xnGetStatusString(status)); - return; - } - - if( !imageList.IsEmpty() ) - { - status = imageGenerator.Create( context ); - if( status != XN_STATUS_OK ) - { - fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Failed to create image generator: %s\n", xnGetStatusString(status)); - return; - } - } - - // Set map output mode. - if( depthGenerator.IsValid() ) - { - CV_DbgAssert( depthGenerator.SetMapOutputMode(defaultMapOutputMode()) == XN_STATUS_OK ); // xn::DepthGenerator supports VGA only! (Jan 2011) - } - if( imageGenerator.IsValid() ) - { - CV_DbgAssert( imageGenerator.SetMapOutputMode(defaultMapOutputMode()) == XN_STATUS_OK ); - } - - if( deviceType == DEVICE_ASUS_XTION ) - { - //ps/asus specific - imageGenerator.SetIntProperty("InputFormat", 1 /*XN_IO_IMAGE_FORMAT_YUV422*/); - imageGenerator.SetPixelFormat(XN_PIXEL_FORMAT_RGB24); - depthGenerator.SetIntProperty("RegistrationType", 1 /*XN_PROCESSING_HARDWARE*/); - } - - // Start generating data. 
- status = context.StartGeneratingAll(); - if( status != XN_STATUS_OK ) - { - fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Failed to start generating OpenNI data: %s\n", xnGetStatusString(status)); - return; - } - - if( !readCamerasParams() ) - { - fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Could not read cameras parameters\n"); - return; - } - - outputMaps.resize( outputMapsTypesCount ); - - isContextOpened = true; - - setProperty(CV_CAP_PROP_OPENNI_REGISTRATION, 1.0); -} - -CvCapture_OpenNI::CvCapture_OpenNI(const char * filename) -{ - XnStatus status; - - isContextOpened = false; - maxBufferSize = DEFAULT_MAX_BUFFER_SIZE; - isCircleBuffer = DEFAULT_IS_CIRCLE_BUFFER; - maxTimeDuration = DEFAULT_MAX_TIME_DURATION; - - // Initialize and configure the context. - status = context.Init(); - if( status != XN_STATUS_OK ) - { - fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Failed to initialize the context: %s\n", xnGetStatusString(status)); - return; - } - - // Open file - status = context.OpenFileRecording( filename, productionNode ); - if( status != XN_STATUS_OK ) - { - fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Failed to open input file (%s): %s\n", filename, xnGetStatusString(status)); - return; - } - - context.FindExistingNode( XN_NODE_TYPE_DEPTH, depthGenerator ); - context.FindExistingNode( XN_NODE_TYPE_IMAGE, imageGenerator ); - - if( !readCamerasParams() ) - { - fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Could not read cameras parameters\n"); - return; - } - - outputMaps.resize( outputMapsTypesCount ); - - isContextOpened = true; -} - -CvCapture_OpenNI::~CvCapture_OpenNI() -{ - context.StopGeneratingAll(); - context.Release(); -} - -bool CvCapture_OpenNI::readCamerasParams() -{ - XnDouble pixelSize = 0; - if( depthGenerator.GetRealProperty( "ZPPS", pixelSize ) != XN_STATUS_OK ) - { - fprintf(stderr, "CvCapture_OpenNI::readCamerasParams : Could not read pixel size!\n"); - return false; - } - - // pixel size @ VGA = pixel size @ SXGA x 2 - pixelSize *= 2.0; // in mm - - // focal length of IR camera in pixels for VGA resolution - XnUInt64 zeroPlanDistance; // in mm - if( depthGenerator.GetIntProperty( "ZPD", zeroPlanDistance ) != XN_STATUS_OK ) - { - fprintf(stderr, "CvCapture_OpenNI::readCamerasParams : Could not read virtual plane distance!\n"); - return false; - } - - if( depthGenerator.GetRealProperty( "LDDIS", baseline ) != XN_STATUS_OK ) - { - fprintf(stderr, "CvCapture_OpenNI::readCamerasParams : Could not read base line!\n"); - return false; - } - - // baseline from cm -> mm - baseline *= 10; - - // focal length from mm -> pixels (valid for 640x480) - depthFocalLength_VGA = (XnUInt64)((double)zeroPlanDistance / (double)pixelSize); - - if( depthGenerator.GetIntProperty( "ShadowValue", shadowValue ) != XN_STATUS_OK ) - { - fprintf(stderr, "CvCapture_OpenNI::readCamerasParams : Could not read property \"ShadowValue\"!\n"); - return false; - } - - if( depthGenerator.GetIntProperty("NoSampleValue", noSampleValue ) != XN_STATUS_OK ) - { - fprintf(stderr, "CvCapture_OpenNI::readCamerasParams : Could not read property \"NoSampleValue\"!\n"); - return false; - } - - return true; -} - -double CvCapture_OpenNI::getProperty( int propIdx ) const -{ - double propValue = 0; - - if( isOpened() ) - { - int purePropIdx = propIdx & ~CV_CAP_OPENNI_GENERATORS_MASK; - - if( (propIdx & CV_CAP_OPENNI_GENERATORS_MASK) == CV_CAP_OPENNI_IMAGE_GENERATOR ) - { - propValue = getImageGeneratorProperty( purePropIdx ); - } - else if( (propIdx & 
CV_CAP_OPENNI_GENERATORS_MASK) == CV_CAP_OPENNI_DEPTH_GENERATOR ) - { - propValue = getDepthGeneratorProperty( purePropIdx ); - } - else - { - propValue = getCommonProperty( purePropIdx ); - } - } - - return propValue; -} - -bool CvCapture_OpenNI::setProperty( int propIdx, double propValue ) -{ - bool isSet = false; - if( isOpened() ) - { - int purePropIdx = propIdx & ~CV_CAP_OPENNI_GENERATORS_MASK; - - if( (propIdx & CV_CAP_OPENNI_GENERATORS_MASK) == CV_CAP_OPENNI_IMAGE_GENERATOR ) - { - isSet = setImageGeneratorProperty( purePropIdx, propValue ); - } - else if( (propIdx & CV_CAP_OPENNI_GENERATORS_MASK) == CV_CAP_OPENNI_DEPTH_GENERATOR ) - { - isSet = setDepthGeneratorProperty( purePropIdx, propValue ); - } - else - { - isSet = setCommonProperty( purePropIdx, propValue ); - } - } - - return isSet; -} - -double CvCapture_OpenNI::getCommonProperty( int propIdx ) const -{ - double propValue = 0; - - switch( propIdx ) - { - // There is a set of properties that correspond to depth generator by default - // (is they are pass without particular generator flag). Two reasons of this: - // 1) We can assume that depth generator is the main one for depth sensor. - // 2) In the initial vertions of OpenNI integration to OpenCV the value of - // flag CV_CAP_OPENNI_DEPTH_GENERATOR was 0 (it isn't zero now). - case CV_CAP_PROP_OPENNI_GENERATOR_PRESENT : - case CV_CAP_PROP_FRAME_WIDTH : - case CV_CAP_PROP_FRAME_HEIGHT : - case CV_CAP_PROP_FPS : - case CV_CAP_PROP_OPENNI_FRAME_MAX_DEPTH : - case CV_CAP_PROP_OPENNI_BASELINE : - case CV_CAP_PROP_OPENNI_FOCAL_LENGTH : - case CV_CAP_PROP_OPENNI_REGISTRATION : - propValue = getDepthGeneratorProperty( propIdx ); - break; - case CV_CAP_PROP_OPENNI_APPROX_FRAME_SYNC : - propValue = !approxSyncGrabber.empty() && approxSyncGrabber->isRun() ? 1. : 0.; - break; - case CV_CAP_PROP_OPENNI_MAX_BUFFER_SIZE : - propValue = maxBufferSize; - break; - case CV_CAP_PROP_OPENNI_CIRCLE_BUFFER : - propValue = isCircleBuffer ? 1. : 0.; - break; - case CV_CAP_PROP_OPENNI_MAX_TIME_DURATION : - propValue = maxTimeDuration; - break; - default : - CV_Error( CV_StsBadArg, cv::format("Such parameter (propIdx=%d) isn't supported for getting.\n", propIdx) ); - } - - return propValue; -} - -bool CvCapture_OpenNI::setCommonProperty( int propIdx, double propValue ) -{ - bool isSet = false; - - switch( propIdx ) - { - // There is a set of properties that correspond to depth generator by default - // (is they are pass without particular generator flag). 
- case CV_CAP_PROP_OPENNI_REGISTRATION: - isSet = setDepthGeneratorProperty( propIdx, propValue ); - break; - case CV_CAP_PROP_OPENNI_APPROX_FRAME_SYNC : - if( propValue && depthGenerator.IsValid() && imageGenerator.IsValid() ) - { - // start synchronization - if( approxSyncGrabber.empty() ) - { - approxSyncGrabber.reset(new ApproximateSyncGrabber( context, depthGenerator, imageGenerator, maxBufferSize, isCircleBuffer, maxTimeDuration )); - } - else - { - approxSyncGrabber->finish(); - - // update params - approxSyncGrabber->setMaxBufferSize(maxBufferSize); - approxSyncGrabber->setIsCircleBuffer(isCircleBuffer); - approxSyncGrabber->setMaxTimeDuration(maxTimeDuration); - } - approxSyncGrabber->start(); - } - else if( !propValue && !approxSyncGrabber.empty() ) - { - // finish synchronization - approxSyncGrabber->finish(); - } - break; - case CV_CAP_PROP_OPENNI_MAX_BUFFER_SIZE : - maxBufferSize = cvRound(propValue); - if( !approxSyncGrabber.empty() ) - approxSyncGrabber->setMaxBufferSize(maxBufferSize); - break; - case CV_CAP_PROP_OPENNI_CIRCLE_BUFFER : - if( !approxSyncGrabber.empty() ) - approxSyncGrabber->setIsCircleBuffer(isCircleBuffer); - break; - case CV_CAP_PROP_OPENNI_MAX_TIME_DURATION : - maxTimeDuration = cvRound(propValue); - if( !approxSyncGrabber.empty() ) - approxSyncGrabber->setMaxTimeDuration(maxTimeDuration); - break; - default: - CV_Error( CV_StsBadArg, cv::format("Such parameter (propIdx=%d) isn't supported for setting.\n", propIdx) ); - } - - return isSet; -} - -double CvCapture_OpenNI::getDepthGeneratorProperty( int propIdx ) const -{ - double propValue = 0; - if( !depthGenerator.IsValid() ) - return propValue; - - XnMapOutputMode mode; - - switch( propIdx ) - { - case CV_CAP_PROP_OPENNI_GENERATOR_PRESENT : - CV_DbgAssert( depthGenerator.IsValid() ); - propValue = 1.; - break; - case CV_CAP_PROP_FRAME_WIDTH : - if( depthGenerator.GetMapOutputMode(mode) == XN_STATUS_OK ) - propValue = mode.nXRes; - break; - case CV_CAP_PROP_FRAME_HEIGHT : - if( depthGenerator.GetMapOutputMode(mode) == XN_STATUS_OK ) - propValue = mode.nYRes; - break; - case CV_CAP_PROP_FPS : - if( depthGenerator.GetMapOutputMode(mode) == XN_STATUS_OK ) - propValue = mode.nFPS; - break; - case CV_CAP_PROP_OPENNI_FRAME_MAX_DEPTH : - propValue = depthGenerator.GetDeviceMaxDepth(); - break; - case CV_CAP_PROP_OPENNI_BASELINE : - propValue = baseline; - break; - case CV_CAP_PROP_OPENNI_FOCAL_LENGTH : - propValue = (double)depthFocalLength_VGA; - break; - case CV_CAP_PROP_OPENNI_REGISTRATION : - propValue = depthGenerator.GetAlternativeViewPointCap().IsViewPointAs(const_cast(this)->imageGenerator) ? 1.0 : 0.0; - break; - case CV_CAP_PROP_POS_MSEC : - propValue = (double)depthGenerator.GetTimestamp(); - break; - case CV_CAP_PROP_POS_FRAMES : - propValue = depthGenerator.GetFrameID(); - break; - default : - CV_Error( CV_StsBadArg, cv::format("Depth generator does not support such parameter (propIdx=%d) for getting.\n", propIdx) ); - } - - return propValue; -} - -bool CvCapture_OpenNI::setDepthGeneratorProperty( int propIdx, double propValue ) -{ - bool isSet = false; - - CV_Assert( depthGenerator.IsValid() ); - - switch( propIdx ) - { - case CV_CAP_PROP_OPENNI_REGISTRATION: - { - if( propValue != 0.0 ) // "on" - { - // if there isn't image generator (i.e. 
ASUS XtionPro doesn't have it) - // then the property isn't available - if( imageGenerator.IsValid() ) - { - if( !depthGenerator.GetAlternativeViewPointCap().IsViewPointAs(imageGenerator) ) - { - if( depthGenerator.GetAlternativeViewPointCap().IsViewPointSupported(imageGenerator) ) - { - XnStatus status = depthGenerator.GetAlternativeViewPointCap().SetViewPoint(imageGenerator); - if( status != XN_STATUS_OK ) - fprintf(stderr, "CvCapture_OpenNI::setDepthGeneratorProperty : %s\n", xnGetStatusString(status)); - else - isSet = true; - } - else - fprintf(stderr, "CvCapture_OpenNI::setDepthGeneratorProperty : Unsupported viewpoint.\n"); - } - else - isSet = true; - } - } - else // "off" - { - XnStatus status = depthGenerator.GetAlternativeViewPointCap().ResetViewPoint(); - if( status != XN_STATUS_OK ) - fprintf(stderr, "CvCapture_OpenNI::setDepthGeneratorProperty : %s\n", xnGetStatusString(status)); - else - isSet = true; - } - } - break; - default: - CV_Error( CV_StsBadArg, cv::format("Depth generator does not support such parameter (propIdx=%d) for setting.\n", propIdx) ); - } - - return isSet; -} - -double CvCapture_OpenNI::getImageGeneratorProperty( int propIdx ) const -{ - double propValue = 0.; - if( !imageGenerator.IsValid() ) - return propValue; - - XnMapOutputMode mode; - switch( propIdx ) - { - case CV_CAP_PROP_OPENNI_GENERATOR_PRESENT : - CV_DbgAssert( imageGenerator.IsValid() ); - propValue = 1.; - break; - case CV_CAP_PROP_FRAME_WIDTH : - if( imageGenerator.GetMapOutputMode(mode) == XN_STATUS_OK ) - propValue = mode.nXRes; - break; - case CV_CAP_PROP_FRAME_HEIGHT : - if( imageGenerator.GetMapOutputMode(mode) == XN_STATUS_OK ) - propValue = mode.nYRes; - break; - case CV_CAP_PROP_FPS : - if( imageGenerator.GetMapOutputMode(mode) == XN_STATUS_OK ) - propValue = mode.nFPS; - break; - case CV_CAP_PROP_POS_MSEC : - propValue = (double)imageGenerator.GetTimestamp(); - break; - case CV_CAP_PROP_POS_FRAMES : - propValue = (double)imageGenerator.GetFrameID(); - break; - default : - CV_Error( CV_StsBadArg, cv::format("Image generator does not support such parameter (propIdx=%d) for getting.\n", propIdx) ); - } - - return propValue; -} - -bool CvCapture_OpenNI::setImageGeneratorProperty( int propIdx, double propValue ) -{ - bool isSet = false; - if( !imageGenerator.IsValid() ) - return isSet; - - switch( propIdx ) - { - case CV_CAP_PROP_OPENNI_OUTPUT_MODE : - { - XnMapOutputMode mode; - - switch( cvRound(propValue) ) - { - case CV_CAP_OPENNI_VGA_30HZ : - mode.nXRes = XN_VGA_X_RES; - mode.nYRes = XN_VGA_Y_RES; - mode.nFPS = 30; - break; - case CV_CAP_OPENNI_SXGA_15HZ : - mode.nXRes = XN_SXGA_X_RES; - mode.nYRes = XN_SXGA_Y_RES; - mode.nFPS = 15; - break; - case CV_CAP_OPENNI_SXGA_30HZ : - mode.nXRes = XN_SXGA_X_RES; - mode.nYRes = XN_SXGA_Y_RES; - mode.nFPS = 30; - break; - case CV_CAP_OPENNI_QVGA_30HZ : - mode.nXRes = XN_QVGA_X_RES; - mode.nYRes = XN_QVGA_Y_RES; - mode.nFPS = 30; - break; - case CV_CAP_OPENNI_QVGA_60HZ : - mode.nXRes = XN_QVGA_X_RES; - mode.nYRes = XN_QVGA_Y_RES; - mode.nFPS = 60; - break; - default : - CV_Error( CV_StsBadArg, "Unsupported image generator output mode.\n"); - } - - XnStatus status = imageGenerator.SetMapOutputMode( mode ); - if( status != XN_STATUS_OK ) - fprintf(stderr, "CvCapture_OpenNI::setImageGeneratorProperty : %s\n", xnGetStatusString(status)); - else - isSet = true; - break; - } - default: - CV_Error( CV_StsBadArg, cv::format("Image generator does not support such parameter (propIdx=%d) for setting.\n", propIdx) ); - } - - return isSet; -} - -bool 
CvCapture_OpenNI::grabFrame() -{ - if( !isOpened() ) - return false; - - bool isGrabbed = false; - if( !approxSyncGrabber.empty() && approxSyncGrabber->isRun() ) - { - isGrabbed = approxSyncGrabber->grab( depthMetaData, imageMetaData ); - } - else - { - XnStatus status = context.WaitAndUpdateAll(); - if( status != XN_STATUS_OK ) - return false; - - if( depthGenerator.IsValid() ) - depthGenerator.GetMetaData( depthMetaData ); - if( imageGenerator.IsValid() ) - imageGenerator.GetMetaData( imageMetaData ); - isGrabbed = true; - } - - return isGrabbed; -} - -inline void getDepthMapFromMetaData( const xn::DepthMetaData& depthMetaData, cv::Mat& depthMap, XnUInt64 noSampleValue, XnUInt64 shadowValue ) -{ - int cols = depthMetaData.XRes(); - int rows = depthMetaData.YRes(); - - depthMap.create( rows, cols, CV_16UC1 ); - - const XnDepthPixel* pDepthMap = depthMetaData.Data(); - - // CV_Assert( sizeof(unsigned short) == sizeof(XnDepthPixel) ); - memcpy( depthMap.data, pDepthMap, cols*rows*sizeof(XnDepthPixel) ); - - cv::Mat badMask = (depthMap == (double)noSampleValue) | (depthMap == (double)shadowValue) | (depthMap == 0); - - // mask the pixels with invalid depth - depthMap.setTo( cv::Scalar::all( CvCapture_OpenNI::INVALID_PIXEL_VAL ), badMask ); -} - -IplImage* CvCapture_OpenNI::retrieveDepthMap() -{ - if( !depthMetaData.Data() ) - return 0; - - getDepthMapFromMetaData( depthMetaData, outputMaps[CV_CAP_OPENNI_DEPTH_MAP].mat, noSampleValue, shadowValue ); - - return outputMaps[CV_CAP_OPENNI_DEPTH_MAP].getIplImagePtr(); -} - -IplImage* CvCapture_OpenNI::retrievePointCloudMap() -{ - if( !depthMetaData.Data() ) - return 0; - - cv::Mat depth; - getDepthMapFromMetaData( depthMetaData, depth, noSampleValue, shadowValue ); - - const int badPoint = INVALID_PIXEL_VAL; - const float badCoord = INVALID_COORDINATE_VAL; - int cols = depthMetaData.XRes(), rows = depthMetaData.YRes(); - cv::Mat pointCloud_XYZ( rows, cols, CV_32FC3, cv::Scalar::all(badPoint) ); - - std::vector proj(cols*rows); - std::vector real(cols*rows); - for( int y = 0; y < rows; y++ ) - { - for( int x = 0; x < cols; x++ ) - { - int ind = y*cols+x; - proj[ind].X = (float)x; - proj[ind].Y = (float)y; - proj[ind].Z = depth.at(y, x); - } - } - depthGenerator.ConvertProjectiveToRealWorld(cols*rows, &proj.front(), &real.front()); - - for( int y = 0; y < rows; y++ ) - { - for( int x = 0; x < cols; x++ ) - { - // Check for invalid measurements - if( depth.at(y, x) == badPoint ) // not valid - pointCloud_XYZ.at(y,x) = cv::Point3f( badCoord, badCoord, badCoord ); - else - { - int ind = y*cols+x; - pointCloud_XYZ.at(y,x) = cv::Point3f( real[ind].X*0.001f, real[ind].Y*0.001f, real[ind].Z*0.001f); // from mm to meters - } - } - } - - outputMaps[CV_CAP_OPENNI_POINT_CLOUD_MAP].mat = pointCloud_XYZ; - - return outputMaps[CV_CAP_OPENNI_POINT_CLOUD_MAP].getIplImagePtr(); -} - -static void computeDisparity_32F( const xn::DepthMetaData& depthMetaData, cv::Mat& disp, XnDouble baseline, XnUInt64 F, - XnUInt64 noSampleValue, XnUInt64 shadowValue ) -{ - cv::Mat depth; - getDepthMapFromMetaData( depthMetaData, depth, noSampleValue, shadowValue ); - CV_Assert( depth.type() == CV_16UC1 ); - - - // disparity = baseline * F / z; - - float mult = (float)(baseline /*mm*/ * F /*pixels*/); - - disp.create( depth.size(), CV_32FC1); - disp = cv::Scalar::all( CvCapture_OpenNI::INVALID_PIXEL_VAL ); - for( int y = 0; y < disp.rows; y++ ) - { - for( int x = 0; x < disp.cols; x++ ) - { - unsigned short curDepth = depth.at(y,x); - if( curDepth != CvCapture_OpenNI::INVALID_PIXEL_VAL 
) - disp.at(y,x) = mult / curDepth; - } - } -} - -IplImage* CvCapture_OpenNI::retrieveDisparityMap() -{ - if( !depthMetaData.Data() ) - return 0; - - cv::Mat disp32; - computeDisparity_32F( depthMetaData, disp32, baseline, depthFocalLength_VGA, noSampleValue, shadowValue ); - - disp32.convertTo( outputMaps[CV_CAP_OPENNI_DISPARITY_MAP].mat, CV_8UC1 ); - - return outputMaps[CV_CAP_OPENNI_DISPARITY_MAP].getIplImagePtr(); -} - -IplImage* CvCapture_OpenNI::retrieveDisparityMap_32F() -{ - if( !depthMetaData.Data() ) - return 0; - - computeDisparity_32F( depthMetaData, outputMaps[CV_CAP_OPENNI_DISPARITY_MAP_32F].mat, baseline, depthFocalLength_VGA, noSampleValue, shadowValue ); - - return outputMaps[CV_CAP_OPENNI_DISPARITY_MAP_32F].getIplImagePtr(); -} - -IplImage* CvCapture_OpenNI::retrieveValidDepthMask() -{ - if( !depthMetaData.Data() ) - return 0; - - cv::Mat depth; - getDepthMapFromMetaData( depthMetaData, depth, noSampleValue, shadowValue ); - - outputMaps[CV_CAP_OPENNI_VALID_DEPTH_MASK].mat = depth != CvCapture_OpenNI::INVALID_PIXEL_VAL; - - return outputMaps[CV_CAP_OPENNI_VALID_DEPTH_MASK].getIplImagePtr(); -} - -inline void getBGRImageFromMetaData( const xn::ImageMetaData& imageMetaData, cv::Mat& bgrImage ) -{ - if( imageMetaData.PixelFormat() != XN_PIXEL_FORMAT_RGB24 ) - CV_Error( CV_StsUnsupportedFormat, "Unsupported format of grabbed image\n" ); - - cv::Mat rgbImage( imageMetaData.YRes(), imageMetaData.XRes(), CV_8UC3 ); - const XnRGB24Pixel* pRgbImage = imageMetaData.RGB24Data(); - - // CV_Assert( 3*sizeof(uchar) == sizeof(XnRGB24Pixel) ); - memcpy( rgbImage.data, pRgbImage, rgbImage.total()*sizeof(XnRGB24Pixel) ); - cv::cvtColor( rgbImage, bgrImage, CV_RGB2BGR ); -} - -IplImage* CvCapture_OpenNI::retrieveBGRImage() -{ - if( !imageMetaData.Data() ) - return 0; - - getBGRImageFromMetaData( imageMetaData, outputMaps[CV_CAP_OPENNI_BGR_IMAGE].mat ); - - return outputMaps[CV_CAP_OPENNI_BGR_IMAGE].getIplImagePtr(); -} - -IplImage* CvCapture_OpenNI::retrieveGrayImage() -{ - if( !imageMetaData.Data() ) - return 0; - - CV_Assert( imageMetaData.BytesPerPixel() == 3 ); // RGB - - cv::Mat rgbImage; - getBGRImageFromMetaData( imageMetaData, rgbImage ); - cv::cvtColor( rgbImage, outputMaps[CV_CAP_OPENNI_GRAY_IMAGE].mat, CV_BGR2GRAY ); - - return outputMaps[CV_CAP_OPENNI_GRAY_IMAGE].getIplImagePtr(); -} - -IplImage* CvCapture_OpenNI::retrieveFrame( int outputType ) -{ - IplImage* image = 0; - CV_Assert( outputType < outputMapsTypesCount && outputType >= 0); - - if( outputType == CV_CAP_OPENNI_DEPTH_MAP ) - { - image = retrieveDepthMap(); - } - else if( outputType == CV_CAP_OPENNI_POINT_CLOUD_MAP ) - { - image = retrievePointCloudMap(); - } - else if( outputType == CV_CAP_OPENNI_DISPARITY_MAP ) - { - image = retrieveDisparityMap(); - } - else if( outputType == CV_CAP_OPENNI_DISPARITY_MAP_32F ) - { - image = retrieveDisparityMap_32F(); - } - else if( outputType == CV_CAP_OPENNI_VALID_DEPTH_MASK ) - { - image = retrieveValidDepthMask(); - } - else if( outputType == CV_CAP_OPENNI_BGR_IMAGE ) - { - image = retrieveBGRImage(); - } - else if( outputType == CV_CAP_OPENNI_GRAY_IMAGE ) - { - image = retrieveGrayImage(); - } - - return image; -} - - -CvCapture* cvCreateCameraCapture_OpenNI( int index ) -{ - CvCapture_OpenNI* capture = new CvCapture_OpenNI( index ); - - if( capture->isOpened() ) - return capture; - - delete capture; - return 0; -} - -CvCapture* cvCreateFileCapture_OpenNI( const char* filename ) -{ - CvCapture_OpenNI* capture = new CvCapture_OpenNI( filename ); - - if( capture->isOpened() ) - 
return capture; - - delete capture; - return 0; -} - -#endif diff --git a/modules/videoio/src/videoio_registry.cpp b/modules/videoio/src/videoio_registry.cpp index ee6fab1dc7..c11a8056aa 100644 --- a/modules/videoio/src/videoio_registry.cpp +++ b/modules/videoio/src/videoio_registry.cpp @@ -8,7 +8,6 @@ #include "opencv2/videoio/registry.hpp" -#include "cap_intelperc.hpp" #include "cap_librealsense.hpp" #include "cap_dshow.hpp" @@ -45,7 +44,7 @@ namespace { /** Ordering guidelines: - modern optimized, multi-platform libraries: ffmpeg, gstreamer, Media SDK - platform specific universal SDK: WINRT, AVFOUNDATION, MSMF/DSHOW, V4L/V4L2 -- RGB-D: OpenNI/OpenNI2, INTELPERC/REALSENSE +- RGB-D: OpenNI/OpenNI2, REALSENSE - special OpenCV (file-based): "images", "mjpeg" - special camera SDKs, including stereo: other special SDKs: FIREWIRE/1394, XIMEA/ARAVIS/GIGANETIX/PVAPI(GigE) - other: XINE, gphoto2, etc @@ -88,16 +87,12 @@ static const struct VideoBackendInfo builtin_backends[] = // RGB-D universal -#ifdef HAVE_OPENNI - DECLARE_BACKEND(CAP_OPENNI, "OPENNI", MODE_CAPTURE_ALL), -#endif #ifdef HAVE_OPENNI2 DECLARE_BACKEND(CAP_OPENNI2, "OPENNI2", MODE_CAPTURE_ALL), #endif -#ifdef HAVE_INTELPERC - DECLARE_BACKEND(CAP_INTELPERC, "INTEL_PERC", MODE_CAPTURE_BY_INDEX), -#elif defined(HAVE_LIBREALSENSE) - DECLARE_BACKEND(CAP_INTELPERC, "INTEL_REALSENSE", MODE_CAPTURE_BY_INDEX), + +#ifdef HAVE_LIBREALSENSE + DECLARE_BACKEND(CAP_REALSENSE, "INTEL_REALSENSE", MODE_CAPTURE_BY_INDEX), #endif // OpenCV file-based only @@ -115,9 +110,6 @@ static const struct VideoBackendInfo builtin_backends[] = #ifdef HAVE_XIMEA DECLARE_BACKEND(CAP_XIAPI, "XIMEA", MODE_CAPTURE_ALL), #endif -#ifdef HAVE_GIGE_API - DECLARE_BACKEND(CAP_GIGANETIX, "GIGANETIX", MODE_CAPTURE_BY_INDEX), -#endif #ifdef HAVE_ARAVIS_API DECLARE_BACKEND(CAP_ARAVIS, "ARAVIS", MODE_CAPTURE_BY_INDEX), #endif @@ -415,12 +407,8 @@ void VideoCapture_create(CvCapture*& capture, Ptr& icap, VideoCap TRY_OPEN(makePtr(index)); break; #endif -#ifdef HAVE_INTELPERC - case CAP_INTELPERC: - TRY_OPEN(makePtr()); - break; -#elif defined(HAVE_LIBREALSENSE) - case CAP_INTELPERC: +#ifdef HAVE_LIBREALSENSE + case CAP_REALSENSE: TRY_OPEN(makePtr(index)); break; #endif @@ -454,11 +442,6 @@ void VideoCapture_create(CvCapture*& capture, Ptr& icap, VideoCap TRY_OPEN_LEGACY(cvCreateCameraCapture_PvAPI(index)) break; #endif -#ifdef HAVE_OPENNI - case CAP_OPENNI: - TRY_OPEN_LEGACY(cvCreateCameraCapture_OpenNI(index)) - break; -#endif #ifdef HAVE_OPENNI2 case CAP_OPENNI2: TRY_OPEN_LEGACY(cvCreateCameraCapture_OpenNI2(index)) @@ -476,12 +459,6 @@ void VideoCapture_create(CvCapture*& capture, Ptr& icap, VideoCap break; #endif -#ifdef HAVE_GIGE_API - case CAP_GIGANETIX: - TRY_OPEN_LEGACY(cvCreateCameraCapture_Giganetix(index)) - break; -#endif - #ifdef HAVE_ARAVIS_API case CAP_ARAVIS: TRY_OPEN_LEGACY(cvCreateCameraCapture_Aravis(index)) @@ -510,12 +487,6 @@ void VideoCapture_create(CvCapture*& capture, Ptr& icap, VideoCap break; #endif -#ifdef HAVE_OPENNI - case CAP_OPENNI: - TRY_OPEN_LEGACY(cvCreateFileCapture_OpenNI(filename.c_str())) - break; -#endif - #ifdef HAVE_OPENNI2 case CAP_OPENNI2: TRY_OPEN_LEGACY(cvCreateFileCapture_OpenNI2(filename.c_str())) diff --git a/modules/videoio/test/test_video_io.cpp b/modules/videoio/test/test_video_io.cpp index fad5458d88..f7d7639a98 100644 --- a/modules/videoio/test/test_video_io.cpp +++ b/modules/videoio/test/test_video_io.cpp @@ -303,10 +303,6 @@ public: 
//================================================================================================== static const VideoCaptureAPIs backend_params[] = { -#ifdef HAVE_QUICKTIME - CAP_QT, -#endif - // TODO: Broken? //#ifdef HAVE_AVFOUNDATION // CAP_AVFOUNDATION, @@ -333,7 +329,6 @@ static const VideoCaptureAPIs backend_params[] = { }; static const string bunny_params[] = { -#ifdef HAVE_VIDEO_INPUT string("wmv"), string("mov"), string("mp4"), @@ -341,7 +336,6 @@ static const string bunny_params[] = { string("avi"), string("h264"), string("h265"), -#endif string("mjpg.avi") }; @@ -384,18 +378,6 @@ static Ext_Fourcc_PSNR synthetic_params[] = { makeParam("mov", "H264", 30.f, CAP_MSMF), #endif -#ifdef HAVE_QUICKTIME - makeParam("mov", "mp4v", 30.f, CAP_QT), - makeParam("avi", "XVID", 30.f, CAP_QT), - makeParam("avi", "MPEG", 30.f, CAP_QT), - makeParam("avi", "IYUV", 30.f, CAP_QT), - makeParam("avi", "MJPG", 30.f, CAP_QT), - - makeParam("mkv", "XVID", 30.f, CAP_QT), - makeParam("mkv", "MPEG", 30.f, CAP_QT), - makeParam("mkv", "MJPG", 30.f, CAP_QT), -#endif - // TODO: Broken? //#ifdef HAVE_AVFOUNDATION // makeParam("mov", "mp4v", 30.f, CAP_AVFOUNDATION), diff --git a/modules/world/CMakeLists.txt b/modules/world/CMakeLists.txt index 269beb7bc3..20edbd5733 100644 --- a/modules/world/CMakeLists.txt +++ b/modules/world/CMakeLists.txt @@ -64,9 +64,6 @@ ocv_target_compile_definitions(${the_module} PRIVATE OPENCV_MODULE_IS_PART_OF_WO if(BUILD_opencv_imgcodecs AND OPENCV_MODULE_opencv_imgcodecs_IS_PART_OF_WORLD) ocv_imgcodecs_configure_target() endif() -if(BUILD_opencv_videoio AND OPENCV_MODULE_opencv_videoio_IS_PART_OF_WORLD) - ocv_videoio_configure_target() -endif() if(BUILD_opencv_highgui AND OPENCV_MODULE_opencv_highgui_IS_PART_OF_WORLD) ocv_highgui_configure_target() endif()
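
Illustrative usage sketch (not part of this patch): after the removal of the legacy OpenNI (v1) and Intel PerC backends, RGB-D devices are reached through the backends that remain registered in videoio_registry.cpp, i.e. OPENNI2 and the librealsense backend, which this patch now declares under CAP_REALSENSE instead of CAP_INTELPERC. The snippet below assumes an OpenCV build configured with WITH_OPENNI2 and/or WITH_LIBREALSENSE and the VideoCapture(index, apiPreference) constructor available in this branch; it is a minimal sketch, not code introduced by the patch.

    #include <opencv2/videoio.hpp>
    #include <cstdio>

    int main()
    {
        // OpenNI (v1) support is gone; OpenNI2 remains the backend for such sensors.
        cv::VideoCapture depthCam(0, cv::CAP_OPENNI2);

        // With cap_intelperc.cpp/.hpp deleted, RealSense devices go through the
        // librealsense backend, registered as CAP_REALSENSE in this patch.
        cv::VideoCapture rsCam(0, cv::CAP_REALSENSE);

        if (depthCam.isOpened() && depthCam.grab())
        {
            cv::Mat depth, bgr;
            depthCam.retrieve(depth, cv::CAP_OPENNI_DEPTH_MAP); // depth map channel
            depthCam.retrieve(bgr,   cv::CAP_OPENNI_BGR_IMAGE); // color image channel
            std::printf("depth %dx%d, color %dx%d\n",
                        depth.cols, depth.rows, bgr.cols, bgr.rows);
        }
        return 0;
    }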