Merge pull request #25410 from kaingwade:add_videocapture_depth_sample
Add videocapture_depth.cpp sample #25410

This PR combines the samples `videocapture_openni.cpp`, `videocapture_realsense.cpp` and `videocapture_obsensor.cpp` into a single `videocapture_depth.cpp`. The cameras and operating systems tested with this sample are listed below:

|                        | Windows 10 | Ubuntu 22.04 | Mac M1 14.3 |
|------------------------|------------|--------------|-------------|
| Orbbec Gemini 2 Series | ✓          | ✓            | ✓           |
| RealSense D435, D455   | ✓          | ✓            | ✗           |
| Kinect, XtionPRO       | -          | -            | -           |

Note:
- OpenNI-based cameras (Kinect, XtionPRO) were not tested because I don't have them.
- RealSense D435 and D455 don't work on Mac with OpenCV.
This commit is contained in: parent 48b457f8c7, commit bd1f9cd1ed
@@ -84,5 +84,5 @@ there are two flags that should be used to set/get property of the needed generator
 flag value is assumed by default if neither of the two possible values of the property is set.

 For more information please refer to the example of usage
-[videocapture_realsense.cpp](https://github.com/opencv/opencv/tree/5.x/samples/cpp/videocapture_realsense.cpp)
+[videocapture_depth.cpp](https://github.com/opencv/opencv/tree/5.x/samples/cpp/videocapture_depth.cpp)
 in opencv/samples/cpp folder.
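For readers of the tutorial hunk above, the combined-flag pattern it refers to (generic property flag plus generator flag) looks roughly like the following minimal sketch. This is an illustration, not part of the diff, and it assumes an OpenNI2-compatible device is connected:

```cpp
#include <opencv2/videoio.hpp>
#include <iostream>

int main()
{
    // Open the OpenNI2 backend (Kinect, XtionPRO, ...).
    cv::VideoCapture capture(cv::CAP_OPENNI2);
    if (!capture.isOpened())
        return -1;

    // Adding the generator flag to a generic property flag addresses
    // that property of the selected generator.
    double w = capture.get(cv::CAP_OPENNI_IMAGE_GENERATOR + cv::CAP_PROP_FRAME_WIDTH);
    double h = capture.get(cv::CAP_OPENNI_IMAGE_GENERATOR + cv::CAP_PROP_FRAME_HEIGHT);
    std::cout << "Image generator output: " << w << "x" << h << std::endl;
    return 0;
}
```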
@@ -140,5 +140,5 @@ property. The following properties of cameras available through OpenNI interface
 - CAP_OPENNI_DEPTH_GENERATOR_REGISTRATION = CAP_OPENNI_DEPTH_GENERATOR + CAP_PROP_OPENNI_REGISTRATION

 For more information please refer to the example of usage
-[videocapture_openni.cpp](https://github.com/opencv/opencv/tree/5.x/samples/cpp/videocapture_openni.cpp) in
+[videocapture_depth.cpp](https://github.com/opencv/opencv/tree/5.x/samples/cpp/videocapture_depth.cpp) in
 opencv/samples/cpp folder.
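The shortcut constant listed in the hunk above can be used directly. A minimal sketch of toggling depth-to-color registration through it (again an illustration that assumes an OpenNI2 device, not part of the diff):

```cpp
#include <opencv2/videoio.hpp>
#include <iostream>

int main()
{
    cv::VideoCapture capture(cv::CAP_OPENNI2);
    if (!capture.isOpened())
        return -1;

    // CAP_OPENNI_DEPTH_GENERATOR_REGISTRATION == CAP_OPENNI_DEPTH_GENERATOR + CAP_PROP_OPENNI_REGISTRATION,
    // so this maps the depth map into the color camera's frame of reference.
    capture.set(cv::CAP_OPENNI_DEPTH_GENERATOR_REGISTRATION, 1.0);
    std::cout << "Registration enabled: "
              << capture.get(cv::CAP_OPENNI_DEPTH_GENERATOR_REGISTRATION) << std::endl;
    return 0;
}
```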
samples/cpp/videocapture_depth.cpp (new file, 174 lines)
@@ -0,0 +1,174 @@
#include "opencv2/videoio.hpp"
|
||||||
|
#include "opencv2/highgui.hpp"
|
||||||
|
#include "opencv2/imgproc.hpp"
|
||||||
|
|
||||||
|
#include <iostream>
|
||||||
|
|
||||||
|
using namespace cv;
|
||||||
|
|
||||||
|
static void colorizeDisparity(const Mat& gray, Mat& rgb, double maxDisp=-1.f)
|
||||||
|
{
|
||||||
|
CV_Assert(!gray.empty());
|
||||||
|
CV_Assert(gray.type() == CV_8UC1);
|
||||||
|
|
||||||
|
if(maxDisp <= 0)
|
||||||
|
{
|
||||||
|
maxDisp = 0;
|
||||||
|
minMaxLoc(gray, nullptr, &maxDisp);
|
||||||
|
}
|
||||||
|
|
||||||
|
rgb = Mat::zeros(gray.size(), CV_8UC3);
|
||||||
|
if(maxDisp < 1)
|
||||||
|
return;
|
||||||
|
|
||||||
|
Mat tmp;
|
||||||
|
convertScaleAbs(gray, tmp, 255.f / maxDisp);
|
||||||
|
applyColorMap(tmp, rgb, COLORMAP_JET);
|
||||||
|
}
|
||||||
|
|
||||||
|
static float getMaxDisparity(VideoCapture& capture, int minDistance)
|
||||||
|
{
|
||||||
|
float b = (float)capture.get(CAP_OPENNI_DEPTH_GENERATOR_BASELINE); // mm
|
||||||
|
float F = (float)capture.get(CAP_OPENNI_DEPTH_GENERATOR_FOCAL_LENGTH); // pixels
|
||||||
|
return b * F / minDistance;
|
||||||
|
}
|
||||||
|
|
||||||
|
static void colorizeDepth(const Mat& depth, Mat& rgb)
|
||||||
|
{
|
||||||
|
CV_Assert(!depth.empty());
|
||||||
|
|
||||||
|
normalize(depth, rgb, 0, 255, NORM_MINMAX, CV_8UC1);
|
||||||
|
applyColorMap(rgb, rgb, COLORMAP_JET);
|
||||||
|
}
|
||||||
|
|
||||||
|
const char* keys = "{type t | | Camera Type: OpenNI, RealSense, Orbbec}"
|
||||||
|
"{dist d | 400 | The minimum measurable distance in milimeter between the camera and the object}"
|
||||||
|
"{help h | | Help}";
|
||||||
|
|
||||||
|
const char* about =
|
||||||
|
"\nThis example demostrates how to get data from 3D cameras via OpenCV.\n"
|
||||||
|
"Currently OpenCV supports 3 types of 3D cameras:\n"
|
||||||
|
"1. Depth sensors compatible with OpenNI (Kinect, XtionPRO). "
|
||||||
|
"Users must install OpenNI library and PrimeSensorModule for OpenNI and configure OpenCV with WITH_OPENNI flag ON in CMake.\n"
|
||||||
|
"2. Depth sensors compatible with Intel® RealSense SDK (RealSense). "
|
||||||
|
"Users must install Intel RealSense SDK 2.0 and configure OpenCV with WITH_LIBREALSENSE flag ON in CMake.\n"
|
||||||
|
"3. Orbbec UVC depth sensors. "
|
||||||
|
"For the use of OpenNI based Orbbec cameras, please refer to the example openni_orbbec_astra.cpp\n";
|
||||||
|
|
||||||
|
int main(int argc, char *argv[])
|
||||||
|
{
|
||||||
|
CommandLineParser parser(argc, argv, keys);
|
||||||
|
parser.about(about);
|
||||||
|
|
||||||
|
if(parser.has("help"))
|
||||||
|
{
|
||||||
|
parser.printMessage();
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (parser.has("type"))
|
||||||
|
{
|
||||||
|
int backend;
|
||||||
|
int flagDepth, flagBGR, flagIR;
|
||||||
|
bool hasDisparity = false;
|
||||||
|
|
||||||
|
String camType = parser.get<String>("type");
|
||||||
|
if (camType == "OpenNI")
|
||||||
|
{
|
||||||
|
backend = CAP_OPENNI2;
|
||||||
|
flagDepth = CAP_OPENNI_DEPTH_MAP;
|
||||||
|
flagBGR = CAP_OPENNI_BGR_IMAGE;
|
||||||
|
flagIR = CAP_OPENNI_IR_IMAGE;
|
||||||
|
hasDisparity = true;
|
||||||
|
}
|
||||||
|
else if (camType == "RealSense")
|
||||||
|
{
|
||||||
|
backend = CAP_INTELPERC;
|
||||||
|
flagDepth = CAP_INTELPERC_DEPTH_MAP;
|
||||||
|
flagBGR = CAP_INTELPERC_IMAGE;
|
||||||
|
flagIR = CAP_INTELPERC_IR_MAP;
|
||||||
|
}
|
||||||
|
else if (camType == "Orbbec")
|
||||||
|
{
|
||||||
|
backend = CAP_OBSENSOR;
|
||||||
|
flagDepth = CAP_OBSENSOR_DEPTH_MAP;
|
||||||
|
flagBGR = CAP_OBSENSOR_BGR_IMAGE;
|
||||||
|
flagIR = CAP_OBSENSOR_IR_IMAGE; // Note output IR stream is not implemented for now.
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
std::cerr << "Invalid input of camera type." << std::endl;
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
VideoCapture capture(backend);
|
||||||
|
if(!capture.isOpened())
|
||||||
|
{
|
||||||
|
std::cerr << "Fail to open depth camera." << std::endl;
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (camType == "OpenNI")
|
||||||
|
{
|
||||||
|
capture.set(CAP_OPENNI_IMAGE_GENERATOR_OUTPUT_MODE, CAP_OPENNI_VGA_30HZ);
|
||||||
|
capture.set(CAP_OPENNI_DEPTH_GENERATOR_PRESENT, true);
|
||||||
|
capture.set(CAP_OPENNI_IMAGE_GENERATOR_PRESENT, true);
|
||||||
|
capture.set(CAP_OPENNI_IR_GENERATOR_PRESENT, true);
|
||||||
|
}
|
||||||
|
|
||||||
|
Mat depthMap;
|
||||||
|
Mat bgrImage;
|
||||||
|
Mat irImage;
|
||||||
|
Mat disparityMap;
|
||||||
|
|
||||||
|
for(;;)
|
||||||
|
{
|
||||||
|
if(capture.grab())
|
||||||
|
{
|
||||||
|
if(capture.retrieve(depthMap, flagDepth))
|
||||||
|
{
|
||||||
|
Mat colorDepthMap;
|
||||||
|
colorizeDepth(depthMap, colorDepthMap);
|
||||||
|
imshow("Colorized Depth Map", colorDepthMap);
|
||||||
|
}
|
||||||
|
|
||||||
|
if(capture.retrieve(bgrImage, flagBGR))
|
||||||
|
imshow("RGB Image", bgrImage);
|
||||||
|
|
||||||
|
if(capture.retrieve(irImage, flagIR))
|
||||||
|
{
|
||||||
|
if (camType == "OpenNI")
|
||||||
|
{
|
||||||
|
Mat ir8;
|
||||||
|
irImage.convertTo(ir8, CV_8U, 256.0 / 3500, 0.0);
|
||||||
|
imshow("Infrared Image", ir8);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
imshow("Infrared Image", irImage);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (hasDisparity)
|
||||||
|
{
|
||||||
|
int minDist = parser.get<int>("dist"); // mm
|
||||||
|
if(capture.retrieve(disparityMap, CAP_OPENNI_DISPARITY_MAP))
|
||||||
|
{
|
||||||
|
Mat colorDisparityMap;
|
||||||
|
colorizeDisparity(disparityMap, colorDisparityMap, getMaxDisparity(capture, minDist));
|
||||||
|
colorDisparityMap.setTo(Scalar(0, 0, 0), disparityMap == 0);
|
||||||
|
imshow("Colorized Disparity Map", colorDisparityMap);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if(waitKey(30) >= 0)
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
parser.printMessage();
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
return 0;
|
||||||
|
}
|
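Usage note (hedged): with OpenCV's sample build targets the binary is typically named `example_cpp_videocapture_depth`, though the exact name depends on the local build. An invocation such as `example_cpp_videocapture_depth -t=OpenNI -d=400` selects the OpenNI2 backend and sets the minimum measurable distance used for disparity colorization to 400 mm; `-t=RealSense` and `-t=Orbbec` select the other two backends. The maximum disparity passed to `colorizeDisparity` follows from the stereo relation disparity = baseline × focal length / depth, evaluated at that minimum distance, which is what `getMaxDisparity` computes.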
samples/cpp/videocapture_obsensor.cpp (deleted, 83 lines)
@@ -1,83 +0,0 @@
/**
 * attention: Astra2 cameras currently only support Windows and Linux kernel versions no higher than 4.15, and higher versions of Linux kernel may have exceptions.
 */

#include <opencv2/videoio.hpp>
#include <opencv2/highgui.hpp>
#include <opencv2/imgproc.hpp>
#include <iostream>

using namespace cv;
int main()
{
    VideoCapture obsensorCapture(0, CAP_OBSENSOR);
    if(!obsensorCapture.isOpened()){
        std::cerr << "Failed to open obsensor capture! Index out of range or no response from device";
        return -1;
    }

    double fx = obsensorCapture.get(CAP_PROP_OBSENSOR_INTRINSIC_FX);
    double fy = obsensorCapture.get(CAP_PROP_OBSENSOR_INTRINSIC_FY);
    double cx = obsensorCapture.get(CAP_PROP_OBSENSOR_INTRINSIC_CX);
    double cy = obsensorCapture.get(CAP_PROP_OBSENSOR_INTRINSIC_CY);
    std::cout << "obsensor camera intrinsic params: fx=" << fx << ", fy=" << fy << ", cx=" << cx << ", cy=" << cy << std::endl;

    Mat image;
    Mat depthMap;
    Mat adjDepthMap;

    // Minimum depth value
    const double minVal = 300;
    // Maximum depth value
    const double maxVal = 5000;
    while (true)
    {
        // Grab depth map like this:
        // obsensorCapture >> depthMap;

        // Another way to grab depth map (and bgr image).
        if (obsensorCapture.grab())
        {
            if (obsensorCapture.retrieve(image, CAP_OBSENSOR_BGR_IMAGE))
            {
                imshow("RGB", image);
            }

            if (obsensorCapture.retrieve(depthMap, CAP_OBSENSOR_DEPTH_MAP))
            {
                depthMap.convertTo(adjDepthMap, CV_8U, 255.0 / (maxVal - minVal), -minVal * 255.0 / (maxVal - minVal));
                applyColorMap(adjDepthMap, adjDepthMap, COLORMAP_JET);
                imshow("DEPTH", adjDepthMap);
            }

            // depth map overlay on bgr image
            static const float alpha = 0.6f;
            if (!image.empty() && !depthMap.empty())
            {
                depthMap.convertTo(adjDepthMap, CV_8U, 255.0 / (maxVal - minVal), -minVal * 255.0 / (maxVal - minVal));
                cv::resize(adjDepthMap, adjDepthMap, cv::Size(image.cols, image.rows));
                for (int i = 0; i < image.rows; i++)
                {
                    for (int j = 0; j < image.cols; j++)
                    {
                        cv::Vec3b& outRgb = image.at<cv::Vec3b>(i, j);
                        uint8_t depthValue = 255 - adjDepthMap.at<uint8_t>(i, j);
                        if (depthValue != 0 && depthValue != 255)
                        {
                            outRgb[0] = (uint8_t)(outRgb[0] * (1.0f - alpha) + depthValue * alpha);
                            outRgb[1] = (uint8_t)(outRgb[1] * (1.0f - alpha) + depthValue * alpha);
                            outRgb[2] = (uint8_t)(outRgb[2] * (1.0f - alpha) + depthValue * alpha);
                        }
                    }
                }
                imshow("DepthToColor", image);
            }
            image.release();
            depthMap.release();
        }

        if (pollKey() >= 0)
            break;
    }
    return 0;
}
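For reference, the per-pixel blend in the removed sample above has a vectorized near-equivalent (rounding differs slightly from the casts in the loop). A minimal sketch, assuming the depth map has already been normalized to CV_8UC1 and resized to the image size; the helper name `overlayDepth` is hypothetical and not part of the diff:

```cpp
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>

// Blend the inverted, already-normalized depth map (CV_8UC1) into the BGR image
// wherever the depth value is usable, matching the removed per-pixel loop.
static void overlayDepth(cv::Mat& image, const cv::Mat& adjDepthMap, float alpha = 0.6f)
{
    cv::Mat inv = 255 - adjDepthMap;                 // depthValue = 255 - adjDepthMap
    cv::Mat inv3, blended;
    cv::cvtColor(inv, inv3, cv::COLOR_GRAY2BGR);
    cv::addWeighted(image, 1.0 - alpha, inv3, alpha, 0.0, blended);

    cv::Mat validLow = inv != 0, validHigh = inv != 255;
    cv::Mat valid = validLow & validHigh;            // skip pixels with no usable depth
    blended.copyTo(image, valid);
}
```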
samples/cpp/videocapture_openni.cpp (deleted, 280 lines)
@@ -1,280 +0,0 @@
#include "opencv2/videoio/videoio.hpp"
#include "opencv2/highgui.hpp"
#include "opencv2/imgproc.hpp"

#include <iostream>

using namespace cv;
using namespace std;

static void help()
{
    cout << "\nThis program demonstrates usage of depth sensors (Kinect, XtionPRO,...).\n"
            "The user gets some of the supported output images.\n"
            "\nAll supported output map types:\n"
            "1.) Data given from depth generator\n"
            "  CAP_OPENNI_DEPTH_MAP - depth values in mm (CV_16UC1)\n"
            "  CAP_OPENNI_POINT_CLOUD_MAP - XYZ in meters (CV_32FC3)\n"
            "  CAP_OPENNI_DISPARITY_MAP - disparity in pixels (CV_8UC1)\n"
            "  CAP_OPENNI_DISPARITY_MAP_32F - disparity in pixels (CV_32FC1)\n"
            "  CAP_OPENNI_VALID_DEPTH_MASK - mask of valid pixels (not occluded, not shaded etc.) (CV_8UC1)\n"
            "2.) Data given from RGB image generator\n"
            "  CAP_OPENNI_BGR_IMAGE - color image (CV_8UC3)\n"
            "  CAP_OPENNI_GRAY_IMAGE - gray image (CV_8UC1)\n"
            "2.) Data given from IR image generator\n"
            "  CAP_OPENNI_IR_IMAGE - gray image (CV_16UC1)\n"
         << endl;
}

static void colorizeDisparity( const Mat& gray, Mat& rgb, double maxDisp=-1.f)
{
    CV_Assert( !gray.empty() );
    CV_Assert( gray.type() == CV_8UC1 );

    if( maxDisp <= 0 )
    {
        maxDisp = 0;
        minMaxLoc( gray, 0, &maxDisp );
    }

    rgb.create( gray.size(), CV_8UC3 );
    rgb = Scalar::all(0);
    if( maxDisp < 1 )
        return;

    Mat tmp;
    convertScaleAbs(gray, tmp, 255.f / maxDisp);
    applyColorMap(tmp, rgb, COLORMAP_JET);
}

static float getMaxDisparity( VideoCapture& capture )
{
    const int minDistance = 400; // mm
    float b = (float)capture.get( CAP_OPENNI_DEPTH_GENERATOR_BASELINE ); // mm
    float F = (float)capture.get( CAP_OPENNI_DEPTH_GENERATOR_FOCAL_LENGTH ); // pixels
    return b * F / minDistance;
}

static void printCommandLineParams()
{
    cout << "-cd= Colorized disparity? (0 or 1; 1 by default) Ignored if disparity map is not selected to show." << endl;
    cout << "-fmd= Fixed max disparity? (0 or 1; 0 by default) Ignored if disparity map is not colorized (-cd 0)." << endl;
    cout << "-mode= image mode: resolution and fps, supported three values: 0 - CAP_OPENNI_VGA_30HZ, 1 - CAP_OPENNI_SXGA_15HZ," << endl;
    cout << "       2 - CAP_OPENNI_SXGA_30HZ (0 by default). Ignored if rgb image or gray image are not selected to show." << endl;
    cout << "-m= Mask to set which output images are need. It is a string of size 6. Each element of this is '0' or '1' and" << endl;
    cout << "    determine: is depth map, disparity map, valid pixels mask, rgb image, gray image need or not (correspondently), ir image" << endl ;
    cout << "    By default -m=010100 i.e. disparity map and rgb image will be shown." << endl ;
    cout << "-r= Filename of .oni video file. The data will grabbed from it." << endl ;
}

static void parseCommandLine( int argc, char* argv[], bool& isColorizeDisp, bool& isFixedMaxDisp, int& imageMode, bool retrievedImageFlags[],
                              string& filename, bool& isFileReading )
{
    filename.clear();
    cv::CommandLineParser parser(argc, argv, "{h help||}{cd|1|}{fmd|0|}{mode|-1|}{m|010100|}{r||}");
    if (parser.has("h"))
    {
        help();
        printCommandLineParams();
        exit(0);
    }
    isColorizeDisp = (parser.get<int>("cd") != 0);
    isFixedMaxDisp = (parser.get<int>("fmd") != 0);
    imageMode = parser.get<int>("mode");
    int flags = parser.get<int>("m");
    isFileReading = parser.has("r");
    if (isFileReading)
        filename = parser.get<string>("r");
    if (!parser.check())
    {
        parser.printErrors();
        help();
        exit(-1);
    }
    if (flags % 1000000 == 0)
    {
        cout << "No one output image is selected." << endl;
        exit(0);
    }
    for (int i = 0; i < 6; i++)
    {
        retrievedImageFlags[5 - i] = (flags % 10 != 0);
        flags /= 10;
    }
}

/*
 * To work with Kinect or XtionPRO the user must install OpenNI library and PrimeSensorModule for OpenNI and
 * configure OpenCV with WITH_OPENNI flag is ON (using CMake).
 */
int main( int argc, char* argv[] )
{
    bool isColorizeDisp, isFixedMaxDisp;
    int imageMode;
    bool retrievedImageFlags[6];
    string filename;
    bool isVideoReading;
    parseCommandLine( argc, argv, isColorizeDisp, isFixedMaxDisp, imageMode, retrievedImageFlags, filename, isVideoReading );

    cout << "Device opening ..." << endl;
    VideoCapture capture;
    if( isVideoReading )
        capture.open( filename );
    else
    {
        capture.open( CAP_OPENNI2 );
    }

    cout << "done." << endl;

    if( !capture.isOpened() )
    {
        cout << "Can not open a capture object." << endl;
        return -1;
    }

    if( !isVideoReading && imageMode >= 0 )
    {
        bool modeRes=false;
        switch ( imageMode )
        {
            case 0:
                modeRes = capture.set( CAP_OPENNI_IMAGE_GENERATOR_OUTPUT_MODE, CAP_OPENNI_VGA_30HZ );
                break;
            case 1:
                modeRes = capture.set( CAP_OPENNI_IMAGE_GENERATOR_OUTPUT_MODE, CAP_OPENNI_SXGA_15HZ );
                break;
            case 2:
                modeRes = capture.set( CAP_OPENNI_IMAGE_GENERATOR_OUTPUT_MODE, CAP_OPENNI_SXGA_30HZ );
                break;
            //The following modes are only supported by the Xtion Pro Live
            case 3:
                modeRes = capture.set( CAP_OPENNI_IMAGE_GENERATOR_OUTPUT_MODE, CAP_OPENNI_QVGA_30HZ );
                break;
            case 4:
                modeRes = capture.set( CAP_OPENNI_IMAGE_GENERATOR_OUTPUT_MODE, CAP_OPENNI_QVGA_60HZ );
                break;
            default:
                CV_Error( Error::StsBadArg, "Unsupported image mode property.\n");
        }
        if (!modeRes)
            cout << "\nThis image mode is not supported by the device, the default value (CV_CAP_OPENNI_SXGA_15HZ) will be used.\n" << endl;
    }

    // turn on depth, color and IR if needed
    if (retrievedImageFlags[0] || retrievedImageFlags[1] || retrievedImageFlags[2])
        capture.set(CAP_OPENNI_DEPTH_GENERATOR_PRESENT, true);
    else
        capture.set(CAP_OPENNI_DEPTH_GENERATOR_PRESENT, false);
    if (retrievedImageFlags[3] || retrievedImageFlags[4])
        capture.set(CAP_OPENNI_IMAGE_GENERATOR_PRESENT, true);
    else
        capture.set(CAP_OPENNI_IMAGE_GENERATOR_PRESENT, false);
    if (retrievedImageFlags[5])
        capture.set(CAP_OPENNI_IR_GENERATOR_PRESENT, true);
    else
        capture.set(CAP_OPENNI_IR_GENERATOR_PRESENT, false);

    // Print some available device settings.
    if (capture.get(CAP_OPENNI_DEPTH_GENERATOR_PRESENT))
    {
        cout << "\nDepth generator output mode:" << endl <<
            "FRAME_WIDTH " << capture.get(CAP_PROP_FRAME_WIDTH) << endl <<
            "FRAME_HEIGHT " << capture.get(CAP_PROP_FRAME_HEIGHT) << endl <<
            "FRAME_MAX_DEPTH " << capture.get(CAP_PROP_OPENNI_FRAME_MAX_DEPTH) << " mm" << endl <<
            "FPS " << capture.get(CAP_PROP_FPS) << endl <<
            "REGISTRATION " << capture.get(CAP_PROP_OPENNI_REGISTRATION) << endl;
    }
    else
    {
        cout << "\nDevice doesn't contain depth generator or it is not selected." << endl;
    }

    if( capture.get( CAP_OPENNI_IMAGE_GENERATOR_PRESENT ) )
    {
        cout <<
            "\nImage generator output mode:" << endl <<
            "FRAME_WIDTH " << capture.get( CAP_OPENNI_IMAGE_GENERATOR+CAP_PROP_FRAME_WIDTH ) << endl <<
            "FRAME_HEIGHT " << capture.get( CAP_OPENNI_IMAGE_GENERATOR+CAP_PROP_FRAME_HEIGHT ) << endl <<
            "FPS " << capture.get( CAP_OPENNI_IMAGE_GENERATOR+CAP_PROP_FPS ) << endl;
    }
    else
    {
        cout << "\nDevice doesn't contain image generator or it is not selected." << endl;
    }

    if( capture.get(CAP_OPENNI_IR_GENERATOR_PRESENT) )
    {
        cout <<
            "\nIR generator output mode:" << endl <<
            "FRAME_WIDTH " << capture.get(CAP_OPENNI_IR_GENERATOR + CAP_PROP_FRAME_WIDTH) << endl <<
            "FRAME_HEIGHT " << capture.get(CAP_OPENNI_IR_GENERATOR + CAP_PROP_FRAME_HEIGHT) << endl <<
            "FPS " << capture.get(CAP_OPENNI_IR_GENERATOR + CAP_PROP_FPS) << endl;
    }
    else
    {
        cout << "\nDevice doesn't contain IR generator or it is not selected." << endl;
    }

    for(;;)
    {
        Mat depthMap;
        Mat validDepthMap;
        Mat disparityMap;
        Mat bgrImage;
        Mat grayImage;
        Mat irImage;

        if( !capture.grab() )
        {
            cout << "Can not grab images." << endl;
            return -1;
        }
        else
        {
            if( retrievedImageFlags[0] && capture.retrieve( depthMap, CAP_OPENNI_DEPTH_MAP ) )
            {
                const float scaleFactor = 0.05f;
                Mat show; depthMap.convertTo( show, CV_8UC1, scaleFactor );
                imshow( "depth map", show );
            }

            if( retrievedImageFlags[1] && capture.retrieve( disparityMap, CAP_OPENNI_DISPARITY_MAP ) )
            {
                if( isColorizeDisp )
                {
                    Mat colorDisparityMap;
                    colorizeDisparity( disparityMap, colorDisparityMap, isFixedMaxDisp ? getMaxDisparity(capture) : -1 );
                    Mat validColorDisparityMap;
                    colorDisparityMap.copyTo( validColorDisparityMap, disparityMap != 0 );
                    imshow( "colorized disparity map", validColorDisparityMap );
                }
                else
                {
                    imshow( "original disparity map", disparityMap );
                }
            }

            if( retrievedImageFlags[2] && capture.retrieve( validDepthMap, CAP_OPENNI_VALID_DEPTH_MASK ) )
                imshow( "valid depth mask", validDepthMap );

            if( retrievedImageFlags[3] && capture.retrieve( bgrImage, CAP_OPENNI_BGR_IMAGE ) )
                imshow( "rgb image", bgrImage );

            if( retrievedImageFlags[4] && capture.retrieve( grayImage, CAP_OPENNI_GRAY_IMAGE ) )
                imshow( "gray image", grayImage );

            if( retrievedImageFlags[5] && capture.retrieve( irImage, CAP_OPENNI_IR_IMAGE ) )
            {
                Mat ir8;
                irImage.convertTo(ir8, CV_8U, 256.0 / 3500, 0.0);
                imshow("IR image", ir8);
            }
        }

        if( waitKey( 30 ) >= 0 )
            break;
    }

    return 0;
}
samples/cpp/videocapture_realsense.cpp (deleted, 33 lines)
@@ -1,33 +0,0 @@
#include "opencv2/videoio.hpp"
#include "opencv2/highgui.hpp"
#include "opencv2/imgproc.hpp"

using namespace cv;
using namespace std;

int main()
{
    VideoCapture capture(CAP_INTELPERC);
    for(;;)
    {
        Mat depthMap;
        Mat image;
        Mat irImage;
        Mat adjMap;

        capture.grab();
        capture.retrieve(depthMap,CAP_INTELPERC_DEPTH_MAP);
        capture.retrieve(image,CAP_INTELPERC_IMAGE);
        capture.retrieve(irImage,CAP_INTELPERC_IR_MAP);

        normalize(depthMap, adjMap, 0, 255, NORM_MINMAX, CV_8UC1);
        applyColorMap(adjMap, adjMap, COLORMAP_JET);

        imshow("RGB", image);
        imshow("IR", irImage);
        imshow("DEPTH", adjMap);
        if( waitKey( 30 ) >= 0 )
            break;
    }
    return 0;
}