Finalize calib3d module split for fisheye part.

This commit is contained in:
Alexander Smorkalov 2023-07-31 16:35:23 +03:00
parent 47188b7c7e
commit 285108e2e1
16 changed files with 1666 additions and 1613 deletions

View File

@ -2481,6 +2481,130 @@ void undistortImagePoints(InputArray src, OutputArray dst, InputArray cameraMatr
InputArray distCoeffs,
TermCriteria = TermCriteria(TermCriteria::MAX_ITER + TermCriteria::EPS, 5, 0.01));
namespace fisheye {
/** @brief Projects points using fisheye model
@param objectPoints Array of object points, 1xN/Nx1 3-channel (or vector\<Point3f\> ), where N is
the number of points in the view.
@param imagePoints Output array of image points, 2xN/Nx2 1-channel or 1xN/Nx1 2-channel, or
vector\<Point2f\>.
@param affine Rigid 3D transformation (rotation and translation) from object coordinates to camera
coordinates; an alternative to passing rvec and tvec separately.
@param K Camera intrinsic matrix \f$cameramatrix{K}\f$.
@param D Input vector of distortion coefficients \f$\distcoeffsfisheye\f$.
@param alpha The skew coefficient.
@param jacobian Optional output 2Nx15 jacobian matrix of derivatives of image points with respect
to components of the focal lengths, coordinates of the principal point, distortion coefficients,
rotation vector, translation vector, and the skew. In the old interface different components of
the jacobian are returned via different output parameters.
The function computes projections of 3D points to the image plane given intrinsic and extrinsic
camera parameters. Optionally, the function computes Jacobians - matrices of partial derivatives of
image points coordinates (as functions of all the input parameters) with respect to the particular
parameters, intrinsic and/or extrinsic.
*/
CV_EXPORTS void projectPoints(InputArray objectPoints, OutputArray imagePoints, const Affine3d& affine,
InputArray K, InputArray D, double alpha = 0, OutputArray jacobian = noArray());
/** @overload */
CV_EXPORTS_W void projectPoints(InputArray objectPoints, OutputArray imagePoints, InputArray rvec, InputArray tvec,
InputArray K, InputArray D, double alpha = 0, OutputArray jacobian = noArray());
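// A minimal usage sketch (editor's illustration, not part of this commit; the
// intrinsics K, coefficients D, and all point values below are assumed):
cv::Matx33d K(500, 0, 320,
                0, 500, 240,
                0,   0,   1);                    // assumed fx, fy, cx, cy
cv::Vec4d D(0.01, -0.005, 0.001, 0.0);           // assumed k1..k4
std::vector<cv::Point3f> objPts = { {0.1f, -0.2f, 1.5f} };
std::vector<cv::Point2f> imgPts;
cv::fisheye::projectPoints(objPts, imgPts, cv::Vec3d::all(0) /*rvec*/,
                           cv::Vec3d::all(0) /*tvec*/, K, D);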
/** @brief Distorts 2D points using fisheye model.
@param undistorted Array of object points, 1xN/Nx1 2-channel (or vector\<Point2f\> ), where N is
the number of points in the view.
@param K Camera intrinsic matrix \f$cameramatrix{K}\f$.
@param D Input vector of distortion coefficients \f$\distcoeffsfisheye\f$.
@param alpha The skew coefficient.
@param distorted Output array of image points, 1xN/Nx1 2-channel, or vector\<Point2f\> .
Note that the function assumes the camera intrinsic matrix of the undistorted points to be the identity.
This means that if you want to distort image points you have to multiply them by \f$K^{-1}\f$.
*/
CV_EXPORTS_W void distortPoints(InputArray undistorted, OutputArray distorted, InputArray K, InputArray D, double alpha = 0);
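// Sketch (editor's illustration, assuming the K and D defined above): distortPoints
// expects points in normalized (K-free) coordinates, so pixel coordinates must
// first be multiplied by K^-1 as noted above.
std::vector<cv::Point2f> normPts = { {0.02f, -0.01f} };  // assumed normalized input
std::vector<cv::Point2f> distPts;
cv::fisheye::distortPoints(normPts, distPts, K, D);      // distPts is in pixels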
/** @brief Undistorts 2D points using fisheye model
@param distorted Array of object points, 1xN/Nx1 2-channel (or vector\<Point2f\> ), where N is the
number of points in the view.
@param K Camera intrinsic matrix \f$cameramatrix{K}\f$.
@param D Input vector of distortion coefficients \f$\distcoeffsfisheye\f$.
@param R Rectification transformation in the object space: 3x3 1-channel, or vector: 3x1/1x3
1-channel or 1x1 3-channel
@param P New camera intrinsic matrix (3x3) or new projection matrix (3x4)
@param criteria Termination criteria
@param undistorted Output array of image points, 1xN/Nx1 2-channel, or vector\<Point2f\> .
*/
CV_EXPORTS_W void undistortPoints(InputArray distorted, OutputArray undistorted,
InputArray K, InputArray D, InputArray R = noArray(), InputArray P = noArray(),
TermCriteria criteria = TermCriteria(TermCriteria::MAX_ITER + TermCriteria::EPS, 10, 1e-8));
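// Sketch (editor's illustration, assuming the K and D defined above): with R and P
// left at their defaults the output is in normalized coordinates; pass P = K to get
// undistorted pixel coordinates instead.
std::vector<cv::Point2f> pix = { {330.f, 250.f} };       // assumed pixel input
std::vector<cv::Point2f> norm;
cv::fisheye::undistortPoints(pix, norm, K, D);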
/** @brief Estimates new camera intrinsic matrix for undistortion or rectification.
@param K Camera intrinsic matrix \f$cameramatrix{K}\f$.
@param image_size Size of the image
@param D Input vector of distortion coefficients \f$\distcoeffsfisheye\f$.
@param R Rectification transformation in the object space: 3x3 1-channel, or vector: 3x1/1x3
1-channel or 1x1 3-channel
@param P New camera intrinsic matrix (3x3) or new projection matrix (3x4)
@param balance Sets the new focal length in range between the min focal length and the max focal
length. Balance is in range of [0, 1].
@param new_size The new image size. When empty (default), it is set to image_size.
@param fov_scale Divisor for new focal length.
*/
CV_EXPORTS_W void estimateNewCameraMatrixForUndistortRectify(InputArray K, InputArray D, const Size &image_size, InputArray R,
OutputArray P, double balance = 0.0, const Size& new_size = Size(), double fov_scale = 1.0);
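// Sketch (editor's illustration, assuming the K and D defined above and a 1280x800
// sensor): balance = 1.0 keeps the whole field of view, balance = 0.0 crops tighter.
cv::Mat newK;
cv::fisheye::estimateNewCameraMatrixForUndistortRectify(K, D, cv::Size(1280, 800),
    cv::noArray() /*R*/, newK, /*balance=*/1.0);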
/** @brief Computes undistortion and rectification maps for an image transform by cv::remap(). If D is empty,
zero distortion is used; if R or P is empty, the identity matrix is used.
@param K Camera intrinsic matrix \f$cameramatrix{K}\f$.
@param D Input vector of distortion coefficients \f$\distcoeffsfisheye\f$.
@param R Rectification transformation in the object space: 3x3 1-channel, or vector: 3x1/1x3
1-channel or 1x1 3-channel
@param P New camera intrinsic matrix (3x3) or new projection matrix (3x4)
@param size Undistorted image size.
@param m1type Type of the first output map that can be CV_32FC1 or CV_16SC2 . See convertMaps()
for details.
@param map1 The first output map.
@param map2 The second output map.
*/
CV_EXPORTS_W void initUndistortRectifyMap(InputArray K, InputArray D, InputArray R, InputArray P,
const cv::Size& size, int m1type, OutputArray map1, OutputArray map2);
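// Sketch (editor's illustration, assuming the K, D, newK defined above and a Mat
// named frame holding an input image): build the maps once, then undistort any
// number of frames cheaply with remap().
cv::Mat map1, map2, frame, rectified;
cv::fisheye::initUndistortRectifyMap(K, D, cv::Matx33d::eye() /*R*/, newK,
                                     cv::Size(1280, 800), CV_16SC2, map1, map2);
cv::remap(frame, rectified, map1, map2, cv::INTER_LINEAR);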
/** @brief Transforms an image to compensate for fisheye lens distortion.
@param distorted image with fisheye lens distortion.
@param undistorted Output image with compensated fisheye lens distortion.
@param K Camera intrinsic matrix \f$cameramatrix{K}\f$.
@param D Input vector of distortion coefficients \f$\distcoeffsfisheye\f$.
@param Knew Camera intrinsic matrix of the distorted image. By default, it is the identity matrix but you
may additionally scale and shift the result by using a different matrix.
@param new_size Size of the undistorted output image. When empty (default), the size of the distorted
input image is used.
The function transforms an image to compensate for radial and tangential lens distortion.
The function is simply a combination of fisheye::initUndistortRectifyMap (with unity R ) and remap
(with bilinear interpolation). See the former function for details of the transformation being
performed.
See below the results of undistortImage.
- a\) result of undistort of perspective camera model (all possible coefficients (k_1, k_2, k_3,
k_4, k_5, k_6) of distortion were optimized under calibration)
- b\) result of fisheye::undistortImage of fisheye camera model (all possible coefficients (k_1, k_2,
k_3, k_4) of fisheye distortion were optimized under calibration)
- c\) original image was captured with fisheye lens
Pictures a) and b) are almost the same. But if we consider points of the image located far from the
image center, we can notice that in image a) these points are distorted.
![image](pics/fisheye_undistorted.jpg)
*/
CV_EXPORTS_W void undistortImage(InputArray distorted, OutputArray undistorted,
InputArray K, InputArray D, InputArray Knew = cv::noArray(), const Size& new_size = Size());
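// Sketch (editor's illustration, assuming the K, D, newK and input Mat frame from
// above): the one-call equivalent of the initUndistortRectifyMap + remap pipeline.
cv::Mat out;
cv::fisheye::undistortImage(frame, out, K, D, newK);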
} // namespace fisheye
/** @brief Octree for 3D vision.
*

View File

@ -1,4 +1,7 @@
{
"namespaces_dict": {
"cv.fisheye": "fisheye"
},
"func_arg_fix" : { "func_arg_fix" : {
"findFundamentalMat" : { "points1" : {"ctype" : "vector_Point2f"}, "findFundamentalMat" : { "points1" : {"ctype" : "vector_Point2f"},
"points2" : {"ctype" : "vector_Point2f"} }, "points2" : {"ctype" : "vector_Point2f"} },

View File

@ -556,4 +556,30 @@ public class Cv3dTest extends OpenCVTestCase {
}
}
public void testEstimateNewCameraMatrixForUndistortRectify() {
Mat K = new Mat().eye(3, 3, CvType.CV_64FC1);
Mat K_new = new Mat().eye(3, 3, CvType.CV_64FC1);
Mat K_new_truth = new Mat().eye(3, 3, CvType.CV_64FC1);
Mat D = new Mat().zeros(4, 1, CvType.CV_64FC1);
K.put(0,0,600.4447738238429);
K.put(1,1,578.9929805505851);
K.put(0,2,992.0642578801213);
K.put(1,2,549.2682624212172);
D.put(0,0,-0.05090103223466704);
D.put(1,0,0.030944413642173308);
D.put(2,0,-0.021509225493198905);
D.put(3,0,0.0043378096628297145);
K_new_truth.put(0,0, 387.5118215642316);
K_new_truth.put(0,2, 1033.936556777084);
K_new_truth.put(1,1, 373.6673784974842);
K_new_truth.put(1,2, 538.794152656429);
Cv3d.fisheye_estimateNewCameraMatrixForUndistortRectify(K,D,new Size(1920,1080),
new Mat().eye(3, 3, CvType.CV_64F), K_new, 0.0, new Size(1920,1080));
assertMatEqual(K_new, K_new_truth, EPS);
}
}

View File

@ -27,4 +27,36 @@ PERF_TEST(Undistort, DISABLED_InitInverseRectificationMap)
SANITY_CHECK_NOTHING();
}
PERF_TEST(Undistort, fisheye_undistortPoints_100k_10iter)
{
const int pointsNumber = 100000;
const Size imageSize(1280, 800);
/* Set camera matrix */
const Matx33d K(558.478087865323, 0, 620.458515360843,
0, 560.506767351568, 381.939424848348,
0, 0, 1);
/* Set distortion coefficients */
const Matx14d D(2.81e-06, 1.31e-06, -4.42e-06, -1.25e-06);
/* Create two-channel points matrix */
Mat xy[2] = {};
xy[0].create(pointsNumber, 1, CV_64F);
theRNG().fill(xy[0], RNG::UNIFORM, 0, imageSize.width); // x
xy[1].create(pointsNumber, 1, CV_64F);
theRNG().fill(xy[1], RNG::UNIFORM, 0, imageSize.height); // y
Mat points;
merge(xy, 2, points);
/* Set fixed iteration number to check only c++ code, not algo convergence */
TermCriteria termCriteria(TermCriteria::MAX_ITER, 10, 0);
Mat undistortedPoints;
TEST_CYCLE() fisheye::undistortPoints(points, undistortedPoints, K, D, noArray(), noArray(), termCriteria);
SANITY_CHECK_NOTHING();
}
} // namespace

modules/3d/src/fisheye.cpp (new file, 596 lines)
View File

@ -0,0 +1,596 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
#include "precomp.hpp"
namespace cv {
struct JacobianRow
{
Vec2d df, dc;
Vec4d dk;
Vec3d dom, dT;
double dalpha;
};
void cv::fisheye::projectPoints(InputArray objectPoints, OutputArray imagePoints, const Affine3d& affine,
InputArray K, InputArray D, double alpha, OutputArray jacobian)
{
CV_INSTRUMENT_REGION();
projectPoints(objectPoints, imagePoints, affine.rvec(), affine.translation(), K, D, alpha, jacobian);
}
void cv::fisheye::projectPoints(InputArray objectPoints, OutputArray imagePoints, InputArray _rvec,
InputArray _tvec, InputArray _K, InputArray _D, double alpha, OutputArray jacobian)
{
CV_INSTRUMENT_REGION();
// will support only 3-channel data now for points
CV_Assert(objectPoints.type() == CV_32FC3 || objectPoints.type() == CV_64FC3);
imagePoints.create(objectPoints.size(), CV_MAKETYPE(objectPoints.depth(), 2));
size_t n = objectPoints.total();
CV_Assert(_rvec.total() * _rvec.channels() == 3 && (_rvec.depth() == CV_32F || _rvec.depth() == CV_64F));
CV_Assert(_tvec.total() * _tvec.channels() == 3 && (_tvec.depth() == CV_32F || _tvec.depth() == CV_64F));
CV_Assert(_tvec.getMat().isContinuous() && _rvec.getMat().isContinuous());
Vec3d om = _rvec.depth() == CV_32F ? (Vec3d)*_rvec.getMat().ptr<Vec3f>() : *_rvec.getMat().ptr<Vec3d>();
Vec3d T = _tvec.depth() == CV_32F ? (Vec3d)*_tvec.getMat().ptr<Vec3f>() : *_tvec.getMat().ptr<Vec3d>();
CV_Assert(_K.size() == Size(3,3) && (_K.type() == CV_32F || _K.type() == CV_64F) && _D.type() == _K.type() && _D.total() == 4);
Vec2d f, c;
if (_K.depth() == CV_32F)
{
Matx33f K = _K.getMat();
f = Vec2f(K(0, 0), K(1, 1));
c = Vec2f(K(0, 2), K(1, 2));
}
else
{
Matx33d K = _K.getMat();
f = Vec2d(K(0, 0), K(1, 1));
c = Vec2d(K(0, 2), K(1, 2));
}
Vec4d k = _D.depth() == CV_32F ? (Vec4d)*_D.getMat().ptr<Vec4f>(): *_D.getMat().ptr<Vec4d>();
const bool isJacobianNeeded = jacobian.needed();
JacobianRow *Jn = 0;
if (isJacobianNeeded)
{
int nvars = 2 + 2 + 1 + 4 + 3 + 3; // f, c, alpha, k, om, T,
jacobian.create(2*(int)n, nvars, CV_64F);
Jn = jacobian.getMat().ptr<JacobianRow>(0);
}
Matx33d R;
Matx<double, 3, 9> dRdom;
Rodrigues(om, R, dRdom);
Affine3d aff(om, T);
const Vec3f* Xf = objectPoints.getMat().ptr<Vec3f>();
const Vec3d* Xd = objectPoints.getMat().ptr<Vec3d>();
Vec2f *xpf = imagePoints.getMat().ptr<Vec2f>();
Vec2d *xpd = imagePoints.getMat().ptr<Vec2d>();
for(size_t i = 0; i < n; ++i)
{
Vec3d Xi = objectPoints.depth() == CV_32F ? (Vec3d)Xf[i] : Xd[i];
Vec3d Y = aff*Xi;
if (fabs(Y[2]) < DBL_MIN)
Y[2] = 1;
Vec2d x(Y[0]/Y[2], Y[1]/Y[2]);
double r2 = x.dot(x);
double r = std::sqrt(r2);
// Angle of the incoming ray:
double theta = std::atan(r);
double theta2 = theta*theta, theta3 = theta2*theta, theta4 = theta2*theta2, theta5 = theta4*theta,
theta6 = theta3*theta3, theta7 = theta6*theta, theta8 = theta4*theta4, theta9 = theta8*theta;
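// fisheye distortion model: theta_d = theta + k1*theta^3 + k2*theta^5 + k3*theta^7 + k4*theta^9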
double theta_d = theta + k[0]*theta3 + k[1]*theta5 + k[2]*theta7 + k[3]*theta9;
double inv_r = r > 1e-8 ? 1.0/r : 1;
double cdist = r > 1e-8 ? theta_d * inv_r : 1;
Vec2d xd1 = x * cdist;
Vec2d xd3(xd1[0] + alpha*xd1[1], xd1[1]);
Vec2d final_point(xd3[0] * f[0] + c[0], xd3[1] * f[1] + c[1]);
if (objectPoints.depth() == CV_32F)
xpf[i] = final_point;
else
xpd[i] = final_point;
if (isJacobianNeeded)
{
//Vec3d Xi = pdepth == CV_32F ? (Vec3d)Xf[i] : Xd[i];
//Vec3d Y = aff*Xi;
double dYdR[] = { Xi[0], Xi[1], Xi[2], 0, 0, 0, 0, 0, 0,
0, 0, 0, Xi[0], Xi[1], Xi[2], 0, 0, 0,
0, 0, 0, 0, 0, 0, Xi[0], Xi[1], Xi[2] };
Matx33d dYdom_data = Matx<double, 3, 9>(dYdR) * dRdom.t();
const Vec3d *dYdom = (Vec3d*)dYdom_data.val;
Matx33d dYdT_data = Matx33d::eye();
const Vec3d *dYdT = (Vec3d*)dYdT_data.val;
//Vec2d x(Y[0]/Y[2], Y[1]/Y[2]);
Vec3d dxdom[2];
dxdom[0] = (1.0/Y[2]) * dYdom[0] - x[0]/Y[2] * dYdom[2];
dxdom[1] = (1.0/Y[2]) * dYdom[1] - x[1]/Y[2] * dYdom[2];
Vec3d dxdT[2];
dxdT[0] = (1.0/Y[2]) * dYdT[0] - x[0]/Y[2] * dYdT[2];
dxdT[1] = (1.0/Y[2]) * dYdT[1] - x[1]/Y[2] * dYdT[2];
//double r2 = x.dot(x);
Vec3d dr2dom = 2 * x[0] * dxdom[0] + 2 * x[1] * dxdom[1];
Vec3d dr2dT = 2 * x[0] * dxdT[0] + 2 * x[1] * dxdT[1];
//double r = std::sqrt(r2);
double drdr2 = r > 1e-8 ? 1.0/(2*r) : 1;
Vec3d drdom = drdr2 * dr2dom;
Vec3d drdT = drdr2 * dr2dT;
// Angle of the incoming ray:
//double theta = atan(r);
double dthetadr = 1.0/(1+r2);
Vec3d dthetadom = dthetadr * drdom;
Vec3d dthetadT = dthetadr * drdT;
//double theta_d = theta + k[0]*theta3 + k[1]*theta5 + k[2]*theta7 + k[3]*theta9;
double dtheta_ddtheta = 1 + 3*k[0]*theta2 + 5*k[1]*theta4 + 7*k[2]*theta6 + 9*k[3]*theta8;
Vec3d dtheta_ddom = dtheta_ddtheta * dthetadom;
Vec3d dtheta_ddT = dtheta_ddtheta * dthetadT;
Vec4d dtheta_ddk = Vec4d(theta3, theta5, theta7, theta9);
//double inv_r = r > 1e-8 ? 1.0/r : 1;
//double cdist = r > 1e-8 ? theta_d / r : 1;
Vec3d dcdistdom = inv_r * (dtheta_ddom - cdist*drdom);
Vec3d dcdistdT = inv_r * (dtheta_ddT - cdist*drdT);
Vec4d dcdistdk = inv_r * dtheta_ddk;
//Vec2d xd1 = x * cdist;
Vec4d dxd1dk[2];
Vec3d dxd1dom[2], dxd1dT[2];
dxd1dom[0] = x[0] * dcdistdom + cdist * dxdom[0];
dxd1dom[1] = x[1] * dcdistdom + cdist * dxdom[1];
dxd1dT[0] = x[0] * dcdistdT + cdist * dxdT[0];
dxd1dT[1] = x[1] * dcdistdT + cdist * dxdT[1];
dxd1dk[0] = x[0] * dcdistdk;
dxd1dk[1] = x[1] * dcdistdk;
//Vec2d xd3(xd1[0] + alpha*xd1[1], xd1[1]);
Vec4d dxd3dk[2];
Vec3d dxd3dom[2], dxd3dT[2];
dxd3dom[0] = dxd1dom[0] + alpha * dxd1dom[1];
dxd3dom[1] = dxd1dom[1];
dxd3dT[0] = dxd1dT[0] + alpha * dxd1dT[1];
dxd3dT[1] = dxd1dT[1];
dxd3dk[0] = dxd1dk[0] + alpha * dxd1dk[1];
dxd3dk[1] = dxd1dk[1];
Vec2d dxd3dalpha(xd1[1], 0);
//final jacobian
Jn[0].dom = f[0] * dxd3dom[0];
Jn[1].dom = f[1] * dxd3dom[1];
Jn[0].dT = f[0] * dxd3dT[0];
Jn[1].dT = f[1] * dxd3dT[1];
Jn[0].dk = f[0] * dxd3dk[0];
Jn[1].dk = f[1] * dxd3dk[1];
Jn[0].dalpha = f[0] * dxd3dalpha[0];
Jn[1].dalpha = 0; //f[1] * dxd3dalpha[1];
Jn[0].df = Vec2d(xd3[0], 0);
Jn[1].df = Vec2d(0, xd3[1]);
Jn[0].dc = Vec2d(1, 0);
Jn[1].dc = Vec2d(0, 1);
//step to jacobian rows for next point
Jn += 2;
}
}
}
void cv::fisheye::distortPoints(InputArray undistorted, OutputArray distorted, InputArray K, InputArray D, double alpha)
{
CV_INSTRUMENT_REGION();
// will support only 2-channel data now for points
CV_Assert(undistorted.type() == CV_32FC2 || undistorted.type() == CV_64FC2);
distorted.create(undistorted.size(), undistorted.type());
size_t n = undistorted.total();
CV_Assert(K.size() == Size(3,3) && (K.type() == CV_32F || K.type() == CV_64F) && D.total() == 4);
Vec2d f, c;
if (K.depth() == CV_32F)
{
Matx33f camMat = K.getMat();
f = Vec2f(camMat(0, 0), camMat(1, 1));
c = Vec2f(camMat(0, 2), camMat(1, 2));
}
else
{
Matx33d camMat = K.getMat();
f = Vec2d(camMat(0, 0), camMat(1, 1));
c = Vec2d(camMat(0 ,2), camMat(1, 2));
}
Vec4d k = D.depth() == CV_32F ? (Vec4d)*D.getMat().ptr<Vec4f>(): *D.getMat().ptr<Vec4d>();
const Vec2f* Xf = undistorted.getMat().ptr<Vec2f>();
const Vec2d* Xd = undistorted.getMat().ptr<Vec2d>();
Vec2f *xpf = distorted.getMat().ptr<Vec2f>();
Vec2d *xpd = distorted.getMat().ptr<Vec2d>();
for(size_t i = 0; i < n; ++i)
{
Vec2d x = undistorted.depth() == CV_32F ? (Vec2d)Xf[i] : Xd[i];
double r2 = x.dot(x);
double r = std::sqrt(r2);
// Angle of the incoming ray:
double theta = std::atan(r);
double theta2 = theta*theta, theta3 = theta2*theta, theta4 = theta2*theta2, theta5 = theta4*theta,
theta6 = theta3*theta3, theta7 = theta6*theta, theta8 = theta4*theta4, theta9 = theta8*theta;
double theta_d = theta + k[0]*theta3 + k[1]*theta5 + k[2]*theta7 + k[3]*theta9;
double inv_r = r > 1e-8 ? 1.0/r : 1;
double cdist = r > 1e-8 ? theta_d * inv_r : 1;
Vec2d xd1 = x * cdist;
Vec2d xd3(xd1[0] + alpha*xd1[1], xd1[1]);
Vec2d final_point(xd3[0] * f[0] + c[0], xd3[1] * f[1] + c[1]);
if (undistorted.depth() == CV_32F)
xpf[i] = final_point;
else
xpd[i] = final_point;
}
}
void cv::fisheye::undistortPoints( InputArray distorted, OutputArray undistorted, InputArray K, InputArray D,
InputArray R, InputArray P, TermCriteria criteria)
{
CV_INSTRUMENT_REGION();
// will support only 2-channel data now for points
CV_Assert(distorted.type() == CV_32FC2 || distorted.type() == CV_64FC2);
undistorted.create(distorted.size(), distorted.type());
CV_Assert(P.empty() || P.size() == Size(3, 3) || P.size() == Size(4, 3));
CV_Assert(R.empty() || R.size() == Size(3, 3) || R.total() * R.channels() == 3);
CV_Assert(D.total() == 4 && K.size() == Size(3, 3) && (K.depth() == CV_32F || K.depth() == CV_64F));
CV_Assert(criteria.isValid());
Vec2d f, c;
if (K.depth() == CV_32F)
{
Matx33f camMat = K.getMat();
f = Vec2f(camMat(0, 0), camMat(1, 1));
c = Vec2f(camMat(0, 2), camMat(1, 2));
}
else
{
Matx33d camMat = K.getMat();
f = Vec2d(camMat(0, 0), camMat(1, 1));
c = Vec2d(camMat(0, 2), camMat(1, 2));
}
Vec4d k = D.depth() == CV_32F ? (Vec4d)*D.getMat().ptr<Vec4f>(): *D.getMat().ptr<Vec4d>();
Matx33d RR = Matx33d::eye();
if (!R.empty() && R.total() * R.channels() == 3)
{
Vec3d rvec;
R.getMat().convertTo(rvec, CV_64F);
RR = cv::Affine3d(rvec).rotation();
}
else if (!R.empty() && R.size() == Size(3, 3))
R.getMat().convertTo(RR, CV_64F);
if(!P.empty())
{
Matx33d PP;
P.getMat().colRange(0, 3).convertTo(PP, CV_64F);
RR = PP * RR;
}
// start undistorting
const Vec2f* srcf = distorted.getMat().ptr<Vec2f>();
const Vec2d* srcd = distorted.getMat().ptr<Vec2d>();
Vec2f* dstf = undistorted.getMat().ptr<Vec2f>();
Vec2d* dstd = undistorted.getMat().ptr<Vec2d>();
size_t n = distorted.total();
int sdepth = distorted.depth();
const bool isEps = (criteria.type & TermCriteria::EPS) != 0;
/* Define max count for solver iterations */
int maxCount = std::numeric_limits<int>::max();
if (criteria.type & TermCriteria::MAX_ITER) {
maxCount = criteria.maxCount;
}
for(size_t i = 0; i < n; i++ )
{
Vec2d pi = sdepth == CV_32F ? (Vec2d)srcf[i] : srcd[i]; // image point
Vec2d pw((pi[0] - c[0])/f[0], (pi[1] - c[1])/f[1]); // world point
double theta_d = sqrt(pw[0]*pw[0] + pw[1]*pw[1]);
// the current camera model is only valid up to a 180-degree FOV;
// for a larger FOV the loop below does not converge
// clip values so we still get plausible results for super fisheye images beyond 180 degrees
theta_d = min(max(-CV_PI/2., theta_d), CV_PI/2.);
bool converged = false;
double theta = theta_d;
double scale = 0.0;
if (!isEps || fabs(theta_d) > criteria.epsilon)
{
// compensate distortion iteratively using Newton method
for (int j = 0; j < maxCount; j++)
{
double theta2 = theta*theta, theta4 = theta2*theta2, theta6 = theta4*theta2, theta8 = theta6*theta2;
double k0_theta2 = k[0] * theta2, k1_theta4 = k[1] * theta4, k2_theta6 = k[2] * theta6, k3_theta8 = k[3] * theta8;
/* new_theta = theta - theta_fix, theta_fix = f0(theta) / f0'(theta) */
double theta_fix = (theta * (1 + k0_theta2 + k1_theta4 + k2_theta6 + k3_theta8) - theta_d) /
(1 + 3*k0_theta2 + 5*k1_theta4 + 7*k2_theta6 + 9*k3_theta8);
theta = theta - theta_fix;
if (isEps && (fabs(theta_fix) < criteria.epsilon))
{
converged = true;
break;
}
}
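// theta now approximates the undistorted ray angle; scale maps the distorted radius theta_d to the undistorted radius tan(theta)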
scale = std::tan(theta) / theta_d;
}
else
{
converged = true;
}
// theta is monotonically increasing or decreasing depending on the sign of theta
// if theta has flipped sign, the iteration might still converge due to symmetry, but to a point on the
// opposite side of the camera center, so we check whether theta changed sign during the optimization
bool theta_flipped = ((theta_d < 0 && theta > 0) || (theta_d > 0 && theta < 0));
if ((converged || !isEps) && !theta_flipped)
{
Vec2d pu = pw * scale; //undistorted point
// reproject
Vec3d pr = RR * Vec3d(pu[0], pu[1], 1.0); // rotated point optionally multiplied by new camera matrix
Vec2d fi(pr[0]/pr[2], pr[1]/pr[2]); // final
if( sdepth == CV_32F )
dstf[i] = fi;
else
dstd[i] = fi;
}
else
{
// Vec2d fi(std::numeric_limits<double>::quiet_NaN(), std::numeric_limits<double>::quiet_NaN());
Vec2d fi(-1000000.0, -1000000.0);
if( sdepth == CV_32F )
dstf[i] = fi;
else
dstd[i] = fi;
}
}
}
void cv::fisheye::initUndistortRectifyMap( InputArray K, InputArray D, InputArray R, InputArray P,
const cv::Size& size, int m1type, OutputArray map1, OutputArray map2 )
{
CV_INSTRUMENT_REGION();
CV_Assert( m1type == CV_16SC2 || m1type == CV_32F || m1type <=0 );
map1.create( size, m1type <= 0 ? CV_16SC2 : m1type );
map2.create( size, map1.type() == CV_16SC2 ? CV_16UC1 : CV_32F );
CV_Assert((K.depth() == CV_32F || K.depth() == CV_64F) && (D.depth() == CV_32F || D.depth() == CV_64F));
CV_Assert((P.empty() || P.depth() == CV_32F || P.depth() == CV_64F) && (R.empty() || R.depth() == CV_32F || R.depth() == CV_64F));
CV_Assert(K.size() == Size(3, 3) && (D.empty() || D.total() == 4));
CV_Assert(R.empty() || R.size() == Size(3, 3) || R.total() * R.channels() == 3);
CV_Assert(P.empty() || P.size() == Size(3, 3) || P.size() == Size(4, 3));
Vec2d f, c;
if (K.depth() == CV_32F)
{
Matx33f camMat = K.getMat();
f = Vec2f(camMat(0, 0), camMat(1, 1));
c = Vec2f(camMat(0, 2), camMat(1, 2));
}
else
{
Matx33d camMat = K.getMat();
f = Vec2d(camMat(0, 0), camMat(1, 1));
c = Vec2d(camMat(0, 2), camMat(1, 2));
}
Vec4d k = Vec4d::all(0);
if (!D.empty())
k = D.depth() == CV_32F ? (Vec4d)*D.getMat().ptr<Vec4f>(): *D.getMat().ptr<Vec4d>();
Matx33d RR = Matx33d::eye();
if (!R.empty() && R.total() * R.channels() == 3)
{
Vec3d rvec;
R.getMat().convertTo(rvec, CV_64F);
RR = Affine3d(rvec).rotation();
}
else if (!R.empty() && R.size() == Size(3, 3))
R.getMat().convertTo(RR, CV_64F);
Matx33d PP = Matx33d::eye();
if (!P.empty())
P.getMat().colRange(0, 3).convertTo(PP, CV_64F);
Matx33d iR = (PP * RR).inv(cv::DECOMP_SVD);
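// iR maps destination (rectified) pixel coordinates back to normalized camera coordinates prior to distortion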
for( int i = 0; i < size.height; ++i)
{
float* m1f = map1.getMat().ptr<float>(i);
float* m2f = map2.getMat().ptr<float>(i);
short* m1 = (short*)m1f;
ushort* m2 = (ushort*)m2f;
double _x = i*iR(0, 1) + iR(0, 2),
_y = i*iR(1, 1) + iR(1, 2),
_w = i*iR(2, 1) + iR(2, 2);
for( int j = 0; j < size.width; ++j)
{
double u, v;
if( _w <= 0)
{
u = (_x > 0) ? -std::numeric_limits<double>::infinity() : std::numeric_limits<double>::infinity();
v = (_y > 0) ? -std::numeric_limits<double>::infinity() : std::numeric_limits<double>::infinity();
}
else
{
double x = _x/_w, y = _y/_w;
double r = sqrt(x*x + y*y);
double theta = std::atan(r);
double theta2 = theta*theta, theta4 = theta2*theta2, theta6 = theta4*theta2, theta8 = theta4*theta4;
double theta_d = theta * (1 + k[0]*theta2 + k[1]*theta4 + k[2]*theta6 + k[3]*theta8);
double scale = (r == 0) ? 1.0 : theta_d / r;
u = f[0]*x*scale + c[0];
v = f[1]*y*scale + c[1];
}
if( m1type == CV_16SC2 )
{
int iu = cv::saturate_cast<int>(u*cv::INTER_TAB_SIZE);
int iv = cv::saturate_cast<int>(v*cv::INTER_TAB_SIZE);
m1[j*2+0] = (short)(iu >> cv::INTER_BITS);
m1[j*2+1] = (short)(iv >> cv::INTER_BITS);
m2[j] = (ushort)((iv & (cv::INTER_TAB_SIZE-1))*cv::INTER_TAB_SIZE + (iu & (cv::INTER_TAB_SIZE-1)));
}
else if( m1type == CV_32FC1 )
{
m1f[j] = (float)u;
m2f[j] = (float)v;
}
_x += iR(0, 0);
_y += iR(1, 0);
_w += iR(2, 0);
}
}
}
void cv::fisheye::estimateNewCameraMatrixForUndistortRectify(InputArray K, InputArray D, const Size &image_size, InputArray R,
OutputArray P, double balance, const Size& new_size, double fov_scale)
{
CV_INSTRUMENT_REGION();
CV_Assert( K.size() == Size(3, 3) && (K.depth() == CV_32F || K.depth() == CV_64F));
CV_Assert(D.empty() || ((D.total() == 4) && (D.depth() == CV_32F || D.depth() == CV_64F)));
int w = image_size.width, h = image_size.height;
balance = std::min(std::max(balance, 0.0), 1.0);
Mat points(1, 4, CV_64FC2);
Vec2d* pptr = points.ptr<Vec2d>();
pptr[0] = Vec2d(w/2, 0);
pptr[1] = Vec2d(w, h/2);
pptr[2] = Vec2d(w/2, h);
pptr[3] = Vec2d(0, h/2);
fisheye::undistortPoints(points, points, K, D, R);
cv::Scalar center_mass = mean(points);
Vec2d cn(center_mass.val);
double aspect_ratio = (K.depth() == CV_32F) ? K.getMat().at<float >(0,0)/K.getMat().at<float> (1,1)
: K.getMat().at<double>(0,0)/K.getMat().at<double>(1,1);
// convert to identity ratio
cn[1] *= aspect_ratio;
for(size_t i = 0; i < points.total(); ++i)
pptr[i][1] *= aspect_ratio;
double minx = DBL_MAX, miny = DBL_MAX, maxx = -DBL_MAX, maxy = -DBL_MAX;
for(size_t i = 0; i < points.total(); ++i)
{
miny = std::min(miny, pptr[i][1]);
maxy = std::max(maxy, pptr[i][1]);
minx = std::min(minx, pptr[i][0]);
maxx = std::max(maxx, pptr[i][0]);
}
double f1 = w * 0.5/(cn[0] - minx);
double f2 = w * 0.5/(maxx - cn[0]);
double f3 = h * 0.5 * aspect_ratio/(cn[1] - miny);
double f4 = h * 0.5 * aspect_ratio/(maxy - cn[1]);
double fmin = std::min(f1, std::min(f2, std::min(f3, f4)));
double fmax = std::max(f1, std::max(f2, std::max(f3, f4)));
double f = balance * fmin + (1.0 - balance) * fmax;
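// balance == 0 picks the largest focal length (fmax, tightest crop); balance == 1 the smallest (fmin, full view)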
f *= fov_scale > 0 ? 1.0/fov_scale : 1.0;
Vec2d new_f(f, f), new_c = -cn * f + Vec2d(w, h * aspect_ratio) * 0.5;
// restore aspect ratio
new_f[1] /= aspect_ratio;
new_c[1] /= aspect_ratio;
if (!new_size.empty())
{
double rx = new_size.width /(double)image_size.width;
double ry = new_size.height/(double)image_size.height;
new_f[0] *= rx; new_f[1] *= ry;
new_c[0] *= rx; new_c[1] *= ry;
}
Mat(Matx33d(new_f[0], 0, new_c[0],
0, new_f[1], new_c[1],
0, 0, 1)).convertTo(P, P.empty() ? K.type() : P.type());
}
void cv::fisheye::undistortImage(InputArray distorted, OutputArray undistorted,
InputArray K, InputArray D, InputArray Knew, const Size& new_size)
{
CV_INSTRUMENT_REGION();
Size size = !new_size.empty() ? new_size : distorted.size();
Mat map1, map2;
fisheye::initUndistortRectifyMap(K, D, Matx33d::eye(), Knew, size, CV_16SC2, map1, map2 );
cv::remap(distorted, undistorted, map1, map2, INTER_LINEAR, BORDER_CONSTANT);
}
} // namespace cv

View File

@ -0,0 +1,388 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
#include "test_precomp.hpp"
#include <opencv2/ts/cuda_test.hpp> // EXPECT_MAT_NEAR
#include "opencv2/videoio.hpp"
namespace opencv_test { namespace {
class fisheyeTest : public ::testing::Test {
protected:
const static cv::Size imageSize;
const static cv::Matx33d K;
const static cv::Vec4d D;
std::string datasets_repository_path;
virtual void SetUp() {
datasets_repository_path = combine(cvtest::TS::ptr()->get_data_path(), "cv/cameracalibration/fisheye");
}
protected:
std::string combine(const std::string& _item1, const std::string& _item2);
};
const cv::Size fisheyeTest::imageSize(1280, 800);
const cv::Matx33d fisheyeTest::K(558.478087865323, 0, 620.458515360843,
0, 560.506767351568, 381.939424848348,
0, 0, 1);
const cv::Vec4d fisheyeTest::D(-0.0014613319981768, -0.00329861110580401, 0.00605760088590183, -0.00374209380722371);
std::string fisheyeTest::combine(const std::string& _item1, const std::string& _item2)
{
std::string item1 = _item1, item2 = _item2;
std::replace(item1.begin(), item1.end(), '\\', '/');
std::replace(item2.begin(), item2.end(), '\\', '/');
if (item1.empty())
return item2;
if (item2.empty())
return item1;
char last = item1[item1.size()-1];
return item1 + (last != '/' ? "/" : "") + item2;
}
TEST_F(fisheyeTest, projectPoints)
{
double cols = this->imageSize.width,
rows = this->imageSize.height;
const int N = 20;
cv::Mat distorted0(1, N*N, CV_64FC2), undist1, undist2, distorted1, distorted2;
undist2.create(distorted0.size(), CV_MAKETYPE(distorted0.depth(), 3));
cv::Vec2d* pts = distorted0.ptr<cv::Vec2d>();
cv::Vec2d c(this->K(0, 2), this->K(1, 2));
for(int y = 0, k = 0; y < N; ++y)
for(int x = 0; x < N; ++x)
{
cv::Vec2d point(x*cols/(N-1.f), y*rows/(N-1.f));
pts[k++] = (point - c) * 0.85 + c;
}
cv::fisheye::undistortPoints(distorted0, undist1, this->K, this->D);
cv::Vec2d* u1 = undist1.ptr<cv::Vec2d>();
cv::Vec3d* u2 = undist2.ptr<cv::Vec3d>();
for(int i = 0; i < (int)distorted0.total(); ++i)
u2[i] = cv::Vec3d(u1[i][0], u1[i][1], 1.0);
cv::fisheye::distortPoints(undist1, distorted1, this->K, this->D);
cv::fisheye::projectPoints(undist2, distorted2, cv::Vec3d::all(0), cv::Vec3d::all(0), this->K, this->D);
EXPECT_MAT_NEAR(distorted0, distorted1, 1e-10);
EXPECT_MAT_NEAR(distorted0, distorted2, 1e-10);
}
TEST_F(fisheyeTest, distortUndistortPoints)
{
int width = imageSize.width;
int height = imageSize.height;
/* Create test points */
std::vector<cv::Point2d> points0Vector;
cv::Mat principalPoints = (cv::Mat_<double>(5, 2) << K(0, 2), K(1, 2), // (cx, cy)
/* Image corners */
0, 0,
0, height,
width, 0,
width, height
);
/* Random points inside image */
cv::Mat xy[2] = {};
xy[0].create(100, 1, CV_64F);
theRNG().fill(xy[0], cv::RNG::UNIFORM, 0, width); // x
xy[1].create(100, 1, CV_64F);
theRNG().fill(xy[1], cv::RNG::UNIFORM, 0, height); // y
cv::Mat randomPoints;
merge(xy, 2, randomPoints);
cv::Mat points0;
cv::vconcat(principalPoints.reshape(2), randomPoints, points0);
/* Test with random D set */
for (size_t i = 0; i < 10; ++i) {
cv::Mat distortion(1, 4, CV_64F);
theRNG().fill(distortion, cv::RNG::UNIFORM, -0.00001, 0.00001);
/* Distort -> Undistort */
cv::Mat distortedPoints;
cv::fisheye::distortPoints(points0, distortedPoints, K, distortion);
cv::Mat undistortedPoints;
cv::fisheye::undistortPoints(distortedPoints, undistortedPoints, K, distortion);
EXPECT_MAT_NEAR(points0, undistortedPoints, 1e-8);
/* Undistort -> Distort */
cv::fisheye::undistortPoints(points0, undistortedPoints, K, distortion);
cv::fisheye::distortPoints(undistortedPoints, distortedPoints, K, distortion);
EXPECT_MAT_NEAR(points0, distortedPoints, 1e-8);
}
}
TEST_F(fisheyeTest, undistortImage)
{
// we use it to reduce patch size for images in testdata
auto throwAwayHalf = [](Mat img)
{
int whalf = img.cols / 2, hhalf = img.rows / 2;
Rect tl(0, 0, whalf, hhalf), br(whalf, hhalf, whalf, hhalf);
img(tl) = 0;
img(br) = 0;
};
cv::Matx33d theK = this->K;
cv::Mat theD = cv::Mat(this->D);
std::string file = combine(datasets_repository_path, "/calib-3_stereo_from_JY/left/stereo_pair_014.jpg");
cv::Matx33d newK = theK;
cv::Mat distorted = cv::imread(file), undistorted;
{
newK(0, 0) = 100;
newK(1, 1) = 100;
cv::fisheye::undistortImage(distorted, undistorted, theK, theD, newK);
std::string imageFilename = combine(datasets_repository_path, "new_f_100.png");
cv::Mat correct = cv::imread(imageFilename);
ASSERT_FALSE(correct.empty()) << "Correct image " << imageFilename.c_str() << " can not be read" << std::endl;
throwAwayHalf(correct);
throwAwayHalf(undistorted);
EXPECT_MAT_NEAR(correct, undistorted, 1e-10);
}
{
double balance = 1.0;
cv::fisheye::estimateNewCameraMatrixForUndistortRectify(theK, theD, distorted.size(), cv::noArray(), newK, balance);
cv::fisheye::undistortImage(distorted, undistorted, theK, theD, newK);
std::string imageFilename = combine(datasets_repository_path, "balance_1.0.png");
cv::Mat correct = cv::imread(imageFilename);
ASSERT_FALSE(correct.empty()) << "Correct image " << imageFilename.c_str() << " can not be read" << std::endl;
throwAwayHalf(correct);
throwAwayHalf(undistorted);
EXPECT_MAT_NEAR(correct, undistorted, 1e-10);
}
{
double balance = 0.0;
cv::fisheye::estimateNewCameraMatrixForUndistortRectify(theK, theD, distorted.size(), cv::noArray(), newK, balance);
cv::fisheye::undistortImage(distorted, undistorted, theK, theD, newK);
std::string imageFilename = combine(datasets_repository_path, "balance_0.0.png");
cv::Mat correct = cv::imread(imageFilename);
ASSERT_FALSE(correct.empty()) << "Correct image " << imageFilename.c_str() << " can not be read" << std::endl;
throwAwayHalf(correct);
throwAwayHalf(undistorted);
EXPECT_MAT_NEAR(correct, undistorted, 1e-10);
}
}
TEST_F(fisheyeTest, undistortAndDistortImage)
{
cv::Matx33d K_src = this->K;
cv::Mat D_src = cv::Mat(this->D);
std::string file = combine(datasets_repository_path, "/calib-3_stereo_from_JY/left/stereo_pair_014.jpg");
cv::Matx33d K_dst = K_src;
cv::Mat image = cv::imread(file), image_projected;
cv::Vec4d D_dst_vec (-1.0, 0.0, 0.0, 0.0);
cv::Mat D_dst = cv::Mat(D_dst_vec);
int imageWidth = (int)this->imageSize.width;
int imageHeight = (int)this->imageSize.height;
cv::Mat imagePoints(imageHeight, imageWidth, CV_32FC2), undPoints, distPoints;
cv::Vec2f* pts = imagePoints.ptr<cv::Vec2f>();
for(int y = 0, k = 0; y < imageHeight; ++y)
{
for(int x = 0; x < imageWidth; ++x)
{
cv::Vec2f point((float)x, (float)y);
pts[k++] = point;
}
}
cv::fisheye::undistortPoints(imagePoints, undPoints, K_dst, D_dst);
cv::fisheye::distortPoints(undPoints, distPoints, K_src, D_src);
cv::remap(image, image_projected, distPoints, cv::noArray(), cv::INTER_LINEAR);
float dx, dy, r_sq;
float R_MAX = 250;
float imageCenterX = (float)imageWidth / 2;
float imageCenterY = (float)imageHeight / 2;
cv::Mat undPointsGt(imageHeight, imageWidth, CV_32FC2);
cv::Mat imageGt(imageHeight, imageWidth, CV_8UC3);
for(int y = 0; y < imageHeight; ++y)
{
for(int x = 0; x < imageWidth; ++x)
{
dx = x - imageCenterX;
dy = y - imageCenterY;
r_sq = dy * dy + dx * dx;
Vec2f & und_vec = undPoints.at<Vec2f>(y,x);
Vec3b & pixel = image_projected.at<Vec3b>(y,x);
Vec2f & undist_vec_gt = undPointsGt.at<Vec2f>(y,x);
Vec3b & pixel_gt = imageGt.at<Vec3b>(y,x);
if (r_sq > R_MAX * R_MAX)
{
undist_vec_gt[0] = -1e6;
undist_vec_gt[1] = -1e6;
pixel_gt[0] = 0;
pixel_gt[1] = 0;
pixel_gt[2] = 0;
}
else
{
undist_vec_gt[0] = und_vec[0];
undist_vec_gt[1] = und_vec[1];
pixel_gt[0] = pixel[0];
pixel_gt[1] = pixel[1];
pixel_gt[2] = pixel[2];
}
}
}
EXPECT_MAT_NEAR(undPoints, undPointsGt, 1e-10);
EXPECT_MAT_NEAR(image_projected, imageGt, 1e-10);
Vec2f dist_point_1 = distPoints.at<Vec2f>(400, 640);
Vec2f dist_point_1_gt(640.044f, 400.041f);
Vec2f dist_point_2 = distPoints.at<Vec2f>(400, 440);
Vec2f dist_point_2_gt(409.731f, 403.029f);
Vec2f dist_point_3 = distPoints.at<Vec2f>(200, 640);
Vec2f dist_point_3_gt(643.341f, 168.896f);
Vec2f dist_point_4 = distPoints.at<Vec2f>(300, 480);
Vec2f dist_point_4_gt(463.402f, 290.317f);
Vec2f dist_point_5 = distPoints.at<Vec2f>(550, 750);
Vec2f dist_point_5_gt(797.51f, 611.637f);
EXPECT_MAT_NEAR(dist_point_1, dist_point_1_gt, 1e-2);
EXPECT_MAT_NEAR(dist_point_2, dist_point_2_gt, 1e-2);
EXPECT_MAT_NEAR(dist_point_3, dist_point_3_gt, 1e-2);
EXPECT_MAT_NEAR(dist_point_4, dist_point_4_gt, 1e-2);
EXPECT_MAT_NEAR(dist_point_5, dist_point_5_gt, 1e-2);
// Add "--test_debug" to the test arguments to enable file output
if (cvtest::debugLevel > 0)
cv::imwrite(combine(datasets_repository_path, "new_distortion.png"), image_projected);
}
TEST_F(fisheyeTest, jacobians)
{
int n = 10;
cv::Mat X(1, n, CV_64FC3);
cv::Mat om(3, 1, CV_64F), theT(3, 1, CV_64F);
cv::Mat f(2, 1, CV_64F), c(2, 1, CV_64F);
cv::Mat k(4, 1, CV_64F);
double alpha;
cv::RNG r;
r.fill(X, cv::RNG::NORMAL, 2, 1);
X = cv::abs(X) * 10;
r.fill(om, cv::RNG::NORMAL, 0, 1);
om = cv::abs(om);
r.fill(theT, cv::RNG::NORMAL, 0, 1);
theT = cv::abs(theT); theT.at<double>(2) = 4; theT *= 10;
r.fill(f, cv::RNG::NORMAL, 0, 1);
f = cv::abs(f) * 1000;
r.fill(c, cv::RNG::NORMAL, 0, 1);
c = cv::abs(c) * 1000;
r.fill(k, cv::RNG::NORMAL, 0, 1);
k*= 0.5;
alpha = 0.01*r.gaussian(1);
cv::Mat x1, x2, xpred;
cv::Matx33d theK(f.at<double>(0), alpha * f.at<double>(0), c.at<double>(0),
0, f.at<double>(1), c.at<double>(1),
0, 0, 1);
cv::Mat jacobians;
cv::fisheye::projectPoints(X, x1, om, theT, theK, k, alpha, jacobians);
//test on T:
cv::Mat dT(3, 1, CV_64FC1);
r.fill(dT, cv::RNG::NORMAL, 0, 1);
dT *= 1e-9*cv::norm(theT);
cv::Mat T2 = theT + dT;
cv::fisheye::projectPoints(X, x2, om, T2, theK, k, alpha, cv::noArray());
xpred = x1 + cv::Mat(jacobians.colRange(11,14) * dT).reshape(2, 1);
CV_Assert (cv::norm(x2 - xpred) < 1e-10);
//test on om:
cv::Mat dom(3, 1, CV_64FC1);
r.fill(dom, cv::RNG::NORMAL, 0, 1);
dom *= 1e-9*cv::norm(om);
cv::Mat om2 = om + dom;
cv::fisheye::projectPoints(X, x2, om2, theT, theK, k, alpha, cv::noArray());
xpred = x1 + cv::Mat(jacobians.colRange(8,11) * dom).reshape(2, 1);
CV_Assert (cv::norm(x2 - xpred) < 1e-10);
//test on f:
cv::Mat df(2, 1, CV_64FC1);
r.fill(df, cv::RNG::NORMAL, 0, 1);
df *= 1e-9*cv::norm(f);
cv::Matx33d K2 = theK + cv::Matx33d(df.at<double>(0), df.at<double>(0) * alpha, 0, 0, df.at<double>(1), 0, 0, 0, 0);
cv::fisheye::projectPoints(X, x2, om, theT, K2, k, alpha, cv::noArray());
xpred = x1 + cv::Mat(jacobians.colRange(0,2) * df).reshape(2, 1);
CV_Assert (cv::norm(x2 - xpred) < 1e-10);
//test on c:
cv::Mat dc(2, 1, CV_64FC1);
r.fill(dc, cv::RNG::NORMAL, 0, 1);
dc *= 1e-9*cv::norm(c);
K2 = theK + cv::Matx33d(0, 0, dc.at<double>(0), 0, 0, dc.at<double>(1), 0, 0, 0);
cv::fisheye::projectPoints(X, x2, om, theT, K2, k, alpha, cv::noArray());
xpred = x1 + cv::Mat(jacobians.colRange(2,4) * dc).reshape(2, 1);
CV_Assert (cv::norm(x2 - xpred) < 1e-10);
//test on k:
cv::Mat dk(4, 1, CV_64FC1);
r.fill(dk, cv::RNG::NORMAL, 0, 1);
dk *= 1e-9*cv::norm(k);
cv::Mat k2 = k + dk;
cv::fisheye::projectPoints(X, x2, om, theT, theK, k2, alpha, cv::noArray());
xpred = x1 + cv::Mat(jacobians.colRange(4,8) * dk).reshape(2, 1);
CV_Assert (cv::norm(x2 - xpred) < 1e-10);
//test on alpha:
cv::Mat dalpha(1, 1, CV_64FC1);
r.fill(dalpha, cv::RNG::NORMAL, 0, 1);
dalpha *= 1e-9*cv::norm(f);
double alpha2 = alpha + dalpha.at<double>(0);
K2 = theK + cv::Matx33d(0, f.at<double>(0) * dalpha.at<double>(0), 0, 0, 0, 0, 0, 0, 0);
cv::fisheye::projectPoints(X, x2, om, theT, theK, k, alpha2, cv::noArray());
xpred = x1 + cv::Mat(jacobians.col(14) * dalpha).reshape(2, 1);
CV_Assert (cv::norm(x2 - xpred) < 1e-10);
}
}}

View File

@ -443,7 +443,7 @@ enum { CALIB_USE_INTRINSIC_GUESS = 0x00001, //!< Use user provided intrinsics as
CALIB_FIX_INTRINSIC = 0x00100, //!< For stereo and multi-camera calibration only. Do not optimize camera intrinsics
CALIB_SAME_FOCAL_LENGTH = 0x00200, //!< For stereo calibration only. Use the same focal length for both cameras in the pair.
// for stereo rectification
CALIB_ZERO_DISPARITY = 0x00400, //!< Deprecated synonym of @ref STEREO_ZERO_DISPARITY. See @ref stereoRectify.
CALIB_USE_LU = (1 << 17), //!< use LU instead of SVD decomposition for solving; much faster but potentially less precise
CALIB_USE_EXTRINSIC_GUESS = (1 << 22), //!< For stereo calibration only. Use user-provided extrinsics (R, T) as the initial point for optimization
// fisheye only flags
@ -1486,128 +1486,6 @@ using cv::CALIB_FIX_PRINCIPAL_POINT;
using cv::CALIB_ZERO_DISPARITY;
using cv::CALIB_FIX_FOCAL_LENGTH;
/** @brief Projects points using fisheye model
@param objectPoints Array of object points, 1xN/Nx1 3-channel (or vector\<Point3f\> ), where N is
the number of points in the view.
@param imagePoints Output array of image points, 2xN/Nx2 1-channel or 1xN/Nx1 2-channel, or
vector\<Point2f\>.
@param affine Rigid 3D transformation (rotation and translation) from object coordinates to camera
coordinates; an alternative to passing rvec and tvec separately.
@param K Camera intrinsic matrix \f$cameramatrix{K}\f$.
@param D Input vector of distortion coefficients \f$\distcoeffsfisheye\f$.
@param alpha The skew coefficient.
@param jacobian Optional output 2Nx15 jacobian matrix of derivatives of image points with respect
to components of the focal lengths, coordinates of the principal point, distortion coefficients,
rotation vector, translation vector, and the skew. In the old interface different components of
the jacobian are returned via different output parameters.
The function computes projections of 3D points to the image plane given intrinsic and extrinsic
camera parameters. Optionally, the function computes Jacobians - matrices of partial derivatives of
image points coordinates (as functions of all the input parameters) with respect to the particular
parameters, intrinsic and/or extrinsic.
*/
CV_EXPORTS void projectPoints(InputArray objectPoints, OutputArray imagePoints, const Affine3d& affine,
InputArray K, InputArray D, double alpha = 0, OutputArray jacobian = noArray());
/** @overload */
CV_EXPORTS_W void projectPoints(InputArray objectPoints, OutputArray imagePoints, InputArray rvec, InputArray tvec,
InputArray K, InputArray D, double alpha = 0, OutputArray jacobian = noArray());
/** @brief Distorts 2D points using fisheye model.
@param undistorted Array of object points, 1xN/Nx1 2-channel (or vector\<Point2f\> ), where N is
the number of points in the view.
@param K Camera intrinsic matrix \f$cameramatrix{K}\f$.
@param D Input vector of distortion coefficients \f$\distcoeffsfisheye\f$.
@param alpha The skew coefficient.
@param distorted Output array of image points, 1xN/Nx1 2-channel, or vector\<Point2f\> .
Note that the function assumes the camera intrinsic matrix of the undistorted points to be the identity.
This means that if you want to distort image points you have to multiply them by \f$K^{-1}\f$.
*/
CV_EXPORTS_W void distortPoints(InputArray undistorted, OutputArray distorted, InputArray K, InputArray D, double alpha = 0);
/** @brief Undistorts 2D points using fisheye model
@param distorted Array of object points, 1xN/Nx1 2-channel (or vector\<Point2f\> ), where N is the
number of points in the view.
@param K Camera intrinsic matrix \f$cameramatrix{K}\f$.
@param D Input vector of distortion coefficients \f$\distcoeffsfisheye\f$.
@param R Rectification transformation in the object space: 3x3 1-channel, or vector: 3x1/1x3
1-channel or 1x1 3-channel
@param P New camera intrinsic matrix (3x3) or new projection matrix (3x4)
@param criteria Termination criteria
@param undistorted Output array of image points, 1xN/Nx1 2-channel, or vector\<Point2f\> .
*/
CV_EXPORTS_W void undistortPoints(InputArray distorted, OutputArray undistorted,
InputArray K, InputArray D, InputArray R = noArray(), InputArray P = noArray(),
TermCriteria criteria = TermCriteria(TermCriteria::MAX_ITER + TermCriteria::EPS, 10, 1e-8));
/** @brief Computes undistortion and rectification maps for an image transform by cv::remap(). If D is empty,
zero distortion is used; if R or P is empty, the identity matrix is used.
@param K Camera intrinsic matrix \f$cameramatrix{K}\f$.
@param D Input vector of distortion coefficients \f$\distcoeffsfisheye\f$.
@param R Rectification transformation in the object space: 3x3 1-channel, or vector: 3x1/1x3
1-channel or 1x1 3-channel
@param P New camera intrinsic matrix (3x3) or new projection matrix (3x4)
@param size Undistorted image size.
@param m1type Type of the first output map that can be CV_32FC1 or CV_16SC2 . See convertMaps()
for details.
@param map1 The first output map.
@param map2 The second output map.
*/
CV_EXPORTS_W void initUndistortRectifyMap(InputArray K, InputArray D, InputArray R, InputArray P,
const cv::Size& size, int m1type, OutputArray map1, OutputArray map2);
/** @brief Transforms an image to compensate for fisheye lens distortion.
@param distorted image with fisheye lens distortion.
@param undistorted Output image with compensated fisheye lens distortion.
@param K Camera intrinsic matrix \f$cameramatrix{K}\f$.
@param D Input vector of distortion coefficients \f$\distcoeffsfisheye\f$.
@param Knew Camera intrinsic matrix of the distorted image. By default, it is the identity matrix but you
may additionally scale and shift the result by using a different matrix.
@param new_size Size of the undistorted output image. When empty (default), the size of the distorted
input image is used.
The function transforms an image to compensate for radial and tangential lens distortion.
The function is simply a combination of fisheye::initUndistortRectifyMap (with unity R ) and remap
(with bilinear interpolation). See the former function for details of the transformation being
performed.
See below the results of undistortImage.
- a\) result of undistort of perspective camera model (all possible coefficients (k_1, k_2, k_3,
k_4, k_5, k_6) of distortion were optimized under calibration)
- b\) result of fisheye::undistortImage of fisheye camera model (all possible coefficients (k_1, k_2,
k_3, k_4) of fisheye distortion were optimized under calibration)
- c\) original image was captured with fisheye lens
Pictures a) and b) are almost the same. But if we consider points of the image located far from the
image center, we can notice that in image a) these points are distorted.
![image](pics/fisheye_undistorted.jpg)
*/
CV_EXPORTS_W void undistortImage(InputArray distorted, OutputArray undistorted,
InputArray K, InputArray D, InputArray Knew = cv::noArray(), const Size& new_size = Size());
/** @brief Estimates new camera intrinsic matrix for undistortion or rectification.
@param K Camera intrinsic matrix \f$cameramatrix{K}\f$.
@param image_size Size of the image
@param D Input vector of distortion coefficients \f$\distcoeffsfisheye\f$.
@param R Rectification transformation in the object space: 3x3 1-channel, or vector: 3x1/1x3
1-channel or 1x1 3-channel
@param P New camera intrinsic matrix (3x3) or new projection matrix (3x4)
@param balance Sets the new focal length in range between the min focal length and the max focal
length. Balance is in range of [0, 1].
@param new_size The new image size. When empty (default), it is set to image_size.
@param fov_scale Divisor for new focal length.
*/
CV_EXPORTS_W void estimateNewCameraMatrixForUndistortRectify(InputArray K, InputArray D, const Size &image_size, InputArray R,
OutputArray P, double balance = 0.0, const Size& new_size = Size(), double fov_scale = 1.0);
/** @brief Performs camera calibration
@param objectPoints vector of vectors of calibration pattern points in the calibration pattern
@ -1647,40 +1525,6 @@ CV_EXPORTS_W double calibrate(InputArrayOfArrays objectPoints, InputArrayOfArray
InputOutputArray K, InputOutputArray D, OutputArrayOfArrays rvecs, OutputArrayOfArrays tvecs, int flags = 0,
TermCriteria criteria = TermCriteria(TermCriteria::COUNT + TermCriteria::EPS, 100, DBL_EPSILON));
/** @brief Stereo rectification for fisheye camera model
@param K1 First camera intrinsic matrix.
@param D1 First camera distortion parameters.
@param K2 Second camera intrinsic matrix.
@param D2 Second camera distortion parameters.
@param imageSize Size of the image used for stereo calibration.
@param R Rotation matrix between the coordinate systems of the first and the second
cameras.
@param tvec Translation vector between coordinate systems of the cameras.
@param R1 Output 3x3 rectification transform (rotation matrix) for the first camera.
@param R2 Output 3x3 rectification transform (rotation matrix) for the second camera.
@param P1 Output 3x4 projection matrix in the new (rectified) coordinate systems for the first
camera.
@param P2 Output 3x4 projection matrix in the new (rectified) coordinate systems for the second
camera.
@param Q Output \f$4 \times 4\f$ disparity-to-depth mapping matrix (see reprojectImageTo3D ).
@param flags Operation flags that may be zero or @ref CALIB_ZERO_DISPARITY . If the flag is set,
the function makes the principal points of each camera have the same pixel coordinates in the
rectified views. And if the flag is not set, the function may still shift the images in the
horizontal or vertical direction (depending on the orientation of epipolar lines) to maximize the
useful image area.
@param newImageSize New image resolution after rectification. The same size should be passed to
#initUndistortRectifyMap (see the stereo_calib.cpp sample in OpenCV samples directory). When (0,0)
is passed (default), it is set to the original imageSize . Setting it to larger value can help you
preserve details in the original image, especially when there is a big radial distortion.
@param balance Sets the new focal length in range between the min focal length and the max focal
length. Balance is in range of [0, 1].
@param fov_scale Divisor for new focal length.
*/
CV_EXPORTS_W void stereoRectify(InputArray K1, InputArray D1, InputArray K2, InputArray D2, const Size &imageSize, InputArray R, InputArray tvec,
OutputArray R1, OutputArray R2, OutputArray P1, OutputArray P2, OutputArray Q, int flags, const Size &newImageSize = Size(),
double balance = 0.0, double fov_scale = 1.0);
/** @brief Performs stereo calibration
@param objectPoints Vector of vectors of the calibration pattern points.

View File

@ -125,32 +125,4 @@ public class CalibTest extends OpenCVTestCase {
assertEquals((1 << 17), Calib.CALIB_USE_LU);
assertEquals((1 << 22), Calib.CALIB_USE_EXTRINSIC_GUESS);
}
public void testEstimateNewCameraMatrixForUndistortRectify() {
Mat K = new Mat().eye(3, 3, CvType.CV_64FC1);
Mat K_new = new Mat().eye(3, 3, CvType.CV_64FC1);
Mat K_new_truth = new Mat().eye(3, 3, CvType.CV_64FC1);
Mat D = new Mat().zeros(4, 1, CvType.CV_64FC1);
K.put(0,0,600.4447738238429);
K.put(1,1,578.9929805505851);
K.put(0,2,992.0642578801213);
K.put(1,2,549.2682624212172);
D.put(0,0,-0.05090103223466704);
D.put(1,0,0.030944413642173308);
D.put(2,0,-0.021509225493198905);
D.put(3,0,0.0043378096628297145);
K_new_truth.put(0,0, 387.5118215642316);
K_new_truth.put(0,2, 1033.936556777084);
K_new_truth.put(1,1, 373.6673784974842);
K_new_truth.put(1,2, 538.794152656429);
Calib.fisheye_estimateNewCameraMatrixForUndistortRectify(K,D,new Size(1920,1080),
new Mat().eye(3, 3, CvType.CV_64F), K_new, 0.0, new Size(1920,1080));
assertMatEqual(K_new, K_new_truth, EPS);
}
}

View File

@ -1,40 +0,0 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html
#include "perf_precomp.hpp"
namespace opencv_test { namespace {
PERF_TEST(Undistort, fisheye_undistortPoints_100k_10iter)
{
const int pointsNumber = 100000;
const Size imageSize(1280, 800);
/* Set camera matrix */
const Matx33d K(558.478087865323, 0, 620.458515360843,
0, 560.506767351568, 381.939424848348,
0, 0, 1);
/* Set distortion coefficients */
const Matx14d D(2.81e-06, 1.31e-06, -4.42e-06, -1.25e-06);
/* Create two-channel points matrix */
Mat xy[2] = {};
xy[0].create(pointsNumber, 1, CV_64F);
theRNG().fill(xy[0], RNG::UNIFORM, 0, imageSize.width); // x
xy[1].create(pointsNumber, 1, CV_64F);
theRNG().fill(xy[1], RNG::UNIFORM, 0, imageSize.height); // y
Mat points;
merge(xy, 2, points);
/* Set fixed iteration number to check only c++ code, not algo convergence */
TermCriteria termCriteria(TermCriteria::MAX_ITER, 10, 0);
Mat undistortedPoints;
TEST_CYCLE() fisheye::undistortPoints(points, undistortedPoints, K, D, noArray(), noArray(), termCriteria);
SANITY_CHECK_NOTHING();
}
}} // namespace

View File

@ -47,701 +47,10 @@
namespace cv {
namespace {
struct JacobianRow
{
Vec2d df, dc;
Vec4d dk;
Vec3d dom, dT;
double dalpha;
};
void subMatrix(const Mat& src, Mat& dst, const std::vector<uchar>& cols, const std::vector<uchar>& rows);
}}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// cv::fisheye::projectPoints
void cv::fisheye::projectPoints(InputArray objectPoints, OutputArray imagePoints, const Affine3d& affine,
InputArray K, InputArray D, double alpha, OutputArray jacobian)
{
CV_INSTRUMENT_REGION();
projectPoints(objectPoints, imagePoints, affine.rvec(), affine.translation(), K, D, alpha, jacobian);
}
void cv::fisheye::projectPoints(InputArray objectPoints, OutputArray imagePoints, InputArray _rvec,
InputArray _tvec, InputArray _K, InputArray _D, double alpha, OutputArray jacobian)
{
CV_INSTRUMENT_REGION();
// will support only 3-channel data now for points
CV_Assert(objectPoints.type() == CV_32FC3 || objectPoints.type() == CV_64FC3);
imagePoints.create(objectPoints.size(), CV_MAKETYPE(objectPoints.depth(), 2));
size_t n = objectPoints.total();
CV_Assert(_rvec.total() * _rvec.channels() == 3 && (_rvec.depth() == CV_32F || _rvec.depth() == CV_64F));
CV_Assert(_tvec.total() * _tvec.channels() == 3 && (_tvec.depth() == CV_32F || _tvec.depth() == CV_64F));
CV_Assert(_tvec.getMat().isContinuous() && _rvec.getMat().isContinuous());
Vec3d om = _rvec.depth() == CV_32F ? (Vec3d)*_rvec.getMat().ptr<Vec3f>() : *_rvec.getMat().ptr<Vec3d>();
Vec3d T = _tvec.depth() == CV_32F ? (Vec3d)*_tvec.getMat().ptr<Vec3f>() : *_tvec.getMat().ptr<Vec3d>();
CV_Assert(_K.size() == Size(3,3) && (_K.type() == CV_32F || _K.type() == CV_64F) && _D.type() == _K.type() && _D.total() == 4);
Vec2d f, c;
if (_K.depth() == CV_32F)
{
Matx33f K = _K.getMat();
f = Vec2f(K(0, 0), K(1, 1));
c = Vec2f(K(0, 2), K(1, 2));
}
else
{
Matx33d K = _K.getMat();
f = Vec2d(K(0, 0), K(1, 1));
c = Vec2d(K(0, 2), K(1, 2));
}
Vec4d k = _D.depth() == CV_32F ? (Vec4d)*_D.getMat().ptr<Vec4f>(): *_D.getMat().ptr<Vec4d>();
const bool isJacobianNeeded = jacobian.needed();
JacobianRow *Jn = 0;
if (isJacobianNeeded)
{
int nvars = 2 + 2 + 1 + 4 + 3 + 3; // f, c, alpha, k, om, T,
jacobian.create(2*(int)n, nvars, CV_64F);
Jn = jacobian.getMat().ptr<JacobianRow>(0);
}
Matx33d R;
Matx<double, 3, 9> dRdom;
Rodrigues(om, R, dRdom);
Affine3d aff(om, T);
const Vec3f* Xf = objectPoints.getMat().ptr<Vec3f>();
const Vec3d* Xd = objectPoints.getMat().ptr<Vec3d>();
Vec2f *xpf = imagePoints.getMat().ptr<Vec2f>();
Vec2d *xpd = imagePoints.getMat().ptr<Vec2d>();
for(size_t i = 0; i < n; ++i)
{
Vec3d Xi = objectPoints.depth() == CV_32F ? (Vec3d)Xf[i] : Xd[i];
Vec3d Y = aff*Xi;
if (fabs(Y[2]) < DBL_MIN)
Y[2] = 1;
Vec2d x(Y[0]/Y[2], Y[1]/Y[2]);
double r2 = x.dot(x);
double r = std::sqrt(r2);
// Angle of the incoming ray:
double theta = std::atan(r);
double theta2 = theta*theta, theta3 = theta2*theta, theta4 = theta2*theta2, theta5 = theta4*theta,
theta6 = theta3*theta3, theta7 = theta6*theta, theta8 = theta4*theta4, theta9 = theta8*theta;
double theta_d = theta + k[0]*theta3 + k[1]*theta5 + k[2]*theta7 + k[3]*theta9;
double inv_r = r > 1e-8 ? 1.0/r : 1;
double cdist = r > 1e-8 ? theta_d * inv_r : 1;
Vec2d xd1 = x * cdist;
Vec2d xd3(xd1[0] + alpha*xd1[1], xd1[1]);
Vec2d final_point(xd3[0] * f[0] + c[0], xd3[1] * f[1] + c[1]);
if (objectPoints.depth() == CV_32F)
xpf[i] = final_point;
else
xpd[i] = final_point;
if (isJacobianNeeded)
{
//Vec3d Xi = pdepth == CV_32F ? (Vec3d)Xf[i] : Xd[i];
//Vec3d Y = aff*Xi;
double dYdR[] = { Xi[0], Xi[1], Xi[2], 0, 0, 0, 0, 0, 0,
0, 0, 0, Xi[0], Xi[1], Xi[2], 0, 0, 0,
0, 0, 0, 0, 0, 0, Xi[0], Xi[1], Xi[2] };
Matx33d dYdom_data = Matx<double, 3, 9>(dYdR) * dRdom.t();
const Vec3d *dYdom = (Vec3d*)dYdom_data.val;
Matx33d dYdT_data = Matx33d::eye();
const Vec3d *dYdT = (Vec3d*)dYdT_data.val;
//Vec2d x(Y[0]/Y[2], Y[1]/Y[2]);
Vec3d dxdom[2];
dxdom[0] = (1.0/Y[2]) * dYdom[0] - x[0]/Y[2] * dYdom[2];
dxdom[1] = (1.0/Y[2]) * dYdom[1] - x[1]/Y[2] * dYdom[2];
Vec3d dxdT[2];
dxdT[0] = (1.0/Y[2]) * dYdT[0] - x[0]/Y[2] * dYdT[2];
dxdT[1] = (1.0/Y[2]) * dYdT[1] - x[1]/Y[2] * dYdT[2];
//double r2 = x.dot(x);
Vec3d dr2dom = 2 * x[0] * dxdom[0] + 2 * x[1] * dxdom[1];
Vec3d dr2dT = 2 * x[0] * dxdT[0] + 2 * x[1] * dxdT[1];
//double r = std::sqrt(r2);
double drdr2 = r > 1e-8 ? 1.0/(2*r) : 1;
Vec3d drdom = drdr2 * dr2dom;
Vec3d drdT = drdr2 * dr2dT;
// Angle of the incoming ray:
//double theta = atan(r);
double dthetadr = 1.0/(1+r2);
Vec3d dthetadom = dthetadr * drdom;
Vec3d dthetadT = dthetadr * drdT;
//double theta_d = theta + k[0]*theta3 + k[1]*theta5 + k[2]*theta7 + k[3]*theta9;
double dtheta_ddtheta = 1 + 3*k[0]*theta2 + 5*k[1]*theta4 + 7*k[2]*theta6 + 9*k[3]*theta8;
Vec3d dtheta_ddom = dtheta_ddtheta * dthetadom;
Vec3d dtheta_ddT = dtheta_ddtheta * dthetadT;
Vec4d dtheta_ddk = Vec4d(theta3, theta5, theta7, theta9);
//double inv_r = r > 1e-8 ? 1.0/r : 1;
//double cdist = r > 1e-8 ? theta_d / r : 1;
Vec3d dcdistdom = inv_r * (dtheta_ddom - cdist*drdom);
Vec3d dcdistdT = inv_r * (dtheta_ddT - cdist*drdT);
Vec4d dcdistdk = inv_r * dtheta_ddk;
//Vec2d xd1 = x * cdist;
Vec4d dxd1dk[2];
Vec3d dxd1dom[2], dxd1dT[2];
dxd1dom[0] = x[0] * dcdistdom + cdist * dxdom[0];
dxd1dom[1] = x[1] * dcdistdom + cdist * dxdom[1];
dxd1dT[0] = x[0] * dcdistdT + cdist * dxdT[0];
dxd1dT[1] = x[1] * dcdistdT + cdist * dxdT[1];
dxd1dk[0] = x[0] * dcdistdk;
dxd1dk[1] = x[1] * dcdistdk;
//Vec2d xd3(xd1[0] + alpha*xd1[1], xd1[1]);
Vec4d dxd3dk[2];
Vec3d dxd3dom[2], dxd3dT[2];
dxd3dom[0] = dxd1dom[0] + alpha * dxd1dom[1];
dxd3dom[1] = dxd1dom[1];
dxd3dT[0] = dxd1dT[0] + alpha * dxd1dT[1];
dxd3dT[1] = dxd1dT[1];
dxd3dk[0] = dxd1dk[0] + alpha * dxd1dk[1];
dxd3dk[1] = dxd1dk[1];
Vec2d dxd3dalpha(xd1[1], 0);
//final jacobian
Jn[0].dom = f[0] * dxd3dom[0];
Jn[1].dom = f[1] * dxd3dom[1];
Jn[0].dT = f[0] * dxd3dT[0];
Jn[1].dT = f[1] * dxd3dT[1];
Jn[0].dk = f[0] * dxd3dk[0];
Jn[1].dk = f[1] * dxd3dk[1];
Jn[0].dalpha = f[0] * dxd3dalpha[0];
Jn[1].dalpha = 0; //f[1] * dxd3dalpha[1];
Jn[0].df = Vec2d(xd3[0], 0);
Jn[1].df = Vec2d(0, xd3[1]);
Jn[0].dc = Vec2d(1, 0);
Jn[1].dc = Vec2d(0, 1);
//step to jacobian rows for next point
Jn += 2;
}
}
}
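/* Usage sketch (illustrative only; the intrinsics and the point below are made-up values,
   not taken from this code):

       cv::Matx33d K(300, 0, 320,
                     0, 300, 240,
                     0,   0,   1);
       cv::Vec4d D(0.01, -0.02, 0.003, -0.001);
       std::vector<cv::Point3d> obj(1, cv::Point3d(0.1, -0.2, 1.0));   // CV_64FC3, as asserted above
       std::vector<cv::Point2d> img;
       cv::fisheye::projectPoints(obj, img, cv::Vec3d::all(0), cv::Vec3d::all(0), K, D);
       // img[0] now holds the distorted pixel coordinates of obj[0]
*/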
//////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// cv::fisheye::distortPoints
void cv::fisheye::distortPoints(InputArray undistorted, OutputArray distorted, InputArray K, InputArray D, double alpha)
{
CV_INSTRUMENT_REGION();
// for now, only 2-channel point data is supported
CV_Assert(undistorted.type() == CV_32FC2 || undistorted.type() == CV_64FC2);
distorted.create(undistorted.size(), undistorted.type());
size_t n = undistorted.total();
CV_Assert(K.size() == Size(3,3) && (K.type() == CV_32F || K.type() == CV_64F) && D.total() == 4);
Vec2d f, c;
if (K.depth() == CV_32F)
{
Matx33f camMat = K.getMat();
f = Vec2f(camMat(0, 0), camMat(1, 1));
c = Vec2f(camMat(0, 2), camMat(1, 2));
}
else
{
Matx33d camMat = K.getMat();
f = Vec2d(camMat(0, 0), camMat(1, 1));
c = Vec2d(camMat(0 ,2), camMat(1, 2));
}
Vec4d k = D.depth() == CV_32F ? (Vec4d)*D.getMat().ptr<Vec4f>(): *D.getMat().ptr<Vec4d>();
const Vec2f* Xf = undistorted.getMat().ptr<Vec2f>();
const Vec2d* Xd = undistorted.getMat().ptr<Vec2d>();
Vec2f *xpf = distorted.getMat().ptr<Vec2f>();
Vec2d *xpd = distorted.getMat().ptr<Vec2d>();
for(size_t i = 0; i < n; ++i)
{
Vec2d x = undistorted.depth() == CV_32F ? (Vec2d)Xf[i] : Xd[i];
double r2 = x.dot(x);
double r = std::sqrt(r2);
// Angle of the incoming ray:
double theta = std::atan(r);
double theta2 = theta*theta, theta3 = theta2*theta, theta4 = theta2*theta2, theta5 = theta4*theta,
theta6 = theta3*theta3, theta7 = theta6*theta, theta8 = theta4*theta4, theta9 = theta8*theta;
double theta_d = theta + k[0]*theta3 + k[1]*theta5 + k[2]*theta7 + k[3]*theta9;
double inv_r = r > 1e-8 ? 1.0/r : 1;
double cdist = r > 1e-8 ? theta_d * inv_r : 1;
Vec2d xd1 = x * cdist;
Vec2d xd3(xd1[0] + alpha*xd1[1], xd1[1]);
Vec2d final_point(xd3[0] * f[0] + c[0], xd3[1] * f[1] + c[1]);
if (undistorted.depth() == CV_32F)
xpf[i] = final_point;
else
xpd[i] = final_point;
}
}
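/* Usage sketch (illustrative only, made-up values). As the code above shows, the input is
   taken as already-normalized coordinates: K^-1 is never applied here; the points are only
   pushed through the distortion polynomial and then scaled by f and shifted by c.

       cv::Matx33d K(300, 0, 320, 0, 300, 240, 0, 0, 1);
       cv::Vec4d D(0.01, -0.02, 0.003, -0.001);
       std::vector<cv::Point2d> normalized(1, cv::Point2d(0.1, -0.05));
       std::vector<cv::Point2d> pixels;
       cv::fisheye::distortPoints(normalized, pixels, K, D);
*/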
//////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// cv::fisheye::undistortPoints
void cv::fisheye::undistortPoints( InputArray distorted, OutputArray undistorted, InputArray K, InputArray D,
InputArray R, InputArray P, TermCriteria criteria)
{
CV_INSTRUMENT_REGION();
// for now, only 2-channel point data is supported
CV_Assert(distorted.type() == CV_32FC2 || distorted.type() == CV_64FC2);
undistorted.create(distorted.size(), distorted.type());
CV_Assert(P.empty() || P.size() == Size(3, 3) || P.size() == Size(4, 3));
CV_Assert(R.empty() || R.size() == Size(3, 3) || R.total() * R.channels() == 3);
CV_Assert(D.total() == 4 && K.size() == Size(3, 3) && (K.depth() == CV_32F || K.depth() == CV_64F));
CV_Assert(criteria.isValid());
Vec2d f, c;
if (K.depth() == CV_32F)
{
Matx33f camMat = K.getMat();
f = Vec2f(camMat(0, 0), camMat(1, 1));
c = Vec2f(camMat(0, 2), camMat(1, 2));
}
else
{
Matx33d camMat = K.getMat();
f = Vec2d(camMat(0, 0), camMat(1, 1));
c = Vec2d(camMat(0, 2), camMat(1, 2));
}
Vec4d k = D.depth() == CV_32F ? (Vec4d)*D.getMat().ptr<Vec4f>(): *D.getMat().ptr<Vec4d>();
Matx33d RR = Matx33d::eye();
if (!R.empty() && R.total() * R.channels() == 3)
{
Vec3d rvec;
R.getMat().convertTo(rvec, CV_64F);
RR = cv::Affine3d(rvec).rotation();
}
else if (!R.empty() && R.size() == Size(3, 3))
R.getMat().convertTo(RR, CV_64F);
if(!P.empty())
{
Matx33d PP;
P.getMat().colRange(0, 3).convertTo(PP, CV_64F);
RR = PP * RR;
}
// start undistorting
const Vec2f* srcf = distorted.getMat().ptr<Vec2f>();
const Vec2d* srcd = distorted.getMat().ptr<Vec2d>();
Vec2f* dstf = undistorted.getMat().ptr<Vec2f>();
Vec2d* dstd = undistorted.getMat().ptr<Vec2d>();
size_t n = distorted.total();
int sdepth = distorted.depth();
const bool isEps = (criteria.type & TermCriteria::EPS) != 0;
/* Define max count for solver iterations */
int maxCount = std::numeric_limits<int>::max();
if (criteria.type & TermCriteria::MAX_ITER) {
maxCount = criteria.maxCount;
}
for(size_t i = 0; i < n; i++ )
{
Vec2d pi = sdepth == CV_32F ? (Vec2d)srcf[i] : srcd[i]; // image point
Vec2d pw((pi[0] - c[0])/f[0], (pi[1] - c[1])/f[1]); // world point
double theta_d = sqrt(pw[0]*pw[0] + pw[1]*pw[1]);
// the current camera model is only valid up to a 180-degree FOV;
// for larger FOVs the loop below does not converge.
// Clip values so we still get plausible results for super-fisheye images with FOV > 180 degrees.
theta_d = min(max(-CV_PI/2., theta_d), CV_PI/2.);
bool converged = false;
double theta = theta_d;
double scale = 0.0;
if (!isEps || fabs(theta_d) > criteria.epsilon)
{
// compensate distortion iteratively using Newton method
for (int j = 0; j < maxCount; j++)
{
double theta2 = theta*theta, theta4 = theta2*theta2, theta6 = theta4*theta2, theta8 = theta6*theta2;
double k0_theta2 = k[0] * theta2, k1_theta4 = k[1] * theta4, k2_theta6 = k[2] * theta6, k3_theta8 = k[3] * theta8;
/* new_theta = theta - theta_fix, theta_fix = f0(theta) / f0'(theta) */
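/* here f0(theta) = theta * (1 + k0_theta2 + k1_theta4 + k2_theta6 + k3_theta8) - theta_d
   and f0'(theta) = 1 + 3*k0_theta2 + 5*k1_theta4 + 7*k2_theta6 + 9*k3_theta8,
   which are exactly the numerator and denominator computed below */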
double theta_fix = (theta * (1 + k0_theta2 + k1_theta4 + k2_theta6 + k3_theta8) - theta_d) /
(1 + 3*k0_theta2 + 5*k1_theta4 + 7*k2_theta6 + 9*k3_theta8);
theta = theta - theta_fix;
if (isEps && (fabs(theta_fix) < criteria.epsilon))
{
converged = true;
break;
}
}
scale = std::tan(theta) / theta_d;
}
else
{
converged = true;
}
// theta is monotonically increasing or decreasing depending on the sign of theta;
// if theta has flipped sign, the iteration might still converge due to symmetry, but to a point
// on the opposite side of the camera center, so check whether theta changed sign during the optimization
bool theta_flipped = ((theta_d < 0 && theta > 0) || (theta_d > 0 && theta < 0));
if ((converged || !isEps) && !theta_flipped)
{
Vec2d pu = pw * scale; //undistorted point
// reproject
Vec3d pr = RR * Vec3d(pu[0], pu[1], 1.0); // rotated point optionally multiplied by new camera matrix
Vec2d fi(pr[0]/pr[2], pr[1]/pr[2]); // final
if( sdepth == CV_32F )
dstf[i] = fi;
else
dstd[i] = fi;
}
else
{
// Vec2d fi(std::numeric_limits<double>::quiet_NaN(), std::numeric_limits<double>::quiet_NaN());
Vec2d fi(-1000000.0, -1000000.0);
if( sdepth == CV_32F )
dstf[i] = fi;
else
dstd[i] = fi;
}
}
}
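/* Usage sketch (illustrative only, made-up values): map distorted pixels back to normalized
   coordinates, or pass K as P to get undistorted pixel coordinates instead.

       cv::Matx33d K(300, 0, 320, 0, 300, 240, 0, 0, 1);
       cv::Vec4d D(0.01, -0.02, 0.003, -0.001);
       std::vector<cv::Point2d> pix(1, cv::Point2d(400.0, 250.0)), norm, rect;
       cv::fisheye::undistortPoints(pix, norm, K, D);                    // normalized output
       cv::fisheye::undistortPoints(pix, rect, K, D, cv::noArray(), K);  // pixel output
*/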
//////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// cv::fisheye::initUndistortRectifyMap
void cv::fisheye::initUndistortRectifyMap( InputArray K, InputArray D, InputArray R, InputArray P,
const cv::Size& size, int m1type, OutputArray map1, OutputArray map2 )
{
CV_INSTRUMENT_REGION();
CV_Assert( m1type == CV_16SC2 || m1type == CV_32F || m1type <=0 );
map1.create( size, m1type <= 0 ? CV_16SC2 : m1type );
map2.create( size, map1.type() == CV_16SC2 ? CV_16UC1 : CV_32F );
CV_Assert((K.depth() == CV_32F || K.depth() == CV_64F) && (D.depth() == CV_32F || D.depth() == CV_64F));
CV_Assert((P.empty() || P.depth() == CV_32F || P.depth() == CV_64F) && (R.empty() || R.depth() == CV_32F || R.depth() == CV_64F));
CV_Assert(K.size() == Size(3, 3) && (D.empty() || D.total() == 4));
CV_Assert(R.empty() || R.size() == Size(3, 3) || R.total() * R.channels() == 3);
CV_Assert(P.empty() || P.size() == Size(3, 3) || P.size() == Size(4, 3));
Vec2d f, c;
if (K.depth() == CV_32F)
{
Matx33f camMat = K.getMat();
f = Vec2f(camMat(0, 0), camMat(1, 1));
c = Vec2f(camMat(0, 2), camMat(1, 2));
}
else
{
Matx33d camMat = K.getMat();
f = Vec2d(camMat(0, 0), camMat(1, 1));
c = Vec2d(camMat(0, 2), camMat(1, 2));
}
Vec4d k = Vec4d::all(0);
if (!D.empty())
k = D.depth() == CV_32F ? (Vec4d)*D.getMat().ptr<Vec4f>(): *D.getMat().ptr<Vec4d>();
Matx33d RR = Matx33d::eye();
if (!R.empty() && R.total() * R.channels() == 3)
{
Vec3d rvec;
R.getMat().convertTo(rvec, CV_64F);
RR = Affine3d(rvec).rotation();
}
else if (!R.empty() && R.size() == Size(3, 3))
R.getMat().convertTo(RR, CV_64F);
Matx33d PP = Matx33d::eye();
if (!P.empty())
P.getMat().colRange(0, 3).convertTo(PP, CV_64F);
Matx33d iR = (PP * RR).inv(cv::DECOMP_SVD);
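// Backward mapping: for every destination pixel (j, i) the loop below evaluates
// (x, y, w)^T = iR * (j, i, 1)^T incrementally (one addition per coordinate per step),
// then projects (x/w, y/w) through the fisheye model to find the source pixel.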
for( int i = 0; i < size.height; ++i)
{
float* m1f = map1.getMat().ptr<float>(i);
float* m2f = map2.getMat().ptr<float>(i);
short* m1 = (short*)m1f;
ushort* m2 = (ushort*)m2f;
double _x = i*iR(0, 1) + iR(0, 2),
_y = i*iR(1, 1) + iR(1, 2),
_w = i*iR(2, 1) + iR(2, 2);
for( int j = 0; j < size.width; ++j)
{
double u, v;
if( _w <= 0)
{
u = (_x > 0) ? -std::numeric_limits<double>::infinity() : std::numeric_limits<double>::infinity();
v = (_y > 0) ? -std::numeric_limits<double>::infinity() : std::numeric_limits<double>::infinity();
}
else
{
double x = _x/_w, y = _y/_w;
double r = sqrt(x*x + y*y);
double theta = std::atan(r);
double theta2 = theta*theta, theta4 = theta2*theta2, theta6 = theta4*theta2, theta8 = theta4*theta4;
double theta_d = theta * (1 + k[0]*theta2 + k[1]*theta4 + k[2]*theta6 + k[3]*theta8);
double scale = (r == 0) ? 1.0 : theta_d / r;
u = f[0]*x*scale + c[0];
v = f[1]*y*scale + c[1];
}
if( m1type == CV_16SC2 )
{
int iu = cv::saturate_cast<int>(u*cv::INTER_TAB_SIZE);
int iv = cv::saturate_cast<int>(v*cv::INTER_TAB_SIZE);
m1[j*2+0] = (short)(iu >> cv::INTER_BITS);
m1[j*2+1] = (short)(iv >> cv::INTER_BITS);
m2[j] = (ushort)((iv & (cv::INTER_TAB_SIZE-1))*cv::INTER_TAB_SIZE + (iu & (cv::INTER_TAB_SIZE-1)));
}
else if( m1type == CV_32FC1 )
{
m1f[j] = (float)u;
m2f[j] = (float)v;
}
_x += iR(0, 0);
_y += iR(1, 0);
_w += iR(2, 0);
}
}
}
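/* Usage sketch (illustrative only): compute the maps once, then remap any number of frames.
   K and D are assumed to come from a prior fisheye calibration; frame is any input image.

       cv::Mat map1, map2, undistorted;
       cv::fisheye::initUndistortRectifyMap(K, D, cv::Matx33d::eye(), K,
                                            frame.size(), CV_16SC2, map1, map2);
       cv::remap(frame, undistorted, map1, map2, cv::INTER_LINEAR);
*/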
//////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// cv::fisheye::undistortImage
void cv::fisheye::undistortImage(InputArray distorted, OutputArray undistorted,
InputArray K, InputArray D, InputArray Knew, const Size& new_size)
{
CV_INSTRUMENT_REGION();
Size size = !new_size.empty() ? new_size : distorted.size();
Mat map1, map2;
fisheye::initUndistortRectifyMap(K, D, Matx33d::eye(), Knew, size, CV_16SC2, map1, map2 );
cv::remap(distorted, undistorted, map1, map2, INTER_LINEAR, BORDER_CONSTANT);
}
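/* Usage sketch (illustrative only): Knew controls the framing of the result; shrinking its
   focal terms widens the rendered field of view. K, D and input are assumed given.

       cv::Matx33d Knew = K;
       Knew(0, 0) *= 0.5; Knew(1, 1) *= 0.5;   // halve fx, fy -> zoom out
       cv::fisheye::undistortImage(input, output, K, D, Knew);
*/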
//////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// cv::fisheye::estimateNewCameraMatrixForUndistortRectify
void cv::fisheye::estimateNewCameraMatrixForUndistortRectify(InputArray K, InputArray D, const Size &image_size, InputArray R,
OutputArray P, double balance, const Size& new_size, double fov_scale)
{
CV_INSTRUMENT_REGION();
CV_Assert( K.size() == Size(3, 3) && (K.depth() == CV_32F || K.depth() == CV_64F));
CV_Assert(D.empty() || ((D.total() == 4) && (D.depth() == CV_32F || D.depth() == CV_64F)));
int w = image_size.width, h = image_size.height;
balance = std::min(std::max(balance, 0.0), 1.0);
Mat points(1, 4, CV_64FC2);
Vec2d* pptr = points.ptr<Vec2d>();
pptr[0] = Vec2d(w/2, 0);
pptr[1] = Vec2d(w, h/2);
pptr[2] = Vec2d(w/2, h);
pptr[3] = Vec2d(0, h/2);
fisheye::undistortPoints(points, points, K, D, R);
cv::Scalar center_mass = mean(points);
Vec2d cn(center_mass.val);
double aspect_ratio = (K.depth() == CV_32F) ? K.getMat().at<float >(0,0)/K.getMat().at<float> (1,1)
: K.getMat().at<double>(0,0)/K.getMat().at<double>(1,1);
// convert to unit aspect ratio
cn[1] *= aspect_ratio;
for(size_t i = 0; i < points.total(); ++i)
pptr[i][1] *= aspect_ratio;
double minx = DBL_MAX, miny = DBL_MAX, maxx = -DBL_MAX, maxy = -DBL_MAX;
for(size_t i = 0; i < points.total(); ++i)
{
miny = std::min(miny, pptr[i][1]);
maxy = std::max(maxy, pptr[i][1]);
minx = std::min(minx, pptr[i][0]);
maxx = std::max(maxx, pptr[i][0]);
}
double f1 = w * 0.5/(cn[0] - minx);
double f2 = w * 0.5/(maxx - cn[0]);
double f3 = h * 0.5 * aspect_ratio/(cn[1] - miny);
double f4 = h * 0.5 * aspect_ratio/(maxy - cn[1]);
double fmin = std::min(f1, std::min(f2, std::min(f3, f4)));
double fmax = std::max(f1, std::max(f2, std::max(f3, f4)));
double f = balance * fmin + (1.0 - balance) * fmax;
f *= fov_scale > 0 ? 1.0/fov_scale : 1.0;
Vec2d new_f(f, f), new_c = -cn * f + Vec2d(w, h * aspect_ratio) * 0.5;
// restore aspect ratio
new_f[1] /= aspect_ratio;
new_c[1] /= aspect_ratio;
if (!new_size.empty())
{
double rx = new_size.width /(double)image_size.width;
double ry = new_size.height/(double)image_size.height;
new_f[0] *= rx; new_f[1] *= ry;
new_c[0] *= rx; new_c[1] *= ry;
}
Mat(Matx33d(new_f[0], 0, new_c[0],
0, new_f[1], new_c[1],
0, 0, 1)).convertTo(P, P.empty() ? K.type() : P.type());
}
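/* Usage sketch (illustrative only): per the computation above, f = balance*fmin + (1-balance)*fmax,
   so balance = 0 picks fmax (tightest framing) and balance = 1 picks fmin (zoomed out far enough
   that the border midpoints stay visible). K, D and imageSize are assumed given.

       cv::Matx33d newK;
       cv::fisheye::estimateNewCameraMatrixForUndistortRectify(K, D, imageSize,
           cv::Matx33d::eye(), newK, 0.0);  // balance = 0.0
       cv::fisheye::undistortImage(img, out, K, D, newK);
*/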
//////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// cv::fisheye::stereoRectify
void cv::fisheye::stereoRectify( InputArray K1, InputArray D1, InputArray K2, InputArray D2, const Size& imageSize,
InputArray _R, InputArray _tvec, OutputArray R1, OutputArray R2, OutputArray P1, OutputArray P2,
OutputArray Q, int flags, const Size& newImageSize, double balance, double fov_scale)
{
CV_INSTRUMENT_REGION();
CV_Assert((_R.size() == Size(3, 3) || _R.total() * _R.channels() == 3) && (_R.depth() == CV_32F || _R.depth() == CV_64F));
CV_Assert(_tvec.total() * _tvec.channels() == 3 && (_tvec.depth() == CV_32F || _tvec.depth() == CV_64F));
Mat aaa = _tvec.getMat().reshape(3, 1);
Vec3d rvec; // Rodrigues vector
if (_R.size() == Size(3, 3))
{
Matx33d rmat;
_R.getMat().convertTo(rmat, CV_64F);
rvec = Affine3d(rmat).rvec();
}
else if (_R.total() * _R.channels() == 3)
_R.getMat().convertTo(rvec, CV_64F);
Vec3d tvec;
_tvec.getMat().convertTo(tvec, CV_64F);
// rectification algorithm
rvec *= -0.5; // get average rotation
Matx33d r_r;
Rodrigues(rvec, r_r); // rotate cameras to same orientation by averaging
Vec3d t = r_r * tvec;
Vec3d uu(t[0] > 0 ? 1 : -1, 0, 0);
// calculate global Z rotation
Vec3d ww = t.cross(uu);
double nw = norm(ww);
if (nw > 0.0)
ww *= std::acos(fabs(t[0])/cv::norm(t))/nw;
Matx33d wr;
Rodrigues(ww, wr);
// apply to both views
Matx33d ri1 = wr * r_r.t();
Mat(ri1, false).convertTo(R1, R1.empty() ? CV_64F : R1.type());
Matx33d ri2 = wr * r_r;
Mat(ri2, false).convertTo(R2, R2.empty() ? CV_64F : R2.type());
Vec3d tnew = ri2 * tvec;
// calculate projection/camera matrices. these contain the relevant rectified image internal params (fx, fy=fx, cx, cy)
Matx33d newK1, newK2;
estimateNewCameraMatrixForUndistortRectify(K1, D1, imageSize, R1, newK1, balance, newImageSize, fov_scale);
estimateNewCameraMatrixForUndistortRectify(K2, D2, imageSize, R2, newK2, balance, newImageSize, fov_scale);
double fc_new = std::min(newK1(1,1), newK2(1,1));
Point2d cc_new[2] = { Vec2d(newK1(0, 2), newK1(1, 2)), Vec2d(newK2(0, 2), newK2(1, 2)) };
// The vertical focal length must be the same for both images to keep the epipolar constraint, so use fy for fx as well.
// For simplicity, set the principal points for both cameras to be the average
// of the two principal points (either one of or both x- and y- coordinates)
if( flags & CALIB_ZERO_DISPARITY )
cc_new[0] = cc_new[1] = (cc_new[0] + cc_new[1]) * 0.5;
else
cc_new[0].y = cc_new[1].y = (cc_new[0].y + cc_new[1].y)*0.5;
Mat(Matx34d(fc_new, 0, cc_new[0].x, 0,
0, fc_new, cc_new[0].y, 0,
0, 0, 1, 0), false).convertTo(P1, P1.empty() ? CV_64F : P1.type());
Mat(Matx34d(fc_new, 0, cc_new[1].x, tnew[0]*fc_new, // baseline * focal length
0, fc_new, cc_new[1].y, 0,
0, 0, 1, 0), false).convertTo(P2, P2.empty() ? CV_64F : P2.type());
if (Q.needed())
Mat(Matx44d(1, 0, 0, -cc_new[0].x,
0, 1, 0, -cc_new[0].y,
0, 0, 0, fc_new,
0, 0, -1./tnew[0], (cc_new[0].x - cc_new[1].x)/tnew[0]), false).convertTo(Q, Q.empty() ? CV_64F : Q.depth());
}
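/* Usage sketch (illustrative only): K1/D1/K2/D2 and the extrinsics R, T are assumed to come
   from a prior fisheye stereo calibration.

       cv::Mat R1, R2, P1, P2, Q;
       cv::fisheye::stereoRectify(K1, D1, K2, D2, imageSize, R, T,
                                  R1, R2, P1, P2, Q, cv::CALIB_ZERO_DISPARITY,
                                  imageSize, 0.0, 1.0);  // balance = 0.0, fov_scale = 1.0
       // R1/P1 and R2/P2 then feed fisheye::initUndistortRectifyMap for each camera
*/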
//////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// cv::fisheye::calibrate
@ -1226,10 +1226,6 @@ public:
~CV_StereoCalibrationTest();
void clear();
protected:
bool checkPandROI( int test_case_idx,
const Mat& M, const Mat& D, const Mat& R,
const Mat& P, Size imgsize, Rect roi );
// covers of tested functions
virtual double calibrateStereoCamera( const vector<vector<Point3f> >& objectPoints,
const vector<vector<Point2f> >& imagePoints1,
@ -1278,52 +1274,6 @@ void CV_StereoCalibrationTest::clear()
cvtest::BaseTest::clear();
}
bool CV_StereoCalibrationTest::checkPandROI( int test_case_idx, const Mat& M, const Mat& D, const Mat& R,
const Mat& P, Size imgsize, Rect roi )
{
const double eps = 0.05;
const int N = 21;
int x, y, k;
vector<Point2f> pts, upts;
// step 1. check that all the original points belong to the destination image
for( y = 0; y < N; y++ )
for( x = 0; x < N; x++ )
pts.push_back(Point2f((float)x*imgsize.width/(N-1), (float)y*imgsize.height/(N-1)));
undistortPoints(pts, upts, M, D, R, P );
for( k = 0; k < N*N; k++ )
if( upts[k].x < -imgsize.width*eps || upts[k].x > imgsize.width*(1+eps) ||
upts[k].y < -imgsize.height*eps || upts[k].y > imgsize.height*(1+eps) )
{
ts->printf(cvtest::TS::LOG, "Test #%d. The point (%g, %g) was mapped to (%g, %g) which is out of image\n",
test_case_idx, pts[k].x, pts[k].y, upts[k].x, upts[k].y);
return false;
}
// step 2. check that all the points inside ROI belong to the original source image
Mat temp(imgsize, CV_8U), utemp, map1, map2;
temp = Scalar::all(1);
initUndistortRectifyMap(M, D, R, P, imgsize, CV_16SC2, map1, map2);
remap(temp, utemp, map1, map2, INTER_LINEAR);
if(roi.x < 0 || roi.y < 0 || roi.x + roi.width > imgsize.width || roi.y + roi.height > imgsize.height)
{
ts->printf(cvtest::TS::LOG, "Test #%d. The ROI=(%d, %d, %d, %d) is outside of the imge rectangle\n",
test_case_idx, roi.x, roi.y, roi.width, roi.height);
return false;
}
double s = sum(utemp(roi))[0];
if( s > roi.area() || roi.area() - s > roi.area()*(1-eps) )
{
ts->printf(cvtest::TS::LOG, "Test #%d. The ratio of black pixels inside the valid ROI (~%g%%) is too large\n",
test_case_idx, s*100./roi.area());
return false;
}
return true;
}
int CV_StereoCalibrationTest::compare(double* val, double* ref_val, int len,
double eps, const char* param_name )
{
@ -1529,38 +1479,6 @@ void CV_StereoCalibrationTest::run( int )
Mat R1, R2, P1, P2, Q;
Rect roi1, roi2;
rectify(M1, D1, M2, D2, imgsize, R, T, R1, R2, P1, P2, Q, 1, imgsize, &roi1, &roi2, 0);
Mat eye33 = Mat::eye(3,3,CV_64F);
Mat R1t = R1.t(), R2t = R2.t();
if( cvtest::norm(R1t*R1 - eye33, NORM_L2) > 0.01 ||
cvtest::norm(R2t*R2 - eye33, NORM_L2) > 0.01 ||
abs(determinant(F)) > 0.01)
{
ts->printf( cvtest::TS::LOG, "The computed (by rectify) R1 and R2 are not orthogonal, "
"or the computed (by calibrate) F is not singular, testcase %d\n", testcase);
ts->set_failed_test_info( cvtest::TS::FAIL_INVALID_OUTPUT );
return;
}
if(!checkPandROI(testcase, M1, D1, R1, P1, imgsize, roi1))
{
ts->set_failed_test_info( cvtest::TS::FAIL_BAD_ACCURACY );
return;
}
if(!checkPandROI(testcase, M2, D2, R2, P2, imgsize, roi2))
{
ts->set_failed_test_info( cvtest::TS::FAIL_BAD_ACCURACY );
return;
}
//check that Tx after rectification is equal to distance between cameras
double tx = fabs(P2.at<double>(0, 3) / P2.at<double>(0, 0));
if (fabs(tx - cvtest::norm(T, NORM_L2)) > 1e-5)
{
ts->set_failed_test_info( cvtest::TS::FAIL_BAD_ACCURACY );
return;
}
//check that Q reprojects points before the camera
double testPoint[4] = {0.0, 0.0, 100.0, 1.0};
@ -1826,7 +1744,7 @@ void CV_StereoCalibrationTest_CPP::rectify( const Mat& cameraMatrix1, const Mat&
Rect* validPixROI1, Rect* validPixROI2, int flags )
{
stereoRectify( cameraMatrix1, distCoeffs1, cameraMatrix2, distCoeffs2,
imageSize, R, T, R1, R2, P1, P2, Q, flags, alpha, newImageSize, validPixROI1, validPixROI2 );
}
@ -2223,82 +2141,6 @@ TEST(Calib3d_StereoCalibrate_CPP, extended)
EXPECT_TRUE(err.total() == 2);
}
TEST(Calib3d_StereoCalibrate, regression_10791)
{
const Matx33d M1(
853.1387981631528, 0, 704.154907802121,
0, 853.6445089162528, 520.3600712930319,
0, 0, 1
);
const Matx33d M2(
848.6090216909176, 0, 701.6162856852185,
0, 849.7040162357157, 509.1864036137,
0, 0, 1
);
const Matx<double, 14, 1> D1(-6.463598629567206, 79.00104930508179, -0.0001006144444464403, -0.0005437499822299972,
12.56900616588467, -6.056719942752855, 76.3842481414836, 45.57460250612659,
0, 0, 0, 0, 0, 0);
const Matx<double, 14, 1> D2(0.6123436439798265, -0.4671756923224087, -0.0001261947899033442, -0.000597334584036978,
-0.05660119809538371, 1.037075740629769, -0.3076042835831711, -0.2502169324283623,
0, 0, 0, 0, 0, 0);
const Matx33d R(
0.9999926627018476, -0.0001095586963765905, 0.003829169539302921,
0.0001021735876758584, 0.9999981346680941, 0.0019287874145156,
-0.003829373712065528, -0.001928382022437616, 0.9999908085776333
);
const Matx31d T(-58.9161771697128, -0.01581306249996402, -0.8492960216760961);
const Size imageSize(1280, 960);
Mat R1, R2, P1, P2, Q;
Rect roi1, roi2;
stereoRectify(M1, D1, M2, D2, imageSize, R, T,
R1, R2, P1, P2, Q,
CALIB_ZERO_DISPARITY, 1, imageSize, &roi1, &roi2);
EXPECT_GE(roi1.area(), 400*300) << roi1;
EXPECT_GE(roi2.area(), 400*300) << roi2;
}
TEST(Calib3d_StereoCalibrate, regression_11131)
{
const Matx33d M1(
1457.572438721727, 0, 1212.945694211622,
0, 1457.522226502963, 1007.32058848921,
0, 0, 1
);
const Matx33d M2(
1460.868570835972, 0, 1215.024068023046,
0, 1460.791367088, 1011.107202932225,
0, 0, 1
);
const Matx<double, 5, 1> D1(0, 0, 0, 0, 0);
const Matx<double, 5, 1> D2(0, 0, 0, 0, 0);
const Matx33d R(
0.9985404059825475, 0.02963547172078553, -0.04515303352041626,
-0.03103795276460111, 0.9990471552537432, -0.03068268351343364,
0.04420071389006859, 0.03203935697372317, 0.9985087763742083
);
const Matx31d T(0.9995500167379527, 0.0116311595111068, 0.02764923448462666);
const Size imageSize(2456, 2058);
Mat R1, R2, P1, P2, Q;
Rect roi1, roi2;
stereoRectify(M1, D1, M2, D2, imageSize, R, T,
R1, R2, P1, P2, Q,
CALIB_ZERO_DISPARITY, 1, imageSize, &roi1, &roi2);
EXPECT_GT(P1.at<double>(0, 0), 0);
EXPECT_GT(P2.at<double>(0, 0), 0);
EXPECT_GT(R1.at<double>(0, 0), 0);
EXPECT_GT(R2.at<double>(0, 0), 0);
EXPECT_GE(roi1.area(), 400*300) << roi1;
EXPECT_GE(roi2.area(), 400*300) << roi2;
}
TEST(Calib_StereoCalibrate, regression_22421)
{
cv::Mat K1, K2, dist1, dist2;
@ -63,347 +63,37 @@ protected:
protected:
std::string combine(const std::string& _item1, const std::string& _item2);
static void merge4(const cv::Mat& tl, const cv::Mat& tr, const cv::Mat& bl, const cv::Mat& br, cv::Mat& merged);
};

// --- removed from this file ---
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// TESTS::

TEST_F(fisheyeTest, projectPoints)
{
double cols = this->imageSize.width,
rows = this->imageSize.height;

const int N = 20;
cv::Mat distorted0(1, N*N, CV_64FC2), undist1, undist2, distorted1, distorted2;
undist2.create(distorted0.size(), CV_MAKETYPE(distorted0.depth(), 3));
cv::Vec2d* pts = distorted0.ptr<cv::Vec2d>();

cv::Vec2d c(this->K(0, 2), this->K(1, 2));
for(int y = 0, k = 0; y < N; ++y)
for(int x = 0; x < N; ++x)
{
cv::Vec2d point(x*cols/(N-1.f), y*rows/(N-1.f));
pts[k++] = (point - c) * 0.85 + c;
}

cv::fisheye::undistortPoints(distorted0, undist1, this->K, this->D);

cv::Vec2d* u1 = undist1.ptr<cv::Vec2d>();
cv::Vec3d* u2 = undist2.ptr<cv::Vec3d>();
for(int i = 0; i < (int)distorted0.total(); ++i)
u2[i] = cv::Vec3d(u1[i][0], u1[i][1], 1.0);

cv::fisheye::distortPoints(undist1, distorted1, this->K, this->D);
cv::fisheye::projectPoints(undist2, distorted2, cv::Vec3d::all(0), cv::Vec3d::all(0), this->K, this->D);

EXPECT_MAT_NEAR(distorted0, distorted1, 1e-10);
EXPECT_MAT_NEAR(distorted0, distorted2, 1e-10);
}

// --- added to this file ---
const cv::Size fisheyeTest::imageSize(1280, 800);

const cv::Matx33d fisheyeTest::K(558.478087865323, 0, 620.458515360843,
0, 560.506767351568, 381.939424848348,
0, 0, 1);

const cv::Vec4d fisheyeTest::D(-0.0014613319981768, -0.00329861110580401, 0.00605760088590183, -0.00374209380722371);

const cv::Matx33d fisheyeTest::R ( 9.9756700084424932e-01, 6.9698277640183867e-02, 1.4929569991321144e-03,
-6.9711825162322980e-02, 9.9748249845531767e-01, 1.2997180766418455e-02,
-5.8331736398316541e-04,-1.3069635393884985e-02, 9.9991441852366736e-01);

const cv::Vec3d fisheyeTest::T(-9.9217369356044638e-02, 3.1741831972356663e-03, 1.8551007952921010e-04);

std::string fisheyeTest::combine(const std::string& _item1, const std::string& _item2)
{
std::string item1 = _item1, item2 = _item2;
std::replace(item1.begin(), item1.end(), '\\', '/');
std::replace(item2.begin(), item2.end(), '\\', '/');

if (item1.empty())
return item2;

if (item2.empty())
return item1;

char last = item1[item1.size()-1];
return item1 + (last != '/' ? "/" : "") + item2;
}
TEST_F(fisheyeTest, distortUndistortPoints)
{
int width = imageSize.width;
int height = imageSize.height;
/* Create test points */
std::vector<cv::Point2d> points0Vector;
cv::Mat principalPoints = (cv::Mat_<double>(5, 2) << K(0, 2), K(1, 2), // (cx, cy)
/* Image corners */
0, 0,
0, height,
width, 0,
width, height
);
/* Random points inside image */
cv::Mat xy[2] = {};
xy[0].create(100, 1, CV_64F);
theRNG().fill(xy[0], cv::RNG::UNIFORM, 0, width); // x
xy[1].create(100, 1, CV_64F);
theRNG().fill(xy[1], cv::RNG::UNIFORM, 0, height); // y
cv::Mat randomPoints;
merge(xy, 2, randomPoints);
cv::Mat points0;
cv::vconcat(principalPoints.reshape(2), randomPoints, points0);
/* Test with random D set */
for (size_t i = 0; i < 10; ++i) {
cv::Mat distortion(1, 4, CV_64F);
theRNG().fill(distortion, cv::RNG::UNIFORM, -0.00001, 0.00001);
/* Distort -> Undistort */
cv::Mat distortedPoints;
cv::fisheye::distortPoints(points0, distortedPoints, K, distortion);
cv::Mat undistortedPoints;
cv::fisheye::undistortPoints(distortedPoints, undistortedPoints, K, distortion);
EXPECT_MAT_NEAR(points0, undistortedPoints, 1e-8);
/* Undistort -> Distort */
cv::fisheye::undistortPoints(points0, undistortedPoints, K, distortion);
cv::fisheye::distortPoints(undistortedPoints, distortedPoints, K, distortion);
EXPECT_MAT_NEAR(points0, distortedPoints, 1e-8);
}
}
TEST_F(fisheyeTest, undistortImage)
{
// we use it to reduce patch size for images in testdata
auto throwAwayHalf = [](Mat img)
{
int whalf = img.cols / 2, hhalf = img.rows / 2;
Rect tl(0, 0, whalf, hhalf), br(whalf, hhalf, whalf, hhalf);
img(tl) = 0;
img(br) = 0;
};
cv::Matx33d theK = this->K;
cv::Mat theD = cv::Mat(this->D);
std::string file = combine(datasets_repository_path, "/calib-3_stereo_from_JY/left/stereo_pair_014.jpg");
cv::Matx33d newK = theK;
cv::Mat distorted = cv::imread(file), undistorted;
{
newK(0, 0) = 100;
newK(1, 1) = 100;
cv::fisheye::undistortImage(distorted, undistorted, theK, theD, newK);
std::string imageFilename = combine(datasets_repository_path, "new_f_100.png");
cv::Mat correct = cv::imread(imageFilename);
ASSERT_FALSE(correct.empty()) << "Correct image " << imageFilename.c_str() << " can not be read" << std::endl;
throwAwayHalf(correct);
throwAwayHalf(undistorted);
EXPECT_MAT_NEAR(correct, undistorted, 1e-10);
}
{
double balance = 1.0;
cv::fisheye::estimateNewCameraMatrixForUndistortRectify(theK, theD, distorted.size(), cv::noArray(), newK, balance);
cv::fisheye::undistortImage(distorted, undistorted, theK, theD, newK);
std::string imageFilename = combine(datasets_repository_path, "balance_1.0.png");
cv::Mat correct = cv::imread(imageFilename);
ASSERT_FALSE(correct.empty()) << "Correct image " << imageFilename.c_str() << " can not be read" << std::endl;
throwAwayHalf(correct);
throwAwayHalf(undistorted);
EXPECT_MAT_NEAR(correct, undistorted, 1e-10);
}
{
double balance = 0.0;
cv::fisheye::estimateNewCameraMatrixForUndistortRectify(theK, theD, distorted.size(), cv::noArray(), newK, balance);
cv::fisheye::undistortImage(distorted, undistorted, theK, theD, newK);
std::string imageFilename = combine(datasets_repository_path, "balance_0.0.png");
cv::Mat correct = cv::imread(imageFilename);
ASSERT_FALSE(correct.empty()) << "Correct image " << imageFilename.c_str() << " can not be read" << std::endl;
throwAwayHalf(correct);
throwAwayHalf(undistorted);
EXPECT_MAT_NEAR(correct, undistorted, 1e-10);
}
}
TEST_F(fisheyeTest, undistortAndDistortImage)
{
cv::Matx33d K_src = this->K;
cv::Mat D_src = cv::Mat(this->D);
std::string file = combine(datasets_repository_path, "/calib-3_stereo_from_JY/left/stereo_pair_014.jpg");
cv::Matx33d K_dst = K_src;
cv::Mat image = cv::imread(file), image_projected;
cv::Vec4d D_dst_vec (-1.0, 0.0, 0.0, 0.0);
cv::Mat D_dst = cv::Mat(D_dst_vec);
int imageWidth = (int)this->imageSize.width;
int imageHeight = (int)this->imageSize.height;
cv::Mat imagePoints(imageHeight, imageWidth, CV_32FC2), undPoints, distPoints;
cv::Vec2f* pts = imagePoints.ptr<cv::Vec2f>();
for(int y = 0, k = 0; y < imageHeight; ++y)
{
for(int x = 0; x < imageWidth; ++x)
{
cv::Vec2f point((float)x, (float)y);
pts[k++] = point;
}
}
cv::fisheye::undistortPoints(imagePoints, undPoints, K_dst, D_dst);
cv::fisheye::distortPoints(undPoints, distPoints, K_src, D_src);
cv::remap(image, image_projected, distPoints, cv::noArray(), cv::INTER_LINEAR);
float dx, dy, r_sq;
float R_MAX = 250;
float imageCenterX = (float)imageWidth / 2;
float imageCenterY = (float)imageHeight / 2;
cv::Mat undPointsGt(imageHeight, imageWidth, CV_32FC2);
cv::Mat imageGt(imageHeight, imageWidth, CV_8UC3);
for(int y = 0; y < imageHeight; ++y)
{
for(int x = 0; x < imageWidth; ++x)
{
dx = x - imageCenterX;
dy = y - imageCenterY;
r_sq = dy * dy + dx * dx;
Vec2f & und_vec = undPoints.at<Vec2f>(y,x);
Vec3b & pixel = image_projected.at<Vec3b>(y,x);
Vec2f & undist_vec_gt = undPointsGt.at<Vec2f>(y,x);
Vec3b & pixel_gt = imageGt.at<Vec3b>(y,x);
if (r_sq > R_MAX * R_MAX)
{
undist_vec_gt[0] = -1e6;
undist_vec_gt[1] = -1e6;
pixel_gt[0] = 0;
pixel_gt[1] = 0;
pixel_gt[2] = 0;
}
else
{
undist_vec_gt[0] = und_vec[0];
undist_vec_gt[1] = und_vec[1];
pixel_gt[0] = pixel[0];
pixel_gt[1] = pixel[1];
pixel_gt[2] = pixel[2];
}
}
}
EXPECT_MAT_NEAR(undPoints, undPointsGt, 1e-10);
EXPECT_MAT_NEAR(image_projected, imageGt, 1e-10);
Vec2f dist_point_1 = distPoints.at<Vec2f>(400, 640);
Vec2f dist_point_1_gt(640.044f, 400.041f);
Vec2f dist_point_2 = distPoints.at<Vec2f>(400, 440);
Vec2f dist_point_2_gt(409.731f, 403.029f);
Vec2f dist_point_3 = distPoints.at<Vec2f>(200, 640);
Vec2f dist_point_3_gt(643.341f, 168.896f);
Vec2f dist_point_4 = distPoints.at<Vec2f>(300, 480);
Vec2f dist_point_4_gt(463.402f, 290.317f);
Vec2f dist_point_5 = distPoints.at<Vec2f>(550, 750);
Vec2f dist_point_5_gt(797.51f, 611.637f);
EXPECT_MAT_NEAR(dist_point_1, dist_point_1_gt, 1e-2);
EXPECT_MAT_NEAR(dist_point_2, dist_point_2_gt, 1e-2);
EXPECT_MAT_NEAR(dist_point_3, dist_point_3_gt, 1e-2);
EXPECT_MAT_NEAR(dist_point_4, dist_point_4_gt, 1e-2);
EXPECT_MAT_NEAR(dist_point_5, dist_point_5_gt, 1e-2);
// Add the "--test_debug" to arguments for file output
if (cvtest::debugLevel > 0)
cv::imwrite(combine(datasets_repository_path, "new_distortion.png"), image_projected);
}
TEST_F(fisheyeTest, jacobians)
{
int n = 10;
cv::Mat X(1, n, CV_64FC3);
cv::Mat om(3, 1, CV_64F), theT(3, 1, CV_64F);
cv::Mat f(2, 1, CV_64F), c(2, 1, CV_64F);
cv::Mat k(4, 1, CV_64F);
double alpha;
cv::RNG r;
r.fill(X, cv::RNG::NORMAL, 2, 1);
X = cv::abs(X) * 10;
r.fill(om, cv::RNG::NORMAL, 0, 1);
om = cv::abs(om);
r.fill(theT, cv::RNG::NORMAL, 0, 1);
theT = cv::abs(theT); theT.at<double>(2) = 4; theT *= 10;
r.fill(f, cv::RNG::NORMAL, 0, 1);
f = cv::abs(f) * 1000;
r.fill(c, cv::RNG::NORMAL, 0, 1);
c = cv::abs(c) * 1000;
r.fill(k, cv::RNG::NORMAL, 0, 1);
k*= 0.5;
alpha = 0.01*r.gaussian(1);
cv::Mat x1, x2, xpred;
cv::Matx33d theK(f.at<double>(0), alpha * f.at<double>(0), c.at<double>(0),
0, f.at<double>(1), c.at<double>(1),
0, 0, 1);
cv::Mat jacobians;
cv::fisheye::projectPoints(X, x1, om, theT, theK, k, alpha, jacobians);
//test on T:
cv::Mat dT(3, 1, CV_64FC1);
r.fill(dT, cv::RNG::NORMAL, 0, 1);
dT *= 1e-9*cv::norm(theT);
cv::Mat T2 = theT + dT;
cv::fisheye::projectPoints(X, x2, om, T2, theK, k, alpha, cv::noArray());
xpred = x1 + cv::Mat(jacobians.colRange(11,14) * dT).reshape(2, 1);
CV_Assert (cv::norm(x2 - xpred) < 1e-10);
//test on om:
cv::Mat dom(3, 1, CV_64FC1);
r.fill(dom, cv::RNG::NORMAL, 0, 1);
dom *= 1e-9*cv::norm(om);
cv::Mat om2 = om + dom;
cv::fisheye::projectPoints(X, x2, om2, theT, theK, k, alpha, cv::noArray());
xpred = x1 + cv::Mat(jacobians.colRange(8,11) * dom).reshape(2, 1);
CV_Assert (cv::norm(x2 - xpred) < 1e-10);
//test on f:
cv::Mat df(2, 1, CV_64FC1);
r.fill(df, cv::RNG::NORMAL, 0, 1);
df *= 1e-9*cv::norm(f);
cv::Matx33d K2 = theK + cv::Matx33d(df.at<double>(0), df.at<double>(0) * alpha, 0, 0, df.at<double>(1), 0, 0, 0, 0);
cv::fisheye::projectPoints(X, x2, om, theT, K2, k, alpha, cv::noArray());
xpred = x1 + cv::Mat(jacobians.colRange(0,2) * df).reshape(2, 1);
CV_Assert (cv::norm(x2 - xpred) < 1e-10);
//test on c:
cv::Mat dc(2, 1, CV_64FC1);
r.fill(dc, cv::RNG::NORMAL, 0, 1);
dc *= 1e-9*cv::norm(c);
K2 = theK + cv::Matx33d(0, 0, dc.at<double>(0), 0, 0, dc.at<double>(1), 0, 0, 0);
cv::fisheye::projectPoints(X, x2, om, theT, K2, k, alpha, cv::noArray());
xpred = x1 + cv::Mat(jacobians.colRange(2,4) * dc).reshape(2, 1);
CV_Assert (cv::norm(x2 - xpred) < 1e-10);
//test on k:
cv::Mat dk(4, 1, CV_64FC1);
r.fill(dk, cv::RNG::NORMAL, 0, 1);
dk *= 1e-9*cv::norm(k);
cv::Mat k2 = k + dk;
cv::fisheye::projectPoints(X, x2, om, theT, theK, k2, alpha, cv::noArray());
xpred = x1 + cv::Mat(jacobians.colRange(4,8) * dk).reshape(2, 1);
CV_Assert (cv::norm(x2 - xpred) < 1e-10);
//test on alpha:
cv::Mat dalpha(1, 1, CV_64FC1);
r.fill(dalpha, cv::RNG::NORMAL, 0, 1);
dalpha *= 1e-9*cv::norm(f);
double alpha2 = alpha + dalpha.at<double>(0);
K2 = theK + cv::Matx33d(0, f.at<double>(0) * dalpha.at<double>(0), 0, 0, 0, 0, 0, 0, 0);
cv::fisheye::projectPoints(X, x2, om, theT, theK, k, alpha2, cv::noArray());
xpred = x1 + cv::Mat(jacobians.col(14) * dalpha).reshape(2, 1);
CV_Assert (cv::norm(x2 - xpred) < 1e-10);
}

TEST_F(fisheyeTest, Calibration)
@ -605,113 +295,6 @@ TEST_F(fisheyeTest, EstimateUncertainties)
CV_Assert(errors.alpha == 0);
}
TEST_F(fisheyeTest, stereoRectify)
{
// For consistency purposes
CV_StaticAssert(
static_cast<int>(cv::CALIB_ZERO_DISPARITY) == static_cast<int>(cv::fisheye::CALIB_ZERO_DISPARITY),
"For the purpose of continuity the following should be true: cv::CALIB_ZERO_DISPARITY == cv::fisheye::CALIB_ZERO_DISPARITY"
);
const std::string folder = combine(datasets_repository_path, "calib-3_stereo_from_JY");
cv::Size calibration_size = this->imageSize, requested_size = calibration_size;
cv::Matx33d K1 = this->K, K2 = K1;
cv::Mat D1 = cv::Mat(this->D), D2 = D1;
cv::Vec3d theT = this->T;
cv::Matx33d theR = this->R;
double balance = 0.0, fov_scale = 1.1;
cv::Mat R1, R2, P1, P2, Q;
cv::fisheye::stereoRectify(K1, D1, K2, D2, calibration_size, theR, theT, R1, R2, P1, P2, Q,
cv::CALIB_ZERO_DISPARITY, requested_size, balance, fov_scale);
// Collected with these CMake flags: -DWITH_IPP=OFF -DCV_ENABLE_INTRINSICS=OFF -DCV_DISABLE_OPTIMIZATION=ON -DCMAKE_BUILD_TYPE=Debug
cv::Matx33d R1_ref(
0.9992853269091279, 0.03779164101000276, -0.0007920188690205426,
-0.03778569762983931, 0.9992646472015868, 0.006511981857667881,
0.001037534936357442, -0.006477400933964018, 0.9999784831677112
);
cv::Matx33d R2_ref(
0.9994868963898833, -0.03197579751378937, -0.001868774538573449,
0.03196298186616116, 0.9994677442608699, -0.0065265589947392,
0.002076471801477729, 0.006463478587068991, 0.9999769555891836
);
cv::Matx34d P1_ref(
420.9684016542647, 0, 586.3059567784627, 0,
0, 420.9684016542647, 374.8571836462291, 0,
0, 0, 1, 0
);
cv::Matx34d P2_ref(
420.9684016542647, 0, 586.3059567784627, -41.78881938824554,
0, 420.9684016542647, 374.8571836462291, 0,
0, 0, 1, 0
);
cv::Matx44d Q_ref(
1, 0, 0, -586.3059567784627,
0, 1, 0, -374.8571836462291,
0, 0, 0, 420.9684016542647,
0, 0, 10.07370889670733, -0
);
const double eps = 1e-10;
EXPECT_MAT_NEAR(R1_ref, R1, eps);
EXPECT_MAT_NEAR(R2_ref, R2, eps);
EXPECT_MAT_NEAR(P1_ref, P1, eps);
EXPECT_MAT_NEAR(P2_ref, P2, eps);
EXPECT_MAT_NEAR(Q_ref, Q, eps);
if (::testing::Test::HasFailure())
{
std::cout << "Actual values are:" << std::endl
<< "R1 =" << std::endl << R1 << std::endl
<< "R2 =" << std::endl << R2 << std::endl
<< "P1 =" << std::endl << P1 << std::endl
<< "P2 =" << std::endl << P2 << std::endl
<< "Q =" << std::endl << Q << std::endl;
}
if (cvtest::debugLevel == 0)
return;
// DEBUG code is below
cv::Mat lmapx, lmapy, rmapx, rmapy;
//rewrite for fisheye
cv::fisheye::initUndistortRectifyMap(K1, D1, R1, P1, requested_size, CV_32F, lmapx, lmapy);
cv::fisheye::initUndistortRectifyMap(K2, D2, R2, P2, requested_size, CV_32F, rmapx, rmapy);
cv::Mat l, r, lundist, rundist;
for (int i = 0; i < 34; ++i)
{
SCOPED_TRACE(cv::format("image %d", i));
l = imread(combine(folder, cv::format("left/stereo_pair_%03d.jpg", i)), cv::IMREAD_COLOR);
r = imread(combine(folder, cv::format("right/stereo_pair_%03d.jpg", i)), cv::IMREAD_COLOR);
ASSERT_FALSE(l.empty());
ASSERT_FALSE(r.empty());
int ndisp = 128;
cv::rectangle(l, cv::Rect(255, 0, 829, l.rows-1), cv::Scalar(0, 0, 255));
cv::rectangle(r, cv::Rect(255, 0, 829, l.rows-1), cv::Scalar(0, 0, 255));
cv::rectangle(r, cv::Rect(255-ndisp, 0, 829+ndisp ,l.rows-1), cv::Scalar(0, 0, 255));
cv::remap(l, lundist, lmapx, lmapy, cv::INTER_LINEAR);
cv::remap(r, rundist, rmapx, rmapy, cv::INTER_LINEAR);
for (int ii = 0; ii < lundist.rows; ii += 20)
{
cv::line(lundist, cv::Point(0, ii), cv::Point(lundist.cols, ii), cv::Scalar(0, 255, 0));
cv::line(rundist, cv::Point(0, ii), cv::Point(lundist.cols, ii), cv::Scalar(0, 255, 0));
}
cv::Mat rectification;
merge4(l, r, lundist, rundist, rectification);
// Add the "--test_debug" to arguments for file output
if (cvtest::debugLevel > 0)
cv::imwrite(cv::format("fisheye_rectification_AB_%03d.png", i), rectification);
}
}
TEST_F(fisheyeTest, stereoCalibrate)
{
const int n_images = 34;
@ -992,53 +575,6 @@ TEST_F(fisheyeTest, stereoCalibrateWithPerViewTransformations)
EXPECT_NEAR(rmsErrorStereoCalib, rmsErrorFromReprojectedImgPts, 1e-4);
}
TEST_F(fisheyeTest, estimateNewCameraMatrixForUndistortRectify)
{
cv::Size size(1920, 1080);
cv::Mat K_fullhd(3, 3, cv::DataType<double>::type);
K_fullhd.at<double>(0, 0) = 600.44477382;
K_fullhd.at<double>(0, 1) = 0.0;
K_fullhd.at<double>(0, 2) = 992.06425788;
K_fullhd.at<double>(1, 0) = 0.0;
K_fullhd.at<double>(1, 1) = 578.99298055;
K_fullhd.at<double>(1, 2) = 549.26826242;
K_fullhd.at<double>(2, 0) = 0.0;
K_fullhd.at<double>(2, 1) = 0.0;
K_fullhd.at<double>(2, 2) = 1.0;
cv::Mat K_new_truth(3, 3, cv::DataType<double>::type);
K_new_truth.at<double>(0, 0) = 387.5118215642316;
K_new_truth.at<double>(0, 1) = 0.0;
K_new_truth.at<double>(0, 2) = 1033.936556777084;
K_new_truth.at<double>(1, 0) = 0.0;
K_new_truth.at<double>(1, 1) = 373.6673784974842;
K_new_truth.at<double>(1, 2) = 538.794152656429;
K_new_truth.at<double>(2, 0) = 0.0;
K_new_truth.at<double>(2, 1) = 0.0;
K_new_truth.at<double>(2, 2) = 1.0;
cv::Mat D_fullhd(4, 1, cv::DataType<double>::type);
D_fullhd.at<double>(0, 0) = -0.05090103223466704;
D_fullhd.at<double>(1, 0) = 0.030944413642173308;
D_fullhd.at<double>(2, 0) = -0.021509225493198905;
D_fullhd.at<double>(3, 0) = 0.0043378096628297145;
cv::Mat E = cv::Mat::eye(3, 3, cv::DataType<double>::type);
cv::Mat K_new(3, 3, cv::DataType<double>::type);
cv::fisheye::estimateNewCameraMatrixForUndistortRectify(K_fullhd, D_fullhd, size, E, K_new, 0.0, size);
EXPECT_MAT_NEAR(K_new, K_new_truth, 1e-6);
}
TEST_F(fisheyeTest, multiview_calibration)
{
const int n_images = 34;
@ -1114,54 +650,4 @@ TEST_F(fisheyeTest, multiview_calibration)
EXPECT_MAT_NEAR(distortions[1], D2_correct, 5e-2);
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// fisheyeTest::
const cv::Size fisheyeTest::imageSize(1280, 800);
const cv::Matx33d fisheyeTest::K(558.478087865323, 0, 620.458515360843,
0, 560.506767351568, 381.939424848348,
0, 0, 1);
const cv::Vec4d fisheyeTest::D(-0.0014613319981768, -0.00329861110580401, 0.00605760088590183, -0.00374209380722371);
const cv::Matx33d fisheyeTest::R ( 9.9756700084424932e-01, 6.9698277640183867e-02, 1.4929569991321144e-03,
-6.9711825162322980e-02, 9.9748249845531767e-01, 1.2997180766418455e-02,
-5.8331736398316541e-04,-1.3069635393884985e-02, 9.9991441852366736e-01);
const cv::Vec3d fisheyeTest::T(-9.9217369356044638e-02, 3.1741831972356663e-03, 1.8551007952921010e-04);
std::string fisheyeTest::combine(const std::string& _item1, const std::string& _item2)
{
std::string item1 = _item1, item2 = _item2;
std::replace(item1.begin(), item1.end(), '\\', '/');
std::replace(item2.begin(), item2.end(), '\\', '/');
if (item1.empty())
return item2;
if (item2.empty())
return item1;
char last = item1[item1.size()-1];
return item1 + (last != '/' ? "/" : "") + item2;
}
void fisheyeTest::merge4(const cv::Mat& tl, const cv::Mat& tr, const cv::Mat& bl, const cv::Mat& br, cv::Mat& merged)
{
int type = tl.type();
cv::Size sz = tl.size();
ASSERT_EQ(type, tr.type()); ASSERT_EQ(type, bl.type()); ASSERT_EQ(type, br.type());
ASSERT_EQ(sz.width, tr.cols); ASSERT_EQ(sz.width, bl.cols); ASSERT_EQ(sz.width, br.cols);
ASSERT_EQ(sz.height, tr.rows); ASSERT_EQ(sz.height, bl.rows); ASSERT_EQ(sz.height, br.rows);
merged.create(cv::Size(sz.width * 2, sz.height * 2), type);
tl.copyTo(merged(cv::Rect(0, 0, sz.width, sz.height)));
tr.copyTo(merged(cv::Rect(sz.width, 0, sz.width, sz.height)));
bl.copyTo(merged(cv::Rect(0, sz.height, sz.width, sz.height)));
br.copyTo(merged(cv::Rect(sz.width, sz.height, sz.width, sz.height)));
}
}} // namespace
@ -198,6 +198,44 @@ CV_EXPORTS float rectify3Collinear( InputArray _cameraMatrix1, InputArray _distC
double alpha, Size newImgSize,
Rect* roi1, Rect* roi2, int flags );
namespace fisheye {
/** @brief Stereo rectification for fisheye camera model
@param K1 First camera intrinsic matrix.
@param D1 First camera distortion parameters.
@param K2 Second camera intrinsic matrix.
@param D2 Second camera distortion parameters.
@param imageSize Size of the image used for stereo calibration.
@param R Rotation matrix between the coordinate systems of the first and the second
cameras.
@param tvec Translation vector between coordinate systems of the cameras.
@param R1 Output 3x3 rectification transform (rotation matrix) for the first camera.
@param R2 Output 3x3 rectification transform (rotation matrix) for the second camera.
@param P1 Output 3x4 projection matrix in the new (rectified) coordinate systems for the first
camera.
@param P2 Output 3x4 projection matrix in the new (rectified) coordinate systems for the second
camera.
@param Q Output \f$4 \times 4\f$ disparity-to-depth mapping matrix (see reprojectImageTo3D ).
@param flags Operation flags that may be zero or @ref CALIB_ZERO_DISPARITY . If the flag is set,
the function makes the principal points of each camera have the same pixel coordinates in the
rectified views. And if the flag is not set, the function may still shift the images in the
horizontal or vertical direction (depending on the orientation of epipolar lines) to maximize the
useful image area.
@param newImageSize New image resolution after rectification. The same size should be passed to
#initUndistortRectifyMap (see the stereo_calib.cpp sample in OpenCV samples directory). When (0,0)
is passed (default), it is set to the original imageSize . Setting it to a larger value can help you
preserve details in the original image, especially when there is a big radial distortion.
@param balance Sets the new focal length in range between the min focal length and the max focal
length. Balance is in range of [0, 1].
@param fov_scale Divisor for new focal length.
*/
CV_EXPORTS_W void stereoRectify(InputArray K1, InputArray D1, InputArray K2, InputArray D2, const Size &imageSize, InputArray R, InputArray tvec,
OutputArray R1, OutputArray R2, OutputArray P1, OutputArray P2, OutputArray Q, int flags, const Size &newImageSize = Size(),
double balance = 0.0, double fov_scale = 1.0);
} // namespace fisheye
/** @brief The base class for stereo correspondence algorithms.
*/
class CV_EXPORTS_W StereoMatcher : public Algorithm
@ -0,0 +1,5 @@
{
"namespaces_dict": {
"cv.fisheye": "fisheye"
}
}
@ -628,4 +628,85 @@ float rectify3Collinear( InputArray _cameraMatrix1, InputArray _distCoeffs1,
(P2.at<double>(idx,3)/P2.at<double>(idx,idx)));
}
void cv::fisheye::stereoRectify( InputArray K1, InputArray D1, InputArray K2, InputArray D2, const Size& imageSize,
InputArray _R, InputArray _tvec, OutputArray R1, OutputArray R2, OutputArray P1, OutputArray P2,
OutputArray Q, int flags, const Size& newImageSize, double balance, double fov_scale)
{
CV_INSTRUMENT_REGION();
CV_Assert((_R.size() == Size(3, 3) || _R.total() * _R.channels() == 3) && (_R.depth() == CV_32F || _R.depth() == CV_64F));
CV_Assert(_tvec.total() * _tvec.channels() == 3 && (_tvec.depth() == CV_32F || _tvec.depth() == CV_64F));
Mat aaa = _tvec.getMat().reshape(3, 1);
Vec3d rvec; // Rodrigues vector
if (_R.size() == Size(3, 3))
{
Matx33d rmat;
_R.getMat().convertTo(rmat, CV_64F);
rvec = Affine3d(rmat).rvec();
}
else if (_R.total() * _R.channels() == 3)
_R.getMat().convertTo(rvec, CV_64F);
Vec3d tvec;
_tvec.getMat().convertTo(tvec, CV_64F);
// rectification algorithm
rvec *= -0.5; // get average rotation
Matx33d r_r;
Rodrigues(rvec, r_r); // rotate cameras to same orientation by averaging
Vec3d t = r_r * tvec;
Vec3d uu(t[0] > 0 ? 1 : -1, 0, 0);
// calculate global Z rotation
Vec3d ww = t.cross(uu);
double nw = norm(ww);
if (nw > 0.0)
ww *= std::acos(fabs(t[0])/cv::norm(t))/nw;
Matx33d wr;
Rodrigues(ww, wr);
// apply to both views
Matx33d ri1 = wr * r_r.t();
Mat(ri1, false).convertTo(R1, R1.empty() ? CV_64F : R1.type());
Matx33d ri2 = wr * r_r;
Mat(ri2, false).convertTo(R2, R2.empty() ? CV_64F : R2.type());
Vec3d tnew = ri2 * tvec;
// calculate projection/camera matrices. these contain the relevant rectified image internal params (fx, fy=fx, cx, cy)
Matx33d newK1, newK2;
fisheye::estimateNewCameraMatrixForUndistortRectify(K1, D1, imageSize, R1, newK1, balance, newImageSize, fov_scale);
fisheye::estimateNewCameraMatrixForUndistortRectify(K2, D2, imageSize, R2, newK2, balance, newImageSize, fov_scale);
double fc_new = std::min(newK1(1,1), newK2(1,1));
Point2d cc_new[2] = { Vec2d(newK1(0, 2), newK1(1, 2)), Vec2d(newK2(0, 2), newK2(1, 2)) };
// The vertical focal length must be the same for both images to keep the epipolar constraint, so use fy for fx as well.
// For simplicity, set the principal points for both cameras to be the average
// of the two principal points (either one of or both x- and y- coordinates)
if( flags & STEREO_ZERO_DISPARITY )
cc_new[0] = cc_new[1] = (cc_new[0] + cc_new[1]) * 0.5;
else
cc_new[0].y = cc_new[1].y = (cc_new[0].y + cc_new[1].y)*0.5;
Mat(Matx34d(fc_new, 0, cc_new[0].x, 0,
0, fc_new, cc_new[0].y, 0,
0, 0, 1, 0), false).convertTo(P1, P1.empty() ? CV_64F : P1.type());
Mat(Matx34d(fc_new, 0, cc_new[1].x, tnew[0]*fc_new, // baseline * focal length
0, fc_new, cc_new[1].y, 0,
0, 0, 1, 0), false).convertTo(P2, P2.empty() ? CV_64F : P2.type());
if (Q.needed())
Mat(Matx44d(1, 0, 0, -cc_new[0].x,
0, 1, 0, -cc_new[0].y,
0, 0, 0, fc_new,
0, 0, -1./tnew[0], (cc_new[0].x - cc_new[1].x)/tnew[0]), false).convertTo(Q, Q.empty() ? CV_64F : Q.depth());
}
}
@ -0,0 +1,347 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
#include "test_precomp.hpp"
#include <opencv2/ts/cuda_test.hpp> // EXPECT_MAT_NEAR
#include "opencv2/3d.hpp"
#include <opencv2/core/utils/logger.hpp>
namespace opencv_test { namespace {
static bool checkPandROI(const Matx33d& M, const Matx<double, 5, 1>& D,
const Mat& R, const Mat& P, Size imgsize, Rect roi)
{
const double eps = 0.05;
const int N = 21;
int x, y, k;
vector<Point2f> pts, upts;
// step 1. check that all the original points belong to the destination image
for( y = 0; y < N; y++ )
for( x = 0; x < N; x++ )
pts.push_back(Point2f((float)x*imgsize.width/(N-1), (float)y*imgsize.height/(N-1)));
undistortPoints(pts, upts, M, D, R, P );
for( k = 0; k < N*N; k++ )
if( upts[k].x < -imgsize.width*eps || upts[k].x > imgsize.width*(1+eps) ||
upts[k].y < -imgsize.height*eps || upts[k].y > imgsize.height*(1+eps) )
{
CV_LOG_ERROR(NULL, cv::format("The point (%g, %g) was mapped to (%g, %g) which is out of image\n",
pts[k].x, pts[k].y, upts[k].x, upts[k].y));
return false;
}
// step 2. check that all the points inside ROI belong to the original source image
Mat temp(imgsize, CV_8U), utemp, map1, map2;
temp = Scalar::all(1);
initUndistortRectifyMap(M, D, R, P, imgsize, CV_16SC2, map1, map2);
remap(temp, utemp, map1, map2, INTER_LINEAR);
if(roi.x < 0 || roi.y < 0 || roi.x + roi.width > imgsize.width || roi.y + roi.height > imgsize.height)
{
CV_LOG_ERROR(NULL, cv::format("The ROI=(%d, %d, %d, %d) is outside of the imge rectangle\n",
roi.x, roi.y, roi.width, roi.height));
return false;
}
double s = sum(utemp(roi))[0];
if( s > roi.area() || roi.area() - s > roi.area()*(1-eps) )
{
CV_LOG_ERROR(NULL, cv::format("The ratio of black pixels inside the valid ROI (~%g%%) is too large\n",
s*100./roi.area()));
return false;
}
return true;
}
TEST(StereoGeometry, stereoRectify)
{
// camera parameters are extracted from the original calib3d test CV_StereoCalibrationTest::run
const Matx33d M1(
530.4643719672913, 0, 319.5,
0, 529.7477570329314, 239.5,
0, 0, 1);
const Matx<double, 5, 1> D1(-0.2982901576925627, 0.1134645765152131, 0, 0, 0);
const Matx33d M2(
530.4643719672913, 0, 319.5,
0, 529.7477570329314, 239.5,
0, 0, 1);
const Matx<double, 5, 1> D2(-0.2833068597502156, 0.0944810713984697, 0, 0, 0);
const Matx33d R(0.9996903750450727, 0.005330951201286465, -0.02430504066096785,
-0.004837810799471072, 0.9997821583334892, 0.02030348405319902,
0.02440798289310936, -0.02017961439967296, 0.9994983909610711);
const Matx31d T(-3.328706469151101, 0.05621025406095936, -0.02956576727262086);
const Size imageSize(640, 480);
Mat R1, R2, P1, P2, Q;
Rect roi1, roi2;
stereoRectify( M1, D1, M2, D2, imageSize, R, T, R1, R2, P1, P2, Q, 0, 1, imageSize, &roi1, &roi2 );
Mat eye33 = Mat::eye(3,3,CV_64F);
Mat R1t = R1.t(), R2t = R2.t();
EXPECT_LE(cvtest::norm(R1t*R1 - eye33, NORM_L2), 0.01) << "R1 is not orthogonal!";
EXPECT_LE(cvtest::norm(R2t*R2 - eye33, NORM_L2), 0.01) << "R2 is not orthogonal!";
//check that Tx after rectification is equal to distance between cameras
double tx = fabs(P2.at<double>(0, 3) / P2.at<double>(0, 0));
EXPECT_LE(fabs(tx - cvtest::norm(T, NORM_L2)), 1e-5);
EXPECT_TRUE(checkPandROI(M1, D1, R1, P1, imageSize, roi1));
EXPECT_TRUE(checkPandROI(M2, D2, R2, P2, imageSize, roi2));
//check that Q reprojects points before the camera
double testPoint[4] = {0.0, 0.0, 100.0, 1.0};
Mat reprojectedTestPoint = Q * Mat_<double>(4, 1, testPoint);
CV_Assert(reprojectedTestPoint.type() == CV_64FC1);
EXPECT_GT( reprojectedTestPoint.at<double>(2) / reprojectedTestPoint.at<double>(3), 0 ) << \
"A point after rectification is reprojected behind the camera";
}
TEST(StereoGeometry, regression_10791)
{
const Matx33d M1(
853.1387981631528, 0, 704.154907802121,
0, 853.6445089162528, 520.3600712930319,
0, 0, 1
);
const Matx33d M2(
848.6090216909176, 0, 701.6162856852185,
0, 849.7040162357157, 509.1864036137,
0, 0, 1
);
const Matx<double, 14, 1> D1(-6.463598629567206, 79.00104930508179, -0.0001006144444464403, -0.0005437499822299972,
12.56900616588467, -6.056719942752855, 76.3842481414836, 45.57460250612659,
0, 0, 0, 0, 0, 0);
const Matx<double, 14, 1> D2(0.6123436439798265, -0.4671756923224087, -0.0001261947899033442, -0.000597334584036978,
-0.05660119809538371, 1.037075740629769, -0.3076042835831711, -0.2502169324283623,
0, 0, 0, 0, 0, 0);
const Matx33d R(
0.9999926627018476, -0.0001095586963765905, 0.003829169539302921,
0.0001021735876758584, 0.9999981346680941, 0.0019287874145156,
-0.003829373712065528, -0.001928382022437616, 0.9999908085776333
);
const Matx31d T(-58.9161771697128, -0.01581306249996402, -0.8492960216760961);
const Size imageSize(1280, 960);
Mat R1, R2, P1, P2, Q;
Rect roi1, roi2;
stereoRectify(M1, D1, M2, D2, imageSize, R, T,
R1, R2, P1, P2, Q,
STEREO_ZERO_DISPARITY, 1, imageSize, &roi1, &roi2);
EXPECT_GE(roi1.area(), 400*300) << roi1;
EXPECT_GE(roi2.area(), 400*300) << roi2;
}
TEST(StereoGeometry, regression_11131)
{
const Matx33d M1(
1457.572438721727, 0, 1212.945694211622,
0, 1457.522226502963, 1007.32058848921,
0, 0, 1
);
const Matx33d M2(
1460.868570835972, 0, 1215.024068023046,
0, 1460.791367088, 1011.107202932225,
0, 0, 1
);
const Matx<double, 5, 1> D1(0, 0, 0, 0, 0);
const Matx<double, 5, 1> D2(0, 0, 0, 0, 0);
const Matx33d R(
0.9985404059825475, 0.02963547172078553, -0.04515303352041626,
-0.03103795276460111, 0.9990471552537432, -0.03068268351343364,
0.04420071389006859, 0.03203935697372317, 0.9985087763742083
);
const Matx31d T(0.9995500167379527, 0.0116311595111068, 0.02764923448462666);
const Size imageSize(2456, 2058);
Mat R1, R2, P1, P2, Q;
Rect roi1, roi2;
stereoRectify(M1, D1, M2, D2, imageSize, R, T,
R1, R2, P1, P2, Q,
STEREO_ZERO_DISPARITY, 1, imageSize, &roi1, &roi2);
EXPECT_GT(P1.at<double>(0, 0), 0);
EXPECT_GT(P2.at<double>(0, 0), 0);
EXPECT_GT(R1.at<double>(0, 0), 0);
EXPECT_GT(R2.at<double>(0, 0), 0);
EXPECT_GE(roi1.area(), 400*300) << roi1;
EXPECT_GE(roi2.area(), 400*300) << roi2;
}
class fisheyeTest : public ::testing::Test {
protected:
const static cv::Size imageSize;
const static cv::Matx33d K;
const static cv::Vec4d D;
const static cv::Matx33d R;
const static cv::Vec3d T;
std::string datasets_repository_path;
virtual void SetUp() {
datasets_repository_path = combine(cvtest::TS::ptr()->get_data_path(), "cv/cameracalibration/fisheye");
}
protected:
std::string combine(const std::string& _item1, const std::string& _item2);
static void merge4(const cv::Mat& tl, const cv::Mat& tr, const cv::Mat& bl, const cv::Mat& br, cv::Mat& merged);
};
const cv::Size fisheyeTest::imageSize(1280, 800);
const cv::Matx33d fisheyeTest::K(558.478087865323, 0, 620.458515360843,
0, 560.506767351568, 381.939424848348,
0, 0, 1);
const cv::Vec4d fisheyeTest::D(-0.0014613319981768, -0.00329861110580401, 0.00605760088590183, -0.00374209380722371);
const cv::Matx33d fisheyeTest::R ( 9.9756700084424932e-01, 6.9698277640183867e-02, 1.4929569991321144e-03,
-6.9711825162322980e-02, 9.9748249845531767e-01, 1.2997180766418455e-02,
-5.8331736398316541e-04,-1.3069635393884985e-02, 9.9991441852366736e-01);
const cv::Vec3d fisheyeTest::T(-9.9217369356044638e-02, 3.1741831972356663e-03, 1.8551007952921010e-04);
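// Joins two path fragments with '/', normalizing Windows-style separators first.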
std::string fisheyeTest::combine(const std::string& _item1, const std::string& _item2)
{
std::string item1 = _item1, item2 = _item2;
std::replace(item1.begin(), item1.end(), '\\', '/');
std::replace(item2.begin(), item2.end(), '\\', '/');
if (item1.empty())
return item2;
if (item2.empty())
return item1;
char last = item1[item1.size()-1];
return item1 + (last != '/' ? "/" : "") + item2;
}
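// Stitches four equally sized images into a 2x2 mosaic:
//   [ tl | tr ]
//   [ bl | br ]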
void fisheyeTest::merge4(const cv::Mat& tl, const cv::Mat& tr, const cv::Mat& bl, const cv::Mat& br, cv::Mat& merged)
{
int type = tl.type();
cv::Size sz = tl.size();
ASSERT_EQ(type, tr.type()); ASSERT_EQ(type, bl.type()); ASSERT_EQ(type, br.type());
ASSERT_EQ(sz.width, tr.cols); ASSERT_EQ(sz.width, bl.cols); ASSERT_EQ(sz.width, br.cols);
ASSERT_EQ(sz.height, tr.rows); ASSERT_EQ(sz.height, bl.rows); ASSERT_EQ(sz.height, br.rows);
merged.create(cv::Size(sz.width * 2, sz.height * 2), type);
tl.copyTo(merged(cv::Rect(0, 0, sz.width, sz.height)));
tr.copyTo(merged(cv::Rect(sz.width, 0, sz.width, sz.height)));
bl.copyTo(merged(cv::Rect(0, sz.height, sz.width, sz.height)));
br.copyTo(merged(cv::Rect(sz.width, sz.height, sz.width, sz.height)));
}
TEST_F(fisheyeTest, stereoRectify)
{
const std::string folder = combine(datasets_repository_path, "calib-3_stereo_from_JY");
cv::Size calibration_size = this->imageSize, requested_size = calibration_size;
cv::Matx33d K1 = this->K, K2 = K1;
cv::Mat D1 = cv::Mat(this->D), D2 = D1;
cv::Vec3d theT = this->T;
cv::Matx33d theR = this->R;
double balance = 0.0, fov_scale = 1.1;
cv::Mat R1, R2, P1, P2, Q;
cv::fisheye::stereoRectify(K1, D1, K2, D2, calibration_size, theR, theT, R1, R2, P1, P2, Q,
cv::STEREO_ZERO_DISPARITY, requested_size, balance, fov_scale);
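    // balance in [0, 1] selects the new focal length between its minimal and
    // maximal feasible values; fov_scale is a divisor for the new focal length,
    // so 1.1 slightly widens the rectified field of view.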
    // Reference data below was collected with these CMake flags: -DWITH_IPP=OFF -DCV_ENABLE_INTRINSICS=OFF -DCV_DISABLE_OPTIMIZATION=ON -DCMAKE_BUILD_TYPE=Debug
cv::Matx33d R1_ref(
0.9992853269091279, 0.03779164101000276, -0.0007920188690205426,
-0.03778569762983931, 0.9992646472015868, 0.006511981857667881,
0.001037534936357442, -0.006477400933964018, 0.9999784831677112
);
cv::Matx33d R2_ref(
0.9994868963898833, -0.03197579751378937, -0.001868774538573449,
0.03196298186616116, 0.9994677442608699, -0.0065265589947392,
0.002076471801477729, 0.006463478587068991, 0.9999769555891836
);
cv::Matx34d P1_ref(
420.9684016542647, 0, 586.3059567784627, 0,
0, 420.9684016542647, 374.8571836462291, 0,
0, 0, 1, 0
);
cv::Matx34d P2_ref(
420.9684016542647, 0, 586.3059567784627, -41.78881938824554,
0, 420.9684016542647, 374.8571836462291, 0,
0, 0, 1, 0
);
cv::Matx44d Q_ref(
1, 0, 0, -586.3059567784627,
0, 1, 0, -374.8571836462291,
0, 0, 0, 420.9684016542647,
0, 0, 10.07370889670733, -0
);
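    // Q_ref follows the standard reprojection layout: the last column holds
    // (-cx, -cy, f, 0) and Q(3, 2) = 1/|Tx| = 420.9684 / 41.7888, i.e. the
    // 10.0737... entry above, consistent with P2_ref.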
const double eps = 1e-10;
EXPECT_MAT_NEAR(R1_ref, R1, eps);
EXPECT_MAT_NEAR(R2_ref, R2, eps);
EXPECT_MAT_NEAR(P1_ref, P1, eps);
EXPECT_MAT_NEAR(P2_ref, P2, eps);
EXPECT_MAT_NEAR(Q_ref, Q, eps);
if (::testing::Test::HasFailure())
{
std::cout << "Actual values are:" << std::endl
<< "R1 =" << std::endl << R1 << std::endl
<< "R2 =" << std::endl << R2 << std::endl
<< "P1 =" << std::endl << P1 << std::endl
<< "P2 =" << std::endl << P2 << std::endl
<< "Q =" << std::endl << Q << std::endl;
}
if (cvtest::debugLevel == 0)
return;
// DEBUG code is below
cv::Mat lmapx, lmapy, rmapx, rmapy;
    // Build the rectification maps with the fisheye model
cv::fisheye::initUndistortRectifyMap(K1, D1, R1, P1, requested_size, CV_32F, lmapx, lmapy);
cv::fisheye::initUndistortRectifyMap(K2, D2, R2, P2, requested_size, CV_32F, rmapx, rmapy);
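    // The maps give, for every pixel of the rectified view, the source
    // coordinates in the original fisheye image; cv::remap below samples
    // the inputs through them.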
cv::Mat l, r, lundist, rundist;
for (int i = 0; i < 34; ++i)
{
SCOPED_TRACE(cv::format("image %d", i));
l = imread(combine(folder, cv::format("left/stereo_pair_%03d.jpg", i)), cv::IMREAD_COLOR);
r = imread(combine(folder, cv::format("right/stereo_pair_%03d.jpg", i)), cv::IMREAD_COLOR);
ASSERT_FALSE(l.empty());
ASSERT_FALSE(r.empty());
int ndisp = 128;
cv::rectangle(l, cv::Rect(255, 0, 829, l.rows-1), cv::Scalar(0, 0, 255));
cv::rectangle(r, cv::Rect(255, 0, 829, l.rows-1), cv::Scalar(0, 0, 255));
        cv::rectangle(r, cv::Rect(255-ndisp, 0, 829+ndisp, l.rows-1), cv::Scalar(0, 0, 255));
cv::remap(l, lundist, lmapx, lmapy, cv::INTER_LINEAR);
cv::remap(r, rundist, rmapx, rmapy, cv::INTER_LINEAR);
for (int ii = 0; ii < lundist.rows; ii += 20)
{
cv::line(lundist, cv::Point(0, ii), cv::Point(lundist.cols, ii), cv::Scalar(0, 255, 0));
cv::line(rundist, cv::Point(0, ii), cv::Point(lundist.cols, ii), cv::Scalar(0, 255, 0));
}
cv::Mat rectification;
merge4(l, r, lundist, rundist, rectification);
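        // Top row: raw inputs with the overlap rectangles; bottom row: the
        // rectified pair, where the green horizontal lines should pass through
        // the same scene features in both images.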
        // Pass "--test_debug" in the test arguments to enable file output
if (cvtest::debugLevel > 0)
cv::imwrite(cv::format("fisheye_rectification_AB_%03d.png", i), rectification);
}
}
}} // namespace